after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def plot(result_pickle_file_path, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_pickle_file_path)
plot_result(result_dict, show, plot_save_file)
|
def plot(result_dict_file, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_dict_file)
plot_result(result_dict, show, plot_save_file)
|
https://github.com/ricequant/rqalpha/issues/109
|
Traceback (most recent call last):
File "c:\programdata\anaconda2\lib\runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "c:\programdata\anaconda2\lib\runpy.py", line 72, in _run_code
exec code in run_globals
β β {'__builtins__': <module '__builtin__' (built-in)>, '__file__': 'C:\ProgramData\Anaconda2\Scripts\rqalpha.exe\__main__.py',...
qalpha.exe\__main__.py", line 2>> at 0256EA40, file "C:\ProgramData\Anaconda2\Scripts
File "C:\ProgramData\Anaconda2\Scripts\rqalpha.exe\__main__.py", line 9, in <module>
sys.exit(entry_point())
β β <function entry_point at 0x047D1CF0>
β <module 'sys' (built-in)>
File "c:\programdata\anaconda2\lib\site-packages\rqalpha\__main__.py", line 66, in entry_point
cli(obj={})
β <click.core.Group object at 0x047CFE90>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 722, in __call__
return self.main(*args, **kwargs)
β β β {'obj': {'VERBOSE': 0}}
β β ()
β <click.core.Group object at 0x047CFE90>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 697, in main
rv = self.invoke(ctx)
β β <click.core.Context object at 0x0482CC10>
β <click.core.Group object at 0x047CFE90>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 1066, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
β β β <click.core.Context object at 0x0482CE50>
β β <click.core.Context object at 0x0482CE50>
β <function _process_result at 0x0482D5B0>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
β β β <click.core.Context object at 0x0482CE50>
β β <click.core.Command object at 0x0482CF50>
β <click.core.Context object at 0x0482CE50>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 535, in invoke
return callback(*args, **kwargs)
β β β {'result_pickle_file_path': u'./1.pkl', 'plot_save_file': None, 'show': True}
β β ()
β <function plot at 0x0482D830>
TypeError: plot() got an unexpected keyword argument 'result_pickle_file_path'
|
TypeError
|
def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
# verify that the line is JSON
line = line.decode("utf-8")
try:
json.loads(line)
except ValueError:
# log event wasn't JSON.
# use the line itself as the message with unknown phase.
# We don't know what the right phase is, use 'unknown'.
# If it was a fatal error, presumably a 'failure'
# message will arrive shortly.
app_log.error("log event not json: %r", line)
line = json.dumps(
{
"phase": "unknown",
"message": line,
}
)
self.progress("log", line)
|
def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
self.progress("log", line.decode("utf-8"))
|
https://github.com/jupyterhub/binderhub/issues/164
|
/ # jupyter-repo2docker https://github.com/yuvipanda/example-requirements --json-logs
Traceback (most recent call last):
File "/usr/local/bin/jupyter-repo2docker", line 11, in <module>
load_entry_point('jupyter-repo2docker==0.4.1', 'console_scripts', 'jupyter-repo2docker')()
File "/usr/local/lib/python3.6/site-packages/repo2docker/__main__.py", line 6, in main
f.start()
File "/usr/local/lib/python3.6/site-packages/repo2docker/app.py", line 309, in start
checkout_path
File "/usr/local/lib/python3.6/site-packages/repo2docker/app.py", line 95, in fetch
capture=self.json_logs):
File "/usr/local/lib/python3.6/site-packages/repo2docker/utils.py", line 12, in execute_cmd
proc = subprocess.Popen(cmd, **kwargs)
File "/usr/local/lib/python3.6/subprocess.py", line 709, in __init__
restore_signals, start_new_session)
File "/usr/local/lib/python3.6/subprocess.py", line 1344, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] No such file or directory: 'git': 'git'
|
FileNotFoundError
|
def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).toLocalFile()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# remove previous usage of the value
if projectFile in uniqueProjects:
projects.remove(projectFile)
# add the new value in the first place
projects.insert(0, projectFile)
# keep only the 10 first elements
projects = projects[0:20]
settings = QSettings()
settings.beginGroup("RecentFiles")
size = settings.beginWriteArray("Projects")
for i, p in enumerate(projects):
settings.setArrayIndex(i)
settings.setValue("filepath", p)
settings.endArray()
settings.sync()
self.recentProjectFilesChanged.emit()
|
def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).path()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# remove previous usage of the value
if projectFile in uniqueProjects:
projects.remove(projectFile)
# add the new value in the first place
projects.insert(0, projectFile)
# keep only the 10 first elements
projects = projects[0:20]
settings = QSettings()
settings.beginGroup("RecentFiles")
size = settings.beginWriteArray("Projects")
for i, p in enumerate(projects):
settings.setArrayIndex(i)
settings.setValue("filepath", p)
settings.endArray()
settings.sync()
self.recentProjectFilesChanged.emit()
|
https://github.com/alicevision/meshroom/issues/912
|
[2020-05-23 16:12:48,660][ERROR] Traceback (most recent call last):
File "D:\Meshroom_Src\meshroom\meshroom\ui\reconstruction.py", line 432, in load
super(Reconstruction, self).load(filepath, setupProjectFile)
File "D:\Meshroom_Src\meshroom\meshroom\ui\graph.py", line 314, in load
g.load(filepath, setupProjectFile)
File "D:\Meshroom_Src\meshroom\meshroom\core\graph.py", line 247, in load
with open(filepath) as jsonFile:
OSError: [Errno 22] Invalid argument: '/D:/Meshroom_Dev/test-project/mostree.mg'
|
OSError
|
def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return initial CameraInit and SfM nodes.
Args:
withMVS (bool): whether to create the MVS pipeline after the augmentation
Returns:
Node, Node: CameraInit, StructureFromMotion
"""
sfm = self.lastSfmNode()
if not sfm:
return None, None
if len(self._cameraInits) == 1:
assert self._cameraInit == self._cameraInits[0]
# Initial CameraInit is empty, use this one
if len(self._cameraInits[0].viewpoints) == 0:
return self._cameraInit, sfm
with self.groupedGraphModification("SfM Augmentation"):
sfm, mvs = multiview.sfmAugmentation(self, self.lastSfmNode(), withMVS=withMVS)
self.sfmAugmented.emit(sfm[0], mvs[-1] if mvs else sfm[-1])
return sfm[0], sfm[-1]
|
def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return initial CameraInit and SfM nodes.
Args:
withMVS (bool): whether to create the MVS pipeline after the augmentation
Returns:
Node, Node: CameraInit, StructureFromMotion
"""
sfm = self.lastSfmNode()
if not sfm:
return None, None
if len(self._cameraInits) == 1:
assert self._cameraInit == self._cameraInits[0]
# Initial CameraInit is empty, use this one
if len(self._cameraInits[0].viewpoints) == 0:
return self._cameraInit, sfm
with self.groupedGraphModification("SfM Augmentation"):
sfm, mvs = multiview.sfmAugmentation(self, self.lastSfmNode(), withMVS=withMVS)
self.sfmAugmented.emit(sfm[0], mvs[-1])
return sfm[0], sfm[-1]
|
https://github.com/alicevision/meshroom/issues/127
|
Traceback (most recent call last):
File "C:\Users\andre\work\meshroom\meshroom\ui\reconstruction.py", line 72, in start
raise RuntimeError("Invalid folder provided: {}".format(folder))
RuntimeError: Invalid folder provided: /F:/ai-ml-models/images/live
|
RuntimeError
|
def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
pymathicspart = part
if pymathicspart is None:
pymathicspart = DocPart(self, "Pymathics Modules", is_reference=True)
self.parts.append(pymathicspart)
# For each module, create the documentation object and load the chapters in the pymathics part.
for pymmodule in default_pymathics_modules:
pymathicsdoc = PyMathicsDocumentation(pymmodule)
for part in pymathicsdoc.parts:
for ch in part.chapters:
ch.title = f"{pymmodule} {part.title} {ch.title}"
ch.part = pymathicspart
pymathicspart.chapters_by_slug[ch.slug] = ch
pymathicspart.chapters.append(ch)
self.pymathics_doc_loaded = True
|
def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
pymathicspart = part
if pymathicspart is None:
pymathicspart = DocPart(self, "Pymathics Modules", is_reference=True)
self.parts.append(pymathicspart)
# For each module, create the documentation object and load the chapters in the pymathics part.
for pymmodule in default_pymathics_modules:
pymathicsdoc = PyMathicsDocumentation(pymmodule)
for part in pymathicsdoc.parts:
for ch in part.chapters:
ch.title = f"{pymmodule.name} {part.title} {ch.title}"
ch.part = pymathicspart
pymathicspart.chapters_by_slug[ch.slug] = ch
pymathicspart.chapters.append(ch)
self.pymathics_doc_loaded = True
|
https://github.com/mathics/Mathics/issues/906
|
$ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
utility.execute()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 375, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 323, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 361, in execute
self.check()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 390, in check
include_deployment_checks=include_deployment_checks,
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/commands/migrate.py", line 65, in _run_checks
issues.extend(super()._run_checks(**kwargs))
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 377, in _run_checks
return checks.run_checks(**kwargs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/registry.py", line 72, in run_checks
new_errors = check(app_configs=app_configs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 40, in check_url_namespaces_unique
all_namespaces = _load_all_namespaces(resolver)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 57, in _load_all_namespaces
url_patterns = getattr(resolver, 'url_patterns', [])
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 571, in url_patterns
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 564, in urlconf_module
return import_module(self.urlconf_name)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/urls.py", line 14, in <module>
url(r'^', include('mathics.web.urls')),
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/conf.py", line 34, in include
urlconf_module = import_module(urlconf_module)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/web/urls.py", line 6, in <module>
from mathics.web.views import query, main_view, login, logout, save, open, get_worksheets, doc_search, doc_part, doc_chapter, doc_section, doc
File "/home/pablo/Documents/Mathics/mathics/web/views.py", line 28, in <module>
documentation.load_pymathics_doc()
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 727, in load_pymathics_doc
pymathicsdoc = PyMathicsDocumentation(pymmodule)
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 765, in __init__
self.name = self.pymathicsmodule.pymathics_version_data['name']
KeyError: 'name'
error: failed to create database
|
KeyError
|
def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the module and verifies it is a pymathics module
try:
self.pymathicsmodule = importlib.import_module(module)
except ImportError:
print("Module does not exist")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
try:
mainfolder = self.pymathicsmodule.__path__[0]
self.name = self.pymathicsmodule.pymathics_version_data["name"]
self.version = self.pymathicsmodule.pymathics_version_data["version"]
self.author = self.pymathicsmodule.pymathics_version_data["author"]
except (AttributeError, KeyError, IndexError):
print(module + " is not a pymathics module.")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
# Paths
self.doc_dir = self.pymathicsmodule.__path__[0] + "/doc/"
self.xml_data_file = self.doc_dir + "xml/data"
self.tex_data_file = self.doc_dir + "tex/data"
self.latex_file = self.doc_dir + "tex/documentation.tex"
# Load the dictionary of mathics symbols defined in the module
self.symbols = {}
from mathics.builtin import is_builtin, Builtin
print("loading symbols")
for name in dir(self.pymathicsmodule):
var = getattr(self.pymathicsmodule, name)
if (
hasattr(var, "__module__")
and var.__module__ != "mathics.builtin.base"
and is_builtin(var)
and not name.startswith("_")
and var.__module__[: len(self.pymathicsmodule.__name__)]
== self.pymathicsmodule.__name__
): # nopep8
instance = var(expression=False)
if isinstance(instance, Builtin):
self.symbols[instance.get_name()] = instance
# Defines de default first part, in case we are building an independent documentation module.
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
try:
files = listdir(self.doc_dir)
files.sort()
except FileNotFoundError:
self.doc_dir = ""
self.xml_data_file = ""
self.tex_data_file = ""
self.latex_file = ""
files = []
appendix = []
for file in files:
part_title = file[2:]
if part_title.endswith(".mdoc"):
part_title = part_title[: -len(".mdoc")]
part = DocPart(self, part_title)
text = open(self.doc_dir + file, "rb").read().decode("utf8")
text = filter_comments(text)
chapters = CHAPTER_RE.findall(text)
for title, text in chapters:
chapter = DocChapter(part, title)
text += '<section title=""></section>'
sections = SECTION_RE.findall(text)
for pre_text, title, text in sections:
if not chapter.doc:
chapter.doc = Doc(pre_text)
if title:
section = DocSection(chapter, title, text)
chapter.sections.append(section)
part.chapters.append(chapter)
if file[0].isdigit():
self.parts.append(part)
else:
part.is_appendix = True
appendix.append(part)
# Builds the automatic documentation
builtin_part = DocPart(self, "Pymathics Modules", is_reference=True)
title, text = get_module_doc(self.pymathicsmodule)
chapter = DocChapter(builtin_part, title, Doc(text))
for name in self.symbols:
instance = self.symbols[name]
installed = True
for package in getattr(instance, "requires", []):
try:
importlib.import_module(package)
except ImportError:
installed = False
break
section = DocSection(
chapter,
strip_system_prefix(name),
instance.__doc__ or "",
operator=instance.get_operator(),
installed=installed,
)
chapter.sections.append(section)
builtin_part.chapters.append(chapter)
self.parts.append(builtin_part)
# Adds possible appendices
for part in appendix:
self.parts.append(part)
# set keys of tests
for tests in self.get_tests():
for test in tests.tests:
test.key = (tests.part, tests.chapter, tests.section, test.index)
|
def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the module and verifies it is a pymathics module
try:
self.pymathicsmodule = importlib.import_module(module)
except ImportError:
print("Module does not exist")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
if hasattr(self.pymathicsmodule, "pymathics_version_data"):
mainfolder = self.pymathicsmodule.__path__[0]
self.name = self.pymathicsmodule.pymathics_version_data["name"]
self.version = self.pymathicsmodule.pymathics_version_data["version"]
self.author = self.pymathicsmodule.pymathics_version_data["author"]
else:
print(module + " is not a pymathics module.")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
# Paths
self.doc_dir = self.pymathicsmodule.__path__[0] + "/doc/"
self.xml_data_file = self.doc_dir + "xml/data"
self.tex_data_file = self.doc_dir + "tex/data"
self.latex_file = self.doc_dir + "tex/documentation.tex"
# Load the dictionary of mathics symbols defined in the module
self.symbols = {}
from mathics.builtin import is_builtin, Builtin
print("loading symbols")
for name in dir(self.pymathicsmodule):
var = getattr(self.pymathicsmodule, name)
if (
hasattr(var, "__module__")
and var.__module__ != "mathics.builtin.base"
and is_builtin(var)
and not name.startswith("_")
and var.__module__[: len(self.pymathicsmodule.__name__)]
== self.pymathicsmodule.__name__
): # nopep8
instance = var(expression=False)
if isinstance(instance, Builtin):
self.symbols[instance.get_name()] = instance
# Defines de default first part, in case we are building an independent documentation module.
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
try:
files = listdir(self.doc_dir)
files.sort()
except FileNotFoundError:
self.doc_dir = ""
self.xml_data_file = ""
self.tex_data_file = ""
self.latex_file = ""
files = []
appendix = []
for file in files:
part_title = file[2:]
if part_title.endswith(".mdoc"):
part_title = part_title[: -len(".mdoc")]
part = DocPart(self, part_title)
text = open(self.doc_dir + file, "rb").read().decode("utf8")
text = filter_comments(text)
chapters = CHAPTER_RE.findall(text)
for title, text in chapters:
chapter = DocChapter(part, title)
text += '<section title=""></section>'
sections = SECTION_RE.findall(text)
for pre_text, title, text in sections:
if not chapter.doc:
chapter.doc = Doc(pre_text)
if title:
section = DocSection(chapter, title, text)
chapter.sections.append(section)
part.chapters.append(chapter)
if file[0].isdigit():
self.parts.append(part)
else:
part.is_appendix = True
appendix.append(part)
# Builds the automatic documentation
builtin_part = DocPart(self, "Pymathics Modules", is_reference=True)
title, text = get_module_doc(self.pymathicsmodule)
chapter = DocChapter(builtin_part, title, Doc(text))
for name in self.symbols:
instance = self.symbols[name]
installed = True
for package in getattr(instance, "requires", []):
try:
importlib.import_module(package)
except ImportError:
installed = False
break
section = DocSection(
chapter,
strip_system_prefix(name),
instance.__doc__ or "",
operator=instance.get_operator(),
installed=installed,
)
chapter.sections.append(section)
builtin_part.chapters.append(chapter)
self.parts.append(builtin_part)
# Adds possible appendices
for part in appendix:
self.parts.append(part)
# set keys of tests
for tests in self.get_tests():
for test in tests.tests:
test.key = (tests.part, tests.chapter, tests.section, test.index)
|
https://github.com/mathics/Mathics/issues/906
|
$ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
utility.execute()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 375, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 323, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 361, in execute
self.check()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 390, in check
include_deployment_checks=include_deployment_checks,
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/commands/migrate.py", line 65, in _run_checks
issues.extend(super()._run_checks(**kwargs))
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 377, in _run_checks
return checks.run_checks(**kwargs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/registry.py", line 72, in run_checks
new_errors = check(app_configs=app_configs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 40, in check_url_namespaces_unique
all_namespaces = _load_all_namespaces(resolver)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 57, in _load_all_namespaces
url_patterns = getattr(resolver, 'url_patterns', [])
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 571, in url_patterns
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 564, in urlconf_module
return import_module(self.urlconf_name)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/urls.py", line 14, in <module>
url(r'^', include('mathics.web.urls')),
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/conf.py", line 34, in include
urlconf_module = import_module(urlconf_module)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/web/urls.py", line 6, in <module>
from mathics.web.views import query, main_view, login, logout, save, open, get_worksheets, doc_search, doc_part, doc_chapter, doc_section, doc
File "/home/pablo/Documents/Mathics/mathics/web/views.py", line 28, in <module>
documentation.load_pymathics_doc()
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 727, in load_pymathics_doc
pymathicsdoc = PyMathicsDocumentation(pymmodule)
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 765, in __init__
self.name = self.pymathicsmodule.pymathics_version_data['name']
KeyError: 'name'
error: failed to create database
|
KeyError
|
def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if not key.startswith("mathics."):
print(f'removing module "{key}" not in mathics.')
del builtins_by_module[key]
# print("reloading symbols from current builtins.")
for s in self.pymathics:
if s in self.builtin:
# If there was a true built-in definition for the symbol, restore it, else, remove he symbol.
if self.pymathics[s]:
self.builtin[s] = self.pymathics[s]
builtins[s] = None
for key, val in builtins_by_module.items():
for simb in val:
if simb.get_name() == s:
builtins[s] = simb
break
if builtins[s] is not None:
break
if builtins[s] is None:
builtins.__delitem__(s)
else:
self.builtin.__delitem__(s)
builtins.__delitem__(s)
self.pymathics = {}
# print("everything is clean")
return None
|
def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if key[:8] != "mathics.":
print("removing module ", key, " not in mathics.")
del builtins_by_module[key]
# print("reloading symbols from current builtins.")
for s in self.pymathics:
if s in self.builtin:
# If there was a true built-in definition for the symbol, restore it, else, remove he symbol.
if self.pymathics[s]:
self.builtin[s] = self.pymathics[s]
builtins[s] = None
for key, val in builtins_by_module:
for simb in val:
if simb.get_name() == s:
builtins[s] = simb
break
if builtins[s] is not None:
break
if builtins[s] is None:
builtins.__delitem__(s)
else:
self.builtin.__delitem__(s)
builtins.__delitem__(s)
self.pymathics = {}
# print("everything is clean")
return None
|
https://github.com/mathics/Mathics/issues/836
|
Mathics 1.1.dev0
on CPython 3.6.9 (default, Jul 17 2020, 12:50:27)
using SymPy 1.6.2, mpmath 1.1.0
Copyright (C) 2011-2020 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= a = 3
Out[1]= 3
In[2]:= Quit[]
removing module pymathics.natlang not in mathics.
In[1]:= LoadModule["pymathics.natlang"]
Out[1]= pymathics.natlang
In[2]:= Quit[]
removing module pymathics.natlang not in mathics.
Traceback (most recent call last):
File "/home/pablo/.local/bin/mathics", line 315, in <module>
main()
File "/home/pablo/.local/bin/mathics", line 298, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "/home/pablo/Documents/Mathics/mathics/core/evaluation.py", line 286, in evaluate
result = run_with_timeout_and_stack(evaluate, timeout)
File "/home/pablo/Documents/Mathics/mathics/core/evaluation.py", line 95, in run_with_timeout_and_stack
return request()
File "/home/pablo/Documents/Mathics/mathics/core/evaluation.py", line 264, in evaluate
result = query.evaluate(self)
File "/home/pablo/Documents/Mathics/mathics/core/expression.py", line 853, in evaluate
expr, reevaluate = expr.evaluate_next(evaluation)
File "/home/pablo/Documents/Mathics/mathics/core/expression.py", line 975, in evaluate_next
result = rule.apply(new, evaluation, fully=False)
File "/home/pablo/Documents/Mathics/mathics/core/rules.py", line 63, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "/home/pablo/Documents/Mathics/mathics/core/pattern.py", line 203, in match
yield_head, expression.get_head(), vars, evaluation)
File "/home/pablo/Documents/Mathics/mathics/core/pattern.py", line 132, in match
yield_func(vars, None)
File "/home/pablo/Documents/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_func(head_vars, None)
File "/home/pablo/Documents/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(expression, vars, options, evaluation)
File "/home/pablo/Documents/Mathics/mathics/core/rules.py", line 124, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "/home/pablo/Documents/Mathics/mathics/builtin/assignment.py", line 2205, in apply
evaluation.definitions.clear_pymathics_modules()
File "/home/pablo/Documents/Mathics/mathics/core/definitions.py", line 157, in clear_pymathics_modules
for key, val in builtins_by_module:
ValueError: too many values to unpack (expected 2)
|
ValueError
|
def apply(self, evaluation):
"Exit"
exit()
|
def apply(self, evaluation):
"Exit[]"
sys.exit()
|
https://github.com/mathics/Mathics/issues/813
|
Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/Mathics/mathics/main.py", line 303, in <module>
main()
File "~/Documents/Mathics/mathics/main.py", line 286, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 288, in evaluate
result = run_with_timeout_and_stack(evaluate, timeout)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 95, in run_with_timeout_and_stack
return request()
File "~/Documents/Mathics/mathics/core/evaluation.py", line 265, in evaluate
result = query.evaluate(self)
File "~/Documents/Mathics/mathics/core/expression.py", line 853, in evaluate
expr, reevaluate = expr.evaluate_next(evaluation)
File "~/Documents/Mathics/mathics/core/expression.py", line 975, in evaluate_next
result = rule.apply(new, evaluation, fully=False)
File "~/Documents/Mathics/mathics/core/rules.py", line 63, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "~/Documents/Mathics/mathics/core/pattern.py", line 203, in match
yield_head, expression.get_head(), vars, evaluation)
File "~/Documents/Mathics/mathics/core/pattern.py", line 132, in match
yield_func(vars, None)
File "~/Documents/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_func(head_vars, None)
File "~/Documents/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(expression, vars, options, evaluation)
File "~/Documents/Mathics/mathics/core/rules.py", line 124, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "~/Documents/Mathics/mathics/builtin/evaluation.py", line 465, in apply
sys.exit()
NameError: name 'sys' is not defined
|
NameError
|
def apply_n(self, n, evaluation):
"Exit[n_Integer]"
exit(n.get_int_value())
|
def apply_n(self, n, evaluation):
"Exit[n_Integer]"
sys.exit(n.get_int_value())
|
https://github.com/mathics/Mathics/issues/813
|
Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/Mathics/mathics/main.py", line 303, in <module>
main()
File "~/Documents/Mathics/mathics/main.py", line 286, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 288, in evaluate
result = run_with_timeout_and_stack(evaluate, timeout)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 95, in run_with_timeout_and_stack
return request()
File "~/Documents/Mathics/mathics/core/evaluation.py", line 265, in evaluate
result = query.evaluate(self)
File "~/Documents/Mathics/mathics/core/expression.py", line 853, in evaluate
expr, reevaluate = expr.evaluate_next(evaluation)
File "~/Documents/Mathics/mathics/core/expression.py", line 975, in evaluate_next
result = rule.apply(new, evaluation, fully=False)
File "~/Documents/Mathics/mathics/core/rules.py", line 63, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "~/Documents/Mathics/mathics/core/pattern.py", line 203, in match
yield_head, expression.get_head(), vars, evaluation)
File "~/Documents/Mathics/mathics/core/pattern.py", line 132, in match
yield_func(vars, None)
File "~/Documents/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_func(head_vars, None)
File "~/Documents/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(expression, vars, options, evaluation)
File "~/Documents/Mathics/mathics/core/rules.py", line 124, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "~/Documents/Mathics/mathics/builtin/evaluation.py", line 465, in apply
sys.exit()
NameError: name 'sys' is not defined
|
NameError
|
def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
f = urllib2.urlopen(py_url)
try:
if sys.version_info >= (3, 0):
content_type = f.info().get_content_type()
else:
content_type = f.headers["content-type"]
os.write(temp_handle, f.read())
finally:
f.close()
def determine_filetype():
return mimetype_dict.get(content_type)
result = Import._import(temp_path, determine_filetype, elements, evaluation)
except HTTPError as e:
evaluation.message(
"FetchURL",
"httperr",
url,
"the server returned an HTTP status code of %s (%s)"
% (e.code, str(e.reason)),
)
return Symbol("$Failed")
except URLError as e: # see https://docs.python.org/3/howto/urllib2.html
if hasattr(e, "reason"):
evaluation.message("FetchURL", "httperr", url, str(e.reason))
elif hasattr(e, "code"):
evaluation.message(
"FetchURL", "httperr", url, "server returned %s" % e.code
)
return Symbol("$Failed")
except ValueError as e:
evaluation.message("FetchURL", "httperr", url, str(e))
return Symbol("$Failed")
finally:
os.unlink(temp_path)
return result
|
def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
with urllib2.urlopen(py_url) as f:
content_type = f.info().get_content_type()
os.write(temp_handle, f.read())
def determine_filetype():
return mimetype_dict.get(content_type)
result = Import._import(temp_path, determine_filetype, elements, evaluation)
except HTTPError as e:
evaluation.message(
"FetchURL",
"httperr",
url,
"the server returned an HTTP status code of %s (%s)"
% (e.code, str(e.reason)),
)
return Symbol("$Failed")
except URLError as e: # see https://docs.python.org/3/howto/urllib2.html
if hasattr(e, "reason"):
evaluation.message("FetchURL", "httperr", url, str(e.reason))
elif hasattr(e, "code"):
evaluation.message(
"FetchURL", "httperr", url, "server returned %s" % e.code
)
return Symbol("$Failed")
except ValueError as e:
evaluation.message("FetchURL", "httperr", url, str(e))
return Symbol("$Failed")
finally:
os.unlink(temp_path)
return result
|
https://github.com/mathics/Mathics/issues/562
|
In[1]:= Import["https://upload.wikimedia.org/wikipedia/en/2/24/Lenna.png"]
Traceback (most recent call last):
File "/home/angus/venv_pypy/bin/mathics", line 11, in <module>
load_entry_point('Mathics', 'console_scripts', 'mathics')()
File "/home/angus/Mathics/mathics/main.py", line 286, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "/home/angus/Mathics/mathics/core/evaluation.py", line 257, in evaluate
result = run_with_timeout(evaluate, timeout)
File "/home/angus/Mathics/mathics/core/evaluation.py", line 76, in run_with_timeout
return request()
File "/home/angus/Mathics/mathics/core/evaluation.py", line 240, in evaluate
result = query.evaluate(self)
File "/home/angus/Mathics/mathics/core/expression.py", line 868, in evaluate
return result.evaluate(evaluation)
File "/home/angus/Mathics/mathics/core/expression.py", line 868, in evaluate
return result.evaluate(evaluation)
File "/home/angus/Mathics/mathics/core/expression.py", line 862, in evaluate
result = rule.apply(new, evaluation, fully=False)
File "/home/angus/Mathics/mathics/core/rules.py", line 73, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "/home/angus/Mathics/mathics/core/pattern.py", line 206, in match
yield_head, expression.get_head(), vars, evaluation)
File "/home/angus/Mathics/mathics/core/pattern.py", line 135, in match
yield_func(vars, None)
File "/home/angus/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_choice, expression, attributes, head_vars)
File "/home/angus/Mathics/mathics/core/pattern.py", line 321, in get_pre_choices
yield_func(vars)
File "/home/angus/Mathics/mathics/core/pattern.py", line 187, in yield_choice
wrap_oneid=expression.get_head_name() != 'System`MakeBoxes')
File "/home/angus/Mathics/mathics/core/pattern.py", line 478, in match_leaf
include_flattened=include_flattened)
File "/home/angus/Mathics/mathics/core/pattern.py", line 342, in get_wrappings
yield_func(items[0])
File "/home/angus/Mathics/mathics/core/pattern.py", line 474, in yield_wrapping
leaf_count=leaf_count, wrap_oneid=wrap_oneid)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 768, in match
self.pattern.match(yield_func, expression, new_vars, evaluation)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 951, in match
yield_func(vars, None)
File "/home/angus/Mathics/mathics/core/pattern.py", line 466, in match_yield
leaf_count=leaf_count, wrap_oneid=wrap_oneid)
File "/home/angus/Mathics/mathics/core/pattern.py", line 478, in match_leaf
include_flattened=include_flattened)
File "/home/angus/Mathics/mathics/core/pattern.py", line 342, in get_wrappings
yield_func(items[0])
File "/home/angus/Mathics/mathics/core/pattern.py", line 474, in yield_wrapping
leaf_count=leaf_count, wrap_oneid=wrap_oneid)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 768, in match
self.pattern.match(yield_func, expression, new_vars, evaluation)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 953, in match
yield_func(vars, None)
File "/home/angus/Mathics/mathics/core/pattern.py", line 469, in match_yield
yield_func(new_vars, items_rest)
File "/home/angus/Mathics/mathics/core/pattern.py", line 458, in leaf_yield
(rest_expression[0] + items_rest[0], next_rest[1]))
File "/home/angus/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(vars, options, evaluation)
File "/home/angus/Mathics/mathics/core/rules.py", line 131, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "/home/angus/Mathics/mathics/builtin/importexport.py", line 393, in apply
with urllib2.urlopen(py_url) as f:
AttributeError: addinfourl instance has no attribute '__enter__'
|
AttributeError
|
def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
try:
return {
"backend_cpu_percentage": "{}%".format(
backend_data["system"]["cpu_percentage"]
),
"number_of_running_analyses": len(
backend_data["analysis"]["current_analyses"]
),
}
except KeyError:
return {"backend_cpu_percentage": "n/a", "number_of_running_analyses": "n/a"}
|
def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
return {
"backend_cpu_percentage": backend_data["system"]["cpu_percentage"],
"number_of_running_analyses": len(backend_data["analysis"]["current_analyses"]),
}
|
https://github.com/fkie-cad/FACT_core/issues/448
|
[2020-07-07 09:46:38,595] ERROR in app: Exception on /ajax/stats/system [GET]
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/local/lib/python3.8/dist-packages/flask_restful/__init__.py", line 272, in error_router
return original_handler(e)
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python3.8/dist-packages/flask/_compat.py", line 39, in reraise
raise value
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "./web_interface/security/decorator.py", line 11, in decorated_view
return fn(*args, **kwargs)
File "./web_interface/components/ajax_routes.py", line 186, in _get_system_stats
'backend_cpu_percentage': backend_data['system']['cpu_percentage'],
KeyError: 'system'
|
KeyError
|
def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
[
"unzip 3.3.9",
"rm 3.3.9",
"rm -rf ./web_js/jstree/vakata*",
"mv vakata* web_js/jstree",
],
"jstree",
)
wget_static_web_content(
"https://ajax.googleapis.com/ajax/libs/angularjs/1.4.8/angular.min.js",
".",
[],
"angularJS",
)
wget_static_web_content(
"https://github.com/chartjs/Chart.js/releases/download/v2.3.0/Chart.js",
".",
[],
"charts.js",
)
_build_highlight_js()
for css_url in [
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/css/bootstrap-datepicker.standalone.css",
]:
wget_static_web_content(css_url, "web_css", [])
for js_url in [
"https://cdnjs.cloudflare.com/ajax/libs/jquery/1.12.1/jquery.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/js/bootstrap-datepicker.js",
"https://raw.githubusercontent.com/moment/moment/develop/moment.js",
]:
wget_static_web_content(js_url, "web_js", [])
if not Path("web_css/fontawesome").exists():
wget_static_web_content(
"https://use.fontawesome.com/releases/v5.13.0/fontawesome-free-5.13.0-web.zip",
".",
[
"unzip fontawesome-free-5.13.0-web.zip",
"rm fontawesome-free-5.13.0-web.zip",
"mv fontawesome-free-5.13.0-web web_css/fontawesome",
],
)
if not Path("bootstrap3-editable").exists():
wget_static_web_content(
"https://vitalets.github.io/x-editable/assets/zip/bootstrap3-editable-1.5.1.zip",
".",
[
"unzip -o bootstrap3-editable-1.5.1.zip",
"rm bootstrap3-editable-1.5.1.zip CHANGELOG.txt LICENSE-MIT README.md",
"rm -rf inputs-ext",
],
"x-editable",
)
|
def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
["unzip 3.3.9", "rm 3.3.9", "mv vakata* web_js/jstree"],
"jstree",
)
wget_static_web_content(
"https://ajax.googleapis.com/ajax/libs/angularjs/1.4.8/angular.min.js",
".",
[],
"angularJS",
)
wget_static_web_content(
"https://github.com/chartjs/Chart.js/releases/download/v2.3.0/Chart.js",
".",
[],
"charts.js",
)
_build_highlight_js()
for css_url in [
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/css/bootstrap-datepicker.standalone.css",
]:
wget_static_web_content(css_url, "web_css", [])
for js_url in [
"https://cdnjs.cloudflare.com/ajax/libs/jquery/1.12.1/jquery.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/js/bootstrap-datepicker.js",
"https://raw.githubusercontent.com/moment/moment/develop/moment.js",
]:
wget_static_web_content(js_url, "web_js", [])
if not Path("web_css/fontawesome").exists():
wget_static_web_content(
"https://use.fontawesome.com/releases/v5.13.0/fontawesome-free-5.13.0-web.zip",
".",
[
"unzip fontawesome-free-5.13.0-web.zip",
"rm fontawesome-free-5.13.0-web.zip",
"mv fontawesome-free-5.13.0-web web_css/fontawesome",
],
)
if not Path("bootstrap3-editable").exists():
wget_static_web_content(
"https://vitalets.github.io/x-editable/assets/zip/bootstrap3-editable-1.5.1.zip",
".",
[
"unzip -o bootstrap3-editable-1.5.1.zip",
"rm bootstrap3-editable-1.5.1.zip CHANGELOG.txt LICENSE-MIT README.md",
"rm -rf inputs-ext",
],
"x-editable",
)
|
https://github.com/fkie-cad/FACT_core/issues/392
|
[2020-04-16 10:42:50][frontend][INFO]: Install static jstree content
Traceback (most recent call last):
File "src/install.py", line 173, in <module>
install()
File "src/install.py", line 157, in install
frontend(not args.no_radare, args.nginx)
File "/home/weidenba/FACT_core/src/install/frontend.py", line 165, in main
_install_css_and_js_files()
File "/home/weidenba/FACT_core/src/install/frontend.py", line 107, in _install_css_and_js_files
wget_static_web_content('https://github.com/vakata/jstree/zipball/3.3.9', '.', ['unzip 3.3.9', 'rm 3.3.9', 'mv vakata* web_js/jstree'], 'jstree')
File "/home/weidenba/FACT_core/src/install/frontend.py", line 34, in wget_static_web_content
raise InstallationError('Problem in processing resource at {}\n{}'.format(url, action_output))
helperFunctions.install.InstallationError: Problem in processing resource at https://github.com/vakata/jstree/zipball/3.3.9
mv: cannot move 'vakata-jstree-a7f2242' to 'web_js/jstree/vakata-jstree-a7f2242': Directory not empty
|
helperFunctions.install.InstallationError
|
def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
[pie_off, pie_on, pie_partial, pie_invalid]
)
self.append_pie_stats_to_result_dict(
pie_invalid, pie_off, pie_on, pie_partial, stats, total_amount_of_files
)
|
def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
pie_off, pie_on, pie_partial, pie_invalid
)
self.append_pie_stats_to_result_dict(
pie_invalid, pie_off, pie_on, pie_partial, stats, total_amount_of_files
)
|
https://github.com/fkie-cad/FACT_core/issues/88
|
[2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats()
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 45, in update_all_stats
self.db.update_statistic('exploit_mitigations', self._get_exploit_mitigations_stats())
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 102, in _get_exploit_mitigations_stats
self.get_stats_pie(result, stats)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 175, in get_stats_pie
total_amount_of_files = self.calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 194, in calculate_total_files_for_pie
total_amount_of_files = pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0][1]
IndexError: list index out of range
|
IndexError
|
def calculate_total_files_for_pie(pie_stats):
total_amount_of_files = 0
for item in pie_stats:
with suppress(IndexError):
total_amount_of_files += item[0][1]
return total_amount_of_files
|
def calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid):
if (
len(pie_on) > 0
or len(pie_off) > 0
or len(pie_partial) > 0
or len(pie_invalid) > 0
):
total_amount_of_files = (
pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0][1]
)
else:
total_amount_of_files = 0
return total_amount_of_files
|
https://github.com/fkie-cad/FACT_core/issues/88
|
[2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats()
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 45, in update_all_stats
self.db.update_statistic('exploit_mitigations', self._get_exploit_mitigations_stats())
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 102, in _get_exploit_mitigations_stats
self.get_stats_pie(result, stats)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 175, in get_stats_pie
total_amount_of_files = self.calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 194, in calculate_total_files_for_pie
total_amount_of_files = pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0][1]
IndexError: list index out of range
|
IndexError
|
def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_features=sensitive_features)
self.pickled_estimator = pickle.dumps(estimator)
self.eps = eps
self.B = B
self.opt_lambda = opt_lambda
self.hs = pd.Series(dtype="float64")
self.classifiers = pd.Series(dtype="float64")
self.errors = pd.Series(dtype="float64")
self.gammas = pd.DataFrame()
self.lambdas = pd.DataFrame()
self.n = self.X.shape[0]
self.n_oracle_calls = 0
self.n_oracle_calls_dummy_returned = 0
self.oracle_execution_times = []
self.last_linprog_n_hs = 0
self.last_linprog_result = None
|
def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_features=sensitive_features)
self.pickled_estimator = pickle.dumps(estimator)
self.eps = eps
self.B = B
self.opt_lambda = opt_lambda
self.hs = pd.Series(dtype="float64")
self.classifiers = pd.Series(dtype="float64")
self.errors = pd.Series(dtype="float64")
self.gammas = pd.DataFrame()
self.lambdas = pd.DataFrame()
self.n = self.X.shape[0]
self.n_oracle_calls = 0
self.oracle_execution_times = []
self.last_linprog_n_hs = 0
self.last_linprog_result = None
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
redY_unique = np.unique(redY)
classifier = None
if len(redY_unique) == 1:
logger.debug("redY had single value. Using DummyClassifier")
classifier = DummyClassifier(strategy="constant", constant=redY_unique[0])
self.n_oracle_calls_dummy_returned += 1
else:
classifier = pickle.loads(self.pickled_estimator)
oracle_call_start_time = time()
classifier.fit(self.X, redY, sample_weight=redW)
self.oracle_execution_times.append(time() - oracle_call_start_time)
self.n_oracle_calls += 1
return classifier
|
def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
classifier = pickle.loads(self.pickled_estimator)
oracle_call_start_time = time()
classifier.fit(self.X, redY, sample_weight=redW)
self.oracle_execution_times.append(time() - oracle_call_start_time)
self.n_oracle_calls += 1
return classifier
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights = None
self._last_t = None
self._best_t = None
self._n_oracle_calls = 0
self._n_oracle_calls_dummy_returned = 0
self._oracle_execution_times = None
self._lambda_vecs = pd.DataFrame()
self._lambda_vecs_LP = pd.DataFrame()
self._lambda_vecs_lagrangian = pd.DataFrame()
|
def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights = None
self._last_t = None
self._best_t = None
self._n_oracle_calls = 0
self._oracle_execution_times = None
self._lambda_vecs = pd.DataFrame()
self._lambda_vecs_LP = pd.DataFrame()
self._lambda_vecs_lagrangian = pd.DataFrame()
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_features = _validate_and_reformat_input(X, y, **kwargs)
n = y_train.shape[0]
logger.debug("...Exponentiated Gradient STARTING")
B = 1 / self._eps
lagrangian = _Lagrangian(
X, sensitive_features, y_train, self._estimator, self._constraints, self._eps, B
)
theta = pd.Series(0, lagrangian.constraints.index)
Qsum = pd.Series(dtype="float64")
gaps_EG = []
gaps = []
Qs = []
last_regret_checked = _REGRET_CHECK_START_T
last_gap = np.PINF
for t in range(0, self._T):
logger.debug("...iter=%03d", t)
# set lambdas for every constraint
lambda_vec = B * np.exp(theta) / (1 + np.exp(theta).sum())
self._lambda_vecs[t] = lambda_vec
lambda_EG = self._lambda_vecs.mean(axis=1)
# select classifier according to best_h method
h, h_idx = lagrangian.best_h(lambda_vec)
if t == 0:
if self._nu is None:
self._nu = _ACCURACY_MUL * (h(X) - y_train).abs().std() / np.sqrt(n)
eta_min = self._nu / (2 * B)
eta = self._eta_mul / B
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
if h_idx not in Qsum.index:
Qsum.at[h_idx] = 0.0
Qsum[h_idx] += 1.0
gamma = lagrangian.gammas[h_idx]
Q_EG = Qsum / Qsum.sum()
result_EG = lagrangian.eval_gap(Q_EG, lambda_EG, self._nu)
gap_EG = result_EG.gap()
gaps_EG.append(gap_EG)
if t == 0 or not _RUN_LP_STEP:
gap_LP = np.PINF
else:
# saddle point optimization over the convex hull of
# classifiers returned so far
Q_LP, self._lambda_vecs_LP[t], result_LP = lagrangian.solve_linprog(
self._nu
)
gap_LP = result_LP.gap()
# keep values from exponentiated gradient or linear programming
if gap_EG < gap_LP:
Qs.append(Q_EG)
gaps.append(gap_EG)
else:
Qs.append(Q_LP)
gaps.append(gap_LP)
logger.debug(
"%seta=%.6f, L_low=%.3f, L=%.3f, L_high=%.3f, gap=%.6f, disp=%.3f, "
"err=%.3f, gap_LP=%.6f",
_INDENTATION,
eta,
result_EG.L_low,
result_EG.L,
result_EG.L_high,
gap_EG,
result_EG.gamma.max(),
result_EG.error,
gap_LP,
)
if (gaps[t] < self._nu) and (t >= _MIN_T):
# solution found
break
# update regret
if t >= last_regret_checked * _REGRET_CHECK_INCREASE_T:
best_gap = min(gaps_EG)
if best_gap > last_gap * _SHRINK_REGRET:
eta *= _SHRINK_ETA
last_regret_checked = t
last_gap = best_gap
# update theta based on learning rate
theta += eta * (gamma - self._eps)
# retain relevant result data
gaps_series = pd.Series(gaps)
gaps_best = gaps_series[gaps_series <= gaps_series.min() + _PRECISION]
self._best_t = gaps_best.index[-1]
self._best_gap = gaps[self._best_t]
self._weights = Qs[self._best_t]
self._hs = lagrangian.hs
for h_idx in self._hs.index:
if h_idx not in self._weights.index:
self._weights.at[h_idx] = 0.0
self._last_t = len(Qs) - 1
self._predictors = lagrangian.classifiers
self._n_oracle_calls = lagrangian.n_oracle_calls
self._n_oracle_calls_dummy_returned = lagrangian.n_oracle_calls_dummy_returned
self._oracle_execution_times = lagrangian.oracle_execution_times
self._lambda_vecs_lagrangian = lagrangian.lambdas
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
logger.debug(
"...last_t=%d, best_t=%d, best_gap=%.6f, n_oracle_calls=%d, n_hs=%d",
self._last_t,
self._best_t,
self._best_gap,
lagrangian.n_oracle_calls,
len(lagrangian.classifiers),
)
|
def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_features = _validate_and_reformat_input(X, y, **kwargs)
n = y_train.shape[0]
logger.debug("...Exponentiated Gradient STARTING")
B = 1 / self._eps
lagrangian = _Lagrangian(
X, sensitive_features, y_train, self._estimator, self._constraints, self._eps, B
)
theta = pd.Series(0, lagrangian.constraints.index)
Qsum = pd.Series(dtype="float64")
gaps_EG = []
gaps = []
Qs = []
last_regret_checked = _REGRET_CHECK_START_T
last_gap = np.PINF
for t in range(0, self._T):
logger.debug("...iter=%03d", t)
# set lambdas for every constraint
lambda_vec = B * np.exp(theta) / (1 + np.exp(theta).sum())
self._lambda_vecs[t] = lambda_vec
lambda_EG = self._lambda_vecs.mean(axis=1)
# select classifier according to best_h method
h, h_idx = lagrangian.best_h(lambda_vec)
if t == 0:
if self._nu is None:
self._nu = _ACCURACY_MUL * (h(X) - y_train).abs().std() / np.sqrt(n)
eta_min = self._nu / (2 * B)
eta = self._eta_mul / B
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
if h_idx not in Qsum.index:
Qsum.at[h_idx] = 0.0
Qsum[h_idx] += 1.0
gamma = lagrangian.gammas[h_idx]
Q_EG = Qsum / Qsum.sum()
result_EG = lagrangian.eval_gap(Q_EG, lambda_EG, self._nu)
gap_EG = result_EG.gap()
gaps_EG.append(gap_EG)
if t == 0 or not _RUN_LP_STEP:
gap_LP = np.PINF
else:
# saddle point optimization over the convex hull of
# classifiers returned so far
Q_LP, self._lambda_vecs_LP[t], result_LP = lagrangian.solve_linprog(
self._nu
)
gap_LP = result_LP.gap()
# keep values from exponentiated gradient or linear programming
if gap_EG < gap_LP:
Qs.append(Q_EG)
gaps.append(gap_EG)
else:
Qs.append(Q_LP)
gaps.append(gap_LP)
logger.debug(
"%seta=%.6f, L_low=%.3f, L=%.3f, L_high=%.3f, gap=%.6f, disp=%.3f, "
"err=%.3f, gap_LP=%.6f",
_INDENTATION,
eta,
result_EG.L_low,
result_EG.L,
result_EG.L_high,
gap_EG,
result_EG.gamma.max(),
result_EG.error,
gap_LP,
)
if (gaps[t] < self._nu) and (t >= _MIN_T):
# solution found
break
# update regret
if t >= last_regret_checked * _REGRET_CHECK_INCREASE_T:
best_gap = min(gaps_EG)
if best_gap > last_gap * _SHRINK_REGRET:
eta *= _SHRINK_ETA
last_regret_checked = t
last_gap = best_gap
# update theta based on learning rate
theta += eta * (gamma - self._eps)
# retain relevant result data
gaps_series = pd.Series(gaps)
gaps_best = gaps_series[gaps_series <= gaps_series.min() + _PRECISION]
self._best_t = gaps_best.index[-1]
self._best_gap = gaps[self._best_t]
self._weights = Qs[self._best_t]
self._hs = lagrangian.hs
for h_idx in self._hs.index:
if h_idx not in self._weights.index:
self._weights.at[h_idx] = 0.0
self._last_t = len(Qs) - 1
self._predictors = lagrangian.classifiers
self._n_oracle_calls = lagrangian.n_oracle_calls
self._oracle_execution_times = lagrangian.oracle_execution_times
self._lambda_vecs_lagrangian = lagrangian.lambdas
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
logger.debug(
"...last_t=%d, best_t=%d, best_gap=%.6f, n_oracle_calls=%d, n_hs=%d",
self._last_t,
self._best_t,
self._best_gap,
lagrangian.n_oracle_calls,
len(lagrangian.classifiers),
)
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
:param sensitive_features: A (currently) required keyword argument listing the
feature used by the constraints object
:type sensitive_features: numpy.ndarray, pandas.DataFrame, pandas.Series, or list (for now)
"""
if isinstance(self.constraints, ClassificationMoment):
logger.debug("Classification problem detected")
is_classification_reduction = True
else:
logger.debug("Regression problem detected")
is_classification_reduction = False
_, y_train, sensitive_features_train = _validate_and_reformat_input(
X, y, enforce_binary_labels=is_classification_reduction, **kwargs
)
kwargs[_KW_SENSITIVE_FEATURES] = sensitive_features_train
# Prep the parity constraints and objective
logger.debug("Preparing constraints and objective")
self.constraints.load_data(X, y_train, **kwargs)
objective = self.constraints.default_objective()
objective.load_data(X, y_train, **kwargs)
# Basis information
pos_basis = self.constraints.pos_basis
neg_basis = self.constraints.neg_basis
neg_allowed = self.constraints.neg_basis_present
objective_in_the_span = self.constraints.default_objective_lambda_vec is not None
if self.grid is None:
logger.debug("Creating grid of size %i", self.grid_size)
grid = _GridGenerator(
self.grid_size,
self.grid_limit,
pos_basis,
neg_basis,
neg_allowed,
objective_in_the_span,
self.grid_offset,
).grid
else:
logger.debug("Using supplied grid")
grid = self.grid
# Fit the estimates
logger.debug("Setup complete. Starting grid search")
for i in grid.columns:
lambda_vec = grid[i]
logger.debug("Obtaining weights")
weights = self.constraints.signed_weights(lambda_vec)
if not objective_in_the_span:
weights = weights + objective.signed_weights()
if is_classification_reduction:
logger.debug("Applying relabelling for classification problem")
y_reduction = 1 * (weights > 0)
weights = weights.abs()
else:
y_reduction = y_train
y_reduction_unique = np.unique(y_reduction)
if len(y_reduction_unique) == 1:
logger.debug("y_reduction had single value. Using DummyClassifier")
current_estimator = DummyClassifier(
strategy="constant", constant=y_reduction_unique[0]
)
else:
logger.debug("Using underlying estimator")
current_estimator = copy.deepcopy(self.estimator)
oracle_call_start_time = time()
current_estimator.fit(X, y_reduction, sample_weight=weights)
oracle_call_execution_time = time() - oracle_call_start_time
logger.debug("Call to estimator complete")
def predict_fct(X):
return current_estimator.predict(X)
self._predictors.append(current_estimator)
self._lambda_vecs[i] = lambda_vec
self._objectives.append(objective.gamma(predict_fct)[0])
self._gammas[i] = self.constraints.gamma(predict_fct)
self._oracle_execution_times.append(oracle_call_execution_time)
logger.debug("Selecting best_result")
if self.selection_rule == TRADEOFF_OPTIMIZATION:
def loss_fct(i):
return (
self.objective_weight * self._objectives[i]
+ self.constraint_weight * self._gammas[i].max()
)
losses = [loss_fct(i) for i in range(len(self._objectives))]
self._best_grid_index = losses.index(min(losses))
else:
raise RuntimeError("Unsupported selection rule")
return
|
def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
:param sensitive_features: A (currently) required keyword argument listing the
feature used by the constraints object
:type sensitive_features: numpy.ndarray, pandas.DataFrame, pandas.Series, or list (for now)
"""
if isinstance(self.constraints, ClassificationMoment):
logger.debug("Classification problem detected")
is_classification_reduction = True
else:
logger.debug("Regression problem detected")
is_classification_reduction = False
_, y_train, sensitive_features_train = _validate_and_reformat_input(
X, y, enforce_binary_labels=is_classification_reduction, **kwargs
)
kwargs[_KW_SENSITIVE_FEATURES] = sensitive_features_train
# Prep the parity constraints and objective
logger.debug("Preparing constraints and objective")
self.constraints.load_data(X, y_train, **kwargs)
objective = self.constraints.default_objective()
objective.load_data(X, y_train, **kwargs)
# Basis information
pos_basis = self.constraints.pos_basis
neg_basis = self.constraints.neg_basis
neg_allowed = self.constraints.neg_basis_present
objective_in_the_span = self.constraints.default_objective_lambda_vec is not None
if self.grid is None:
logger.debug("Creating grid of size %i", self.grid_size)
grid = _GridGenerator(
self.grid_size,
self.grid_limit,
pos_basis,
neg_basis,
neg_allowed,
objective_in_the_span,
self.grid_offset,
).grid
else:
logger.debug("Using supplied grid")
grid = self.grid
# Fit the estimates
logger.debug("Setup complete. Starting grid search")
for i in grid.columns:
lambda_vec = grid[i]
logger.debug("Obtaining weights")
weights = self.constraints.signed_weights(lambda_vec)
if not objective_in_the_span:
weights = weights + objective.signed_weights()
if is_classification_reduction:
logger.debug("Applying relabelling for classification problem")
y_reduction = 1 * (weights > 0)
weights = weights.abs()
else:
y_reduction = y_train
current_estimator = copy.deepcopy(self.estimator)
logger.debug("Calling underlying estimator")
oracle_call_start_time = time()
current_estimator.fit(X, y_reduction, sample_weight=weights)
oracle_call_execution_time = time() - oracle_call_start_time
logger.debug("Call to underlying estimator complete")
def predict_fct(X):
return current_estimator.predict(X)
self._predictors.append(current_estimator)
self._lambda_vecs[i] = lambda_vec
self._objectives.append(objective.gamma(predict_fct)[0])
self._gammas[i] = self.constraints.gamma(predict_fct)
self._oracle_execution_times.append(oracle_call_execution_time)
logger.debug("Selecting best_result")
if self.selection_rule == TRADEOFF_OPTIMIZATION:
def loss_fct(i):
return (
self.objective_weight * self._objectives[i]
+ self.constraint_weight * self._gammas[i].max()
)
losses = [loss_fct(i) for i in range(len(self._objectives))]
self._best_grid_index = losses.index(min(losses))
else:
raise RuntimeError("Unsupported selection rule")
return
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(self.shape[i])))
for i, slice_shape in enumerate(slice_)
if not isinstance(slice_shape, int)
]
)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray):
value_shape = [dim for dim in value.shape if dim != 1]
expected_shape = [dim for dim in expected_value_shape if dim != 1]
if value_shape != expected_shape:
raise ValueShapeError(expected_value_shape, value.shape)
else:
value = value.reshape(expected_value_shape)
else:
expected_value_shape = (1,)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray) and value.shape != expected_value_shape:
raise ValueShapeError(expected_value_shape, value.shape)
return value
|
def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(self.shape[i])))
for i, slice_shape in enumerate(slice_)
if not isinstance(slice_shape, int)
]
)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray):
if value.shape[0] == 1 and expected_value_shape[0] != 1:
value = np.squeeze(value, axis=0)
if value.shape[-1] == 1 and expected_value_shape[-1] != 1:
value = np.squeeze(value, axis=-1)
if value.shape != expected_value_shape:
raise ValueShapeError(expected_value_shape, value.shape)
else:
expected_value_shape = (1,)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray) and value.shape != expected_value_shape:
raise ValueShapeError(expected_value_shape, value.shape)
return value
|
https://github.com/activeloopai/Hub/issues/316
|
Traceback (most recent call last):
File "examples/upload_mpi.py", line 52, in <module>
res_ds = out_ds.store(tag)
File "/Hub/hub/compute/transform.py", line 372, in store
n_results = self.store_shard(ds_in_shard, ds_out, start, token=token)
File "/Hub/hub/compute/transform.py", line 288, in store_shard
self.upload(
File "/Hub/hub/compute/transform.py", line 222, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Hub/hub/compute/transform.py", line 212, in upload_chunk
ds[key, i * length : i * length + batch_length] = batch
File "/Hub/hub/api/datasetview.py", line 131, in __setitem__
self.dataset._tensors[subpath][slice_list] = assign_value
File "/Hub/hub/store/dynamic_tensor.py", line 187, in __setitem__
max_shape = value[0].shape
AttributeError: 'float' object has no attribute 'shape'
|
AttributeError
|
def __init__(
self,
url: str,
mode: str = "a",
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = defaults.DEFAULT_MEMORY_CACHE_SIZE,
storage_cache: int = defaults.DEFAULT_STORAGE_CACHE_SIZE,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or existing dataset for read/write
Parameters
----------
url: str
The url where dataset is located/should be created
mode: str, optional (default to "a")
Python way to tell whether dataset is for read or write (ex. "r", "w", "a")
shape: tuple, optional
Tuple with (num_samples,) format, where num_samples is number of samples
schema: optional
Describes the data of a single sample. Hub schemas are used for that
Required for 'a' and 'w' modes
token: str or dict, optional
If url is refering to a place where authorization is required,
token is the parameter to pass the credentials, it can be filepath or dict
fs: optional
fs_map: optional
cache: int, optional
Size of the memory cache. Default is 64MB (2**26)
if 0, False or None, then cache is not used
storage_cache: int, optional
Size of the storage cache. Default is 256MB (2**28)
if 0, False or None, then storage cache is not used
lock_cache: bool, optional
Lock the cache for avoiding multiprocessing errors
"""
shape = norm_shape(shape)
if len(shape) != 1:
raise ShapeLengthException()
mode = mode or "a"
storage_cache = norm_cache(storage_cache) if cache else 0
cache = norm_cache(cache)
schema: SchemaDict = featurify(schema) if schema else None
self._url = url
self._token = token
self._mode = mode
self.tokenizer = tokenizer
self._fs, self._path = (fs, url) if fs else get_fs_and_path(self._url, token=token)
self._cache = cache
self._storage_cache = storage_cache
self.lock_cache = lock_cache
self.verison = "1.x"
needcreate = self._check_and_prepare_dir()
fs_map = fs_map or get_storage_map(
self._fs, self._path, cache, lock=lock_cache, storage_cache=storage_cache
)
self._fs_map = fs_map
self.username = None
self.dataset_name = None
if not needcreate:
self.meta = json.loads(fs_map["meta.json"].decode("utf-8"))
self._shape = tuple(self.meta["shape"])
self._schema = hub.schema.deserialize.deserialize(self.meta["schema"])
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._open_storage_tensors())
if shape != (None,) and shape != self._shape:
raise TypeError(
f"Shape in metafile [{self._shape}] and shape in arguments [{shape}] are !=, use mode='w' to overwrite dataset"
)
if schema is not None and sorted(schema.dict_.keys()) != sorted(
self._schema.dict_.keys()
):
raise TypeError(
"Schema in metafile and schema in arguments do not match, use mode='w' to overwrite dataset"
)
else:
if shape[0] is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
try:
if shape is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
self._schema = schema
self._shape = tuple(shape)
self.meta = self._store_meta()
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._generate_storage_tensors())
self.flush()
except Exception as e:
try:
self.close()
except Exception:
pass
self._fs.rm(self._path, recursive=True)
logger.error("Deleting the dataset " + traceback.format_exc() + str(e))
raise
if needcreate and (
self._path.startswith("s3://snark-hub-dev/")
or self._path.startswith("s3://snark-hub/")
):
subpath = self._path[5:]
spl = subpath.split("/")
if len(spl) < 4:
raise ValueError("Invalid Path for dataset")
self.username = spl[-2]
self.dataset_name = spl[-1]
HubControlClient().create_dataset_entry(
self.username, self.dataset_name, self.meta
)
|
def __init__(
self,
url: str,
mode: str = "a",
safe_mode: bool = False,
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = 2**26,
storage_cache: int = 2**28,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or existing dataset for read/write
Parameters
----------
url: str
The url where dataset is located/should be created
mode: str, optional (default to "w")
Python way to tell whether dataset is for read or write (ex. "r", "w", "a")
safe_mode: bool, optional
if dataset exists it cannot be rewritten in safe mode, otherwise it lets to write the first time
shape: tuple, optional
Tuple with (num_samples,) format, where num_samples is number of samples
schema: optional
Describes the data of a single sample. Hub schemas are used for that
Required for 'a' and 'w' modes
token: str or dict, optional
If url is refering to a place where authorization is required,
token is the parameter to pass the credentials, it can be filepath or dict
fs: optional
fs_map: optional
cache: int, optional
Size of the memory cache. Default is 64MB (2**26)
if 0, False or None, then cache is not used
storage_cache: int, optional
Size of the storage cache. Default is 256MB (2**28)
if 0, False or None, then storage cache is not used
lock_cache: bool, optional
Lock the cache for avoiding multiprocessing errors
"""
shape = shape or (None,)
if isinstance(shape, int):
shape = [shape]
if shape is not None:
if len(tuple(shape)) != 1:
raise ShapeLengthException
if mode is None:
raise NoneValueException("mode")
if not cache:
storage_cache = False
self.url = url
self.token = token
self.mode = mode
self.tokenizer = tokenizer
self._fs, self._path = (fs, url) if fs else get_fs_and_path(self.url, token=token)
self.cache = cache
self._storage_cache = storage_cache
self.lock_cache = lock_cache
self.verison = "1.x"
needcreate = self._check_and_prepare_dir()
fs_map = fs_map or get_storage_map(
self._fs, self._path, cache, lock=lock_cache, storage_cache=storage_cache
)
self._fs_map = fs_map
if safe_mode and not needcreate:
mode = "r"
self.username = None
self.dataset_name = None
if not needcreate:
self.meta = json.loads(fs_map["meta.json"].decode("utf-8"))
self.shape = tuple(self.meta["shape"])
self.schema = hub.schema.deserialize.deserialize(self.meta["schema"])
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._open_storage_tensors())
else:
if shape[0] is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
try:
if shape is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
self.schema: HubSchema = featurify(schema)
self.shape = tuple(shape)
self.meta = self._store_meta()
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._generate_storage_tensors())
self.flush()
except Exception as e:
try:
self.close()
except Exception:
pass
self._fs.rm(self._path, recursive=True)
logger.error("Deleting the dataset " + traceback.format_exc() + str(e))
raise
if needcreate and (
self._path.startswith("s3://snark-hub-dev/")
or self._path.startswith("s3://snark-hub/")
):
subpath = self._path[5:]
spl = subpath.split("/")
if len(spl) < 4:
raise ValueError("Invalid Path for dataset")
self.username = spl[-2]
self.dataset_name = spl[-1]
HubControlClient().create_dataset_entry(
self.username, self.dataset_name, self.meta
)
|
https://github.com/activeloopai/Hub/issues/318
|
Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumentNotFoundException: Parameter 'shape' should be provided for Dataset creation.
|
hub.exceptions.ShapeArgumentNotFoundException
|
def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True dataset needs to be created opposed to read.
"""
fs, path, mode = self._fs, self._path, self._mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.activeloop/store"), "rb") as f:
stored_username = json.load(f)["_id"]
current_username = path.split("/")[-2]
if stored_username != current_username:
try:
fs.listdir(path)
except:
raise WrongUsernameException(stored_username)
exist_meta = fs.exists(posixpath.join(path, "meta.json"))
if exist_meta:
if "w" in mode:
fs.rm(path, recursive=True)
fs.makedirs(path)
return True
return False
else:
if "r" in mode:
raise HubDatasetNotFoundException(path)
exist_dir = fs.exists(path)
if not exist_dir:
fs.makedirs(path)
elif get_file_count(fs, path) > 0:
if "w" in mode:
raise NotHubDatasetToOverwriteException()
else:
raise NotHubDatasetToAppendException()
return True
|
def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True dataset needs to be created opposed to read.
"""
fs, path, mode = self._fs, self._path, self.mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.activeloop/store"), "rb") as f:
stored_username = json.load(f)["_id"]
current_username = path.split("/")[-2]
if stored_username != current_username:
try:
fs.listdir(path)
except:
raise WrongUsernameException(stored_username)
exist_meta = fs.exists(posixpath.join(path, "meta.json"))
if exist_meta:
if "w" in mode:
fs.rm(path, recursive=True)
fs.makedirs(path)
return True
return False
else:
if "r" in mode:
raise HubDatasetNotFoundException(path)
exist_dir = fs.exists(path)
if not exist_dir:
fs.makedirs(path)
elif get_file_count(fs, path) > 0:
if "w" in mode:
raise NotHubDatasetToOverwriteException()
else:
raise NotHubDatasetToAppendException()
return True
|
https://github.com/activeloopai/Hub/issues/318
|
Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumentNotFoundException: Parameter 'shape' should be provided for Dataset creation.
|
hub.exceptions.ShapeArgumentNotFoundException
|
def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self._shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._update_dataset_state()
|
def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self.shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._update_dataset_state()
|
https://github.com/activeloopai/Hub/issues/318
|
Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumentNotFoundException: Parameter 'shape' should be provided for Dataset creation.
|
hub.exceptions.ShapeArgumentNotFoundException
|
def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(shape) == len(max_shape)
for s, ms in zip(shape, max_shape):
if not isinstance(ms, int):
raise HubException("MaxShape Dimension should be int")
if s is not None and s != ms:
raise HubException(
"""Dimension in shape cannot be != max_shape dimension,
if shape is not None """
)
assert s == ms or s is None and isinstance(ms, int)
return max_shape
|
def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(shape) == len(max_shape)
for s, ms in zip(shape, max_shape):
assert s == ms or s is None and isinstance(ms, int)
return max_shape
|
https://github.com/activeloopai/Hub/issues/298
|
β feature_testing python upload_animals.py
26180
{'labels': ClassLabel(shape=(), dtype='int64', names=['pecora', 'mucca', 'cane', 'ragno', 'cavallo', 'elefante', 'gallina', 'gatto', 'scoiattolo', 'farfalla'], num_classes=10), 'image': Image(shape=(120, 120, 3), dtype='uint8', max_shape=(120, 120, 4))}
ClassLabel(shape=(), dtype='int64', names=['pecora', 'mucca', 'cane', 'ragno', 'cavallo', 'elefante', 'gallina', 'gatto', 'scoiattolo', 'farfalla'], num_classes=10)
Deleting the dataset Traceback (most recent call last):
File "/home/debo/Hub/hub/api/dataset.py", line 154, in __init__
self._tensors = dict(self._generate_storage_tensors())
File "/home/debo/Hub/hub/api/dataset.py", line 249, in _generate_storage_tensors
yield t_path, DynamicTensor(
File "/home/debo/Hub/hub/store/dynamic_tensor.py", line 67, in __init__
shapeDt = ShapeDetector(shape, max_shape, chunks, dtype)
File "/home/debo/Hub/hub/store/shape_detector.py", line 27, in __init__
self._max_shape = max_shape = self._get_max_shape(shape, max_shape)
File "/home/debo/Hub/hub/store/shape_detector.py", line 50, in _get_max_shape
assert s == ms or s is None and isinstance(ms, int)
AssertionError
Traceback (most recent call last):
File "upload_animals.py", line 23, in <module>
ds,labels = Dataset.from_directory(url,root_folder,image_shape,(ds_size,),'w+',max_shape=(120,120,4))
File "/home/debo/Hub/hub/api/dataset.py", line 680, in from_directory
ds = Dataset(
File "/home/debo/Hub/hub/api/dataset.py", line 154, in __init__
self._tensors = dict(self._generate_storage_tensors())
File "/home/debo/Hub/hub/api/dataset.py", line 249, in _generate_storage_tensors
yield t_path, DynamicTensor(
File "/home/debo/Hub/hub/store/dynamic_tensor.py", line 67, in __init__
shapeDt = ShapeDetector(shape, max_shape, chunks, dtype)
File "/home/debo/Hub/hub/store/shape_detector.py", line 27, in __init__
self._max_shape = max_shape = self._get_max_shape(shape, max_shape)
File "/home/debo/Hub/hub/store/shape_detector.py", line 50, in _get_max_shape
assert s == ms or s is None and isinstance(ms, int)
AssertionError
|
AssertionError
|
def verify_cli_version():
os.environ["OUTDATED_IGNORE"] = 1
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out of date. Please upgrade the package by running `pip3 install --upgrade snark`"
+ "\033[0m"
)
except Exception as e:
logger.error(str(e))
|
def verify_cli_version():
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out of date. Please upgrade the package by running `pip3 install --upgrade snark`"
+ "\033[0m"
)
except Exception as e:
logger.error(str(e))
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["description"]
except Exception:
message = " "
logger.debug(f'Error received: status code: {code}, message: "{message}"')
if code == 400:
raise BadRequestException(response)
elif response.status_code == 401:
raise AuthenticationException()
elif response.status_code == 403:
raise AuthorizationException()
elif response.status_code == 404:
if message != " ":
raise NotFoundException(message)
else:
raise NotFoundException
elif response.status_code == 429:
raise OverLimitException(message)
elif response.status_code == 502:
raise BadGatewayException()
elif response.status_code == 504:
raise GatewayTimeoutException(message)
elif response.status_code == 423:
raise LockedException(message)
elif 500 <= response.status_code < 600:
if "Server under maintenance" in response.content.decode():
raise ServerException(
"Server under maintenance, please try again later."
)
else:
raise ServerException()
else:
msg = "An error occurred. Server response: {}".format(response.status_code)
raise HubException(message=msg)
|
def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["error"]
except Exception:
message = " "
logger.debug(f'Error received: status code: {code}, message: "{message}"')
if code == 400:
raise BadRequestException(response)
elif response.status_code == 401:
raise AuthenticationException()
elif response.status_code == 403:
raise AuthorizationException()
elif response.status_code == 404:
raise NotFoundException()
elif response.status_code == 429:
raise OverLimitException(message)
elif response.status_code == 502:
raise BadGatewayException()
elif response.status_code == 504:
raise GatewayTimeoutException(message)
elif response.status_code == 423:
raise LockedException(message)
elif 500 <= response.status_code < 600:
if "Server under maintenance" in response.content.decode():
raise ServerException(
"Server under maintenance, please try again later."
)
else:
raise ServerException()
else:
msg = "An error occurred. Server response: {}".format(response.status_code)
raise HubException(message=msg)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
Amount on local memory to cache locally, default 2e9 (2GB)
cloud: bool
Should be run locally or on the cloud
n_workers: int
number of concurrent workers, default to1
threads_per_worker: int
Number of threads per each worker
"""
print("initialized")
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
from dask.distributed import Client
global dask
global Client
global _client
if _client is not None:
_client.close()
if cloud:
raise NotImplementedError
elif not distributed:
client = None
dask.config.set(scheduler="threading")
hub.config.DISTRIBUTED = False
else:
n_workers = n_workers if n_workers is not None else psutil.cpu_count()
memory_limit = (
memory_limit
if memory_limit is not None
else psutil.virtual_memory().available
)
local_directory = os.path.join(
os.path.expanduser("~"),
".activeloop",
"tmp",
)
if not os.path.exists(local_directory):
os.makedirs(local_directory)
client = Client(
n_workers=n_workers,
processes=processes,
memory_limit=memory_limit,
threads_per_worker=threads_per_worker,
local_directory=local_directory,
)
config.DISTRIBUTED = True
_client = client
return client
|
def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
Amount on local memory to cache locally, default 2e9 (2GB)
cloud: bool
Should be run locally or on the cloud
n_workers: int
number of concurrent workers, default to1
threads_per_worker: int
Number of threads per each worker
"""
print("initialized")
global _client
if _client is not None:
_client.close()
if cloud:
raise NotImplementedError
elif not distributed:
client = None
dask.config.set(scheduler="threading")
hub.config.DISTRIBUTED = False
else:
n_workers = n_workers if n_workers is not None else psutil.cpu_count()
memory_limit = (
memory_limit
if memory_limit is not None
else psutil.virtual_memory().available
)
local_directory = os.path.join(
os.path.expanduser("~"),
".activeloop",
"tmp",
)
if not os.path.exists(local_directory):
os.makedirs(local_directory)
client = Client(
n_workers=n_workers,
processes=processes,
memory_limit=memory_limit,
threads_per_worker=threads_per_worker,
local_directory=local_directory,
)
config.DISTRIBUTED = True
_client = client
return client
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of generating dataset
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
meta = _meta_preprocess(generator.meta())
keys = sorted(meta.keys())
tasks = [dask.delayed(_generate, nout=len(meta))(generator, i) for i in input]
if len(tasks) == 0:
return Dataset(
{
key: Tensor(
meta[key],
dask.array.from_array(np.empty(shape=(0,), dtype="uint8")),
)
for ki, key in enumerate(keys)
}
)
return Dataset(
{
key: Tensor(
meta[key],
dask.array.concatenate(
[
dask.array.from_delayed(
task[ki],
shape=_dask_shape(meta[key]["shape"]),
dtype=meta[key]["dtype"],
)
for task in tasks
]
),
delayed_objs=[task[ki] for task in tasks],
)
for ki, key in enumerate(keys)
}
)
|
def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of generating dataset
"""
meta = _meta_preprocess(generator.meta())
keys = sorted(meta.keys())
tasks = [dask.delayed(_generate, nout=len(meta))(generator, i) for i in input]
if len(tasks) == 0:
return Dataset(
{
key: Tensor(
meta[key],
dask.array.from_array(np.empty(shape=(0,), dtype="uint8")),
)
for ki, key in enumerate(keys)
}
)
return Dataset(
{
key: Tensor(
meta[key],
dask.array.concatenate(
[
dask.array.from_delayed(
task[ki],
shape=_dask_shape(meta[key]["shape"]),
dtype=meta[key]["dtype"],
)
for task in tasks
]
),
delayed_objs=[task[ki] for task in tasks],
)
for ki, key in enumerate(keys)
}
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
keys = [sorted(dataset._tensors.keys()) for dataset in datasets]
for key in keys:
assert key == keys[0]
keys = keys[0]
return Dataset(
{
key: Tensor(
_meta_concat([dataset._tensors[key]._meta for dataset in datasets]),
dask.array.concatenate(
[dataset._tensors[key]._array for dataset in datasets]
),
tuple(
itertools.chain(
*[
dataset._tensors[key]._delayed_objs or []
for dataset in datasets
]
)
),
)
for key in keys
}
)
|
def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
keys = [sorted(dataset._tensors.keys()) for dataset in datasets]
for key in keys:
assert key == keys[0]
keys = keys[0]
return Dataset(
{
key: Tensor(
_meta_concat([dataset._tensors[key]._meta for dataset in datasets]),
dask.array.concatenate(
[dataset._tensors[key]._array for dataset in datasets]
),
tuple(
itertools.chain(
*[
dataset._tensors[key]._delayed_objs or []
for dataset in datasets
]
)
),
)
for key in keys
}
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
shape = tensor.shape
self._len = tensor.count
self.verison = "0.x"
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
|
def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
shape = tensor.shape
self._len = tensor.count
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import torch
global torch
except ImportError:
pass
return TorchDataset(self, transform, max_text_len)
|
def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
return TorchDataset(self, transform, max_text_len)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except ImportError:
pass
def tf_gen(step=4):
with dask.config.set(scheduler="sync"):
for index in range(0, len(self), step):
arrs = [self[index : index + step].values() for i in range(1)]
arrs = list(map(lambda x: x._array, _flatten(arrs)))
arrs = dask.delayed(list, pure=False, nout=len(list(self.keys())))(arrs)
arrs = arrs.compute()
for ind, arr in enumerate(arrs):
if arr.dtype.type is np.str_:
arr = [
([ord(x) for x in sample.tolist()[0:max_text_len]])
for sample in arr
]
arr = np.array(
[
np.pad(
sample,
(0, max_text_len - len(sample)),
"constant",
constant_values=(32),
)
for sample in arr
]
)
arrs[ind] = arr
for i in range(step):
sample = {key: r[i] for key, r in zip(self[index].keys(), arrs)}
yield sample
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
logger.log(e)
return tf.variant
output_shapes = {}
output_types = {}
for key in self.keys():
output_types[key] = tf_dtype(self._tensors[key].dtype)
output_shapes[key] = self._tensors[key].shape[1:]
# if this is a string, we change the type to int, as it's going to become ascii. shape is also set to None
if output_types[key] == tf.dtypes.as_dtype("string"):
output_types[key] = tf.dtypes.as_dtype("int8")
output_shapes[key] = None
# TODO use None for dimensions you don't know the length tf.TensorShape([None])
# FIXME Dataset Generator is not very good with multiprocessing but its good for fast tensorflow support
return tf.data.Dataset.from_generator(
tf_gen,
output_types=output_types,
# output_shapes=output_shapes,
)
|
def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except ImportError:
pass
def tf_gen(step=4):
with dask.config.set(scheduler="sync"):
for index in range(0, len(self), step):
arrs = [self[index : index + step].values() for i in range(1)]
arrs = list(map(lambda x: x._array, _flatten(arrs)))
arrs = dask.delayed(list, pure=False, nout=len(list(self.keys())))(arrs)
arrs = arrs.compute()
for ind, arr in enumerate(arrs):
if arr.dtype.type is np.str_:
arr = [
([ord(x) for x in sample.tolist()[0:max_text_len]])
for sample in arr
]
arr = np.array(
[
np.pad(
sample,
(0, max_text_len - len(sample)),
"constant",
constant_values=(32),
)
for sample in arr
]
)
arrs[ind] = arr
for i in range(step):
sample = {key: r[i] for key, r in zip(self[index].keys(), arrs)}
yield sample
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
return tf.variant
output_shapes = {}
output_types = {}
for key in self.keys():
output_types[key] = tf_dtype(self._tensors[key].dtype)
output_shapes[key] = self._tensors[key].shape[1:]
# if this is a string, we change the type to int, as it's going to become ascii. shape is also set to None
if output_types[key] == tf.dtypes.as_dtype("string"):
output_types[key] = tf.dtypes.as_dtype("int8")
output_shapes[key] = None
# TODO use None for dimensions you don't know the length tf.TensorShape([None])
# FIXME Dataset Generator is not very good with multiprocessing but its good for fast tensorflow support
return tf.data.Dataset.from_generator(
tf_gen,
output_types=output_types,
# output_shapes=output_shapes,
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
logger.log(e)
return tf.variant
|
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
return tf.variant
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
raise HubDatasetNotFoundException(tag)
with fs.open(path_2, "r") as f:
ds_meta = json.loads(f.read())
for name in ds_meta["tensors"]:
assert fs.exists(f"{path}/{name}"), (
f"Tensor {name} of {tag} dataset does not exist"
)
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
if ds_meta["len"] == 0:
logger.warning("The dataset is empty (has 0 samples)")
return Dataset(
{
name: Tensor(
tmeta,
dask.array.from_array(
np.empty(shape=(0,) + tuple(tmeta["shape"][1:]), dtype="uint8"),
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
len_ = ds_meta["len"]
# added reverse compatibility for previous versions
for name, tmeta in ds_meta["tensors"].items():
if "chunksize" not in tmeta:
tmeta["chunksize"] = 1
return Dataset(
{
name: Tensor(
tmeta,
_dask_concat(
[
dask.array.from_delayed(
dask.delayed(_numpy_load)(
fs,
f"{path}/{name}/{i}.npy",
codec_from_name(tmeta.get("dcompress")),
),
shape=(min(tmeta["chunksize"], len_ - i),)
+ tuple(tmeta["shape"][1:]),
dtype=tmeta["dtype"],
)
for i in range(0, len_, tmeta["chunksize"])
]
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
|
def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
from hub.exceptions import DatasetNotFound
raise DatasetNotFound(tag)
with fs.open(path_2, "r") as f:
ds_meta = json.loads(f.read())
for name in ds_meta["tensors"]:
assert fs.exists(f"{path}/{name}"), (
f"Tensor {name} of {tag} dataset does not exist"
)
if ds_meta["len"] == 0:
logger.warning("The dataset is empty (has 0 samples)")
return Dataset(
{
name: Tensor(
tmeta,
dask.array.from_array(
np.empty(shape=(0,) + tuple(tmeta["shape"][1:]), dtype="uint8"),
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
len_ = ds_meta["len"]
# added reverse compatibility for previous versions
for name, tmeta in ds_meta["tensors"].items():
if "chunksize" not in tmeta:
tmeta["chunksize"] = 1
return Dataset(
{
name: Tensor(
tmeta,
_dask_concat(
[
dask.array.from_delayed(
dask.delayed(_numpy_load)(
fs,
f"{path}/{name}/{i}.npy",
codec_from_name(tmeta.get("dcompress")),
),
shape=(min(tmeta["chunksize"], len_ - i),)
+ tuple(tmeta["shape"][1:]),
dtype=tmeta["dtype"],
)
for i in range(0, len_, tmeta["chunksize"])
]
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, mask, labels, ...)
dcompress: str, optional
Argument for compression algorithm, ignore this one, this one does not have any affect yet!
chunksize:
Information about how many items (from axis 0) should be stored in the same file if a command is given to save this tensor
Returns
-------
Tensor
newly generated tensor itself
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
meta = {
"dtype": array.dtype,
"dtag": dtag,
"dcompress": dcompress,
"chunksize": chunksize,
}
if str(array.dtype) == "object":
array = dask.array.from_array(array, chunks=1)
else:
array = dask.array.from_array(array)
return Tensor(meta, array)
|
def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, mask, labels, ...)
dcompress: str, optional
Argument for compression algorithm, ignore this one, this one does not have any affect yet!
chunksize:
Information about how many items (from axis 0) should be stored in the same file if a command is given to save this tensor
Returns
-------
Tensor
newly generated tensor itself
"""
meta = {
"dtype": array.dtype,
"dtag": dtag,
"dcompress": dcompress,
"chunksize": chunksize,
}
if str(array.dtype) == "object":
array = dask.array.from_array(array, chunks=1)
else:
array = dask.array.from_array(array)
return Tensor(meta, array)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daskarray)
self._meta = meta
self._array = daskarray
self._delayed_objs = delayed_objs
self._shape = _dask_shape_backward(daskarray.shape)
self._dtype = meta["dtype"]
self._dtag = meta.get("dtag")
self._dcompress = meta.get("dcompress")
self._dcompress_algo = meta.get("dcompress_algo")
self._dcompress_lvl = meta.get("dcompress_lvl")
self._chunksize = meta.get("chunksize")
|
def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daskarray)
self._meta = meta
self._array = daskarray
self._delayed_objs = delayed_objs
self._shape = _dask_shape_backward(daskarray.shape)
self._dtype = meta["dtype"]
self._dtag = meta.get("dtag")
self._dcompress = meta.get("dcompress")
self._dcompress_algo = meta.get("dcompress_algo")
self._dcompress_lvl = meta.get("dcompress_lvl")
self._chunksize = meta.get("chunksize")
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super(PermissionException, self).__init__(message=message)
|
def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super().__init__(message=message)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def _flatten(list_):
"""
Helper function to flatten the list
"""
return [item for sublist in list_ for item in sublist]
|
def _flatten(l):
"""
Helper function to flatten the list
"""
return [item for sublist in l for item in sublist]
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(func_object)
args = spec.args
else:
sig = inspect.signature(func)
args = [
param.name
for param in sig.parameters.values()
if param.kind
not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
]
# NOTE(kgriffs): Depending on the version of Python, 'self' may or may not
# be present, so we normalize the results by removing 'self' as needed.
# Note that this behavior varies between 3.x versions as well as between
# 3.x and 2.7.
if args and args[0] == "self":
args = args[1:]
return args
|
def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(func_object)
args = spec.args
else:
sig = inspect.signature(func)
args = [
param.name
for param in sig.parameters.values()
if param.kind
not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
]
# NOTE(kgriffs): Depending on the version of Python, 'self' may or may not
# be present, so we normalize the results by removing 'self' as needed.
# Note that this behavior varies between 3.x versions as well as between
# 3.x and 2.7.
if args[0] == "self":
args = args[1:]
return args
|
https://github.com/falconry/falcon/issues/1254
|
(falcon-bug-repro) falcon-bug-repro Β» python main.py
Traceback (most recent call last):
File "main.py", line 19, in <module>
MyMiddleware(),
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/api.py", line 156, in __init__
middleware, independent_middleware=independent_middleware)
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/api_helpers.py", line 87, in prepare_middleware
args = util.get_argnames(process_response)
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/util/misc.py", line 317, in get_argnames
if args[0] == 'self':
IndexError: list index out of range
|
IndexError
|
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only US-ASCII characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to US-ASCII.
"""
if PY2:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
name = str(name)
value = str(value)
# NOTE(kgriffs): normalize name by lowercasing it
self._headers[name.lower()] = value
|
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only ISO-8859-1 characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to ISO-8859-1.
"""
name, value = self._encode_header(name, value)
# NOTE(kgriffs): normalize name by lowercasing it
self._headers[name.lower()] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
For setting cookies, see :py:meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only US-ASCII characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to US-ASCII.
"""
if PY2:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
name = str(name)
value = str(value)
name = name.lower()
if name in self._headers:
value = self._headers[name] + "," + value
self._headers[name] = value
|
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
For setting cookies, see :py:meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only ISO-8859-1 characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to ISO-8859-1.
"""
name, value = self._encode_header(name, value)
name = name.lower()
if name in self._headers:
value = self._headers[name] + "," + value
self._headers[name] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and *value* must be of type ``str`` or ``StringType`` and
contain only US-ASCII characters. Under Python 2.x, the
``unicode`` type is also accepted, although such strings are
also limited to US-ASCII.
Note:
Falcon can process a list of tuples slightly faster
than a dict.
Raises:
ValueError: `headers` was not a ``dict`` or ``list`` of ``tuple``.
"""
if isinstance(headers, dict):
headers = headers.items()
# NOTE(kgriffs): We can't use dict.update because we have to
# normalize the header names.
_headers = self._headers
if PY2:
for name, value in headers:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
name = str(name)
value = str(value)
_headers[name.lower()] = value
else:
for name, value in headers:
_headers[name.lower()] = value
|
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and *value* must be of type ``str`` or ``StringType`` and
contain only ISO-8859-1 characters. Under Python 2.x, the
``unicode`` type is also accepted, although such strings are
also limited to ISO-8859-1.
Note:
Falcon can process a list of tuples slightly faster
than a dict.
Raises:
ValueError: `headers` was not a ``dict`` or ``list`` of ``tuple``.
"""
if isinstance(headers, dict):
headers = headers.items()
# NOTE(kgriffs): We can't use dict.update because we have to
# normalize the header names.
_headers = self._headers
for name, value in headers:
name, value = self._encode_header(name, value)
_headers[name.lower()] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def add_link(
self,
target,
rel,
title=None,
title_star=None,
anchor=None,
hreflang=None,
type_hint=None,
):
"""
Add a link header to the response.
See also: https://tools.ietf.org/html/rfc5988
Note:
Calling this method repeatedly will cause each link to be
appended to the Link header value, separated by commas.
Note:
So-called "link-extension" elements, as defined by RFC 5988,
are not yet supported. See also Issue #288.
Args:
target (str): Target IRI for the resource identified by the
link. Will be converted to a URI, if necessary, per
RFC 3987, Section 3.1.
rel (str): Relation type of the link, such as "next" or
"bookmark". See also http://goo.gl/618GHr for a list
of registered link relation types.
Kwargs:
title (str): Human-readable label for the destination of
the link (default ``None``). If the title includes non-ASCII
characters, you will need to use `title_star` instead, or
provide both a US-ASCII version using `title` and a
Unicode version using `title_star`.
title_star (tuple of str): Localized title describing the
destination of the link (default ``None``). The value must be a
two-member tuple in the form of (*language-tag*, *text*),
where *language-tag* is a standard language identifier as
defined in RFC 5646, Section 2.1, and *text* is a Unicode
string.
Note:
*language-tag* may be an empty string, in which case the
client will assume the language from the general context
of the current request.
Note:
*text* will always be encoded as UTF-8. If the string
contains non-ASCII characters, it should be passed as
a ``unicode`` type string (requires the 'u' prefix in
Python 2).
anchor (str): Override the context IRI with a different URI
(default None). By default, the context IRI for the link is
simply the IRI of the requested resource. The value
provided may be a relative URI.
hreflang (str or iterable): Either a single *language-tag*, or
a ``list`` or ``tuple`` of such tags to provide a hint to the
client as to the language of the result of following the link.
A list of tags may be given in order to indicate to the
client that the target resource is available in multiple
languages.
type_hint(str): Provides a hint as to the media type of the
result of dereferencing the link (default ``None``). As noted
in RFC 5988, this is only a hint and does not override the
Content-Type header returned when the link is followed.
"""
# PERF(kgriffs): Heuristic to detect possiblity of an extension
# relation type, in which case it will be a URL that may contain
# reserved characters. Otherwise, don't waste time running the
# string through uri.encode
#
# Example values for rel:
#
# "next"
# "http://example.com/ext-type"
# "https://example.com/ext-type"
# "alternate http://example.com/ext-type"
# "http://example.com/ext-type alternate"
#
if "//" in rel:
if " " in rel:
rel = '"' + " ".join([uri_encode(r) for r in rel.split()]) + '"'
else:
rel = '"' + uri_encode(rel) + '"'
value = "<" + uri_encode(target) + ">; rel=" + rel
if title is not None:
value += '; title="' + title + '"'
if title_star is not None:
value += (
"; title*=UTF-8'" + title_star[0] + "'" + uri_encode_value(title_star[1])
)
if type_hint is not None:
value += '; type="' + type_hint + '"'
if hreflang is not None:
if isinstance(hreflang, STRING_TYPES):
value += "; hreflang=" + hreflang
else:
value += "; "
value += "; ".join(["hreflang=" + lang for lang in hreflang])
if anchor is not None:
value += '; anchor="' + uri_encode(anchor) + '"'
if PY2:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
value = str(value)
_headers = self._headers
if "link" in _headers:
_headers["link"] += ", " + value
else:
_headers["link"] = value
|
def add_link(
self,
target,
rel,
title=None,
title_star=None,
anchor=None,
hreflang=None,
type_hint=None,
):
"""
Add a link header to the response.
See also: https://tools.ietf.org/html/rfc5988
Note:
Calling this method repeatedly will cause each link to be
appended to the Link header value, separated by commas.
Note:
So-called "link-extension" elements, as defined by RFC 5988,
are not yet supported. See also Issue #288.
Args:
target (str): Target IRI for the resource identified by the
link. Will be converted to a URI, if necessary, per
RFC 3987, Section 3.1.
rel (str): Relation type of the link, such as "next" or
"bookmark". See also http://goo.gl/618GHr for a list
of registered link relation types.
Kwargs:
title (str): Human-readable label for the destination of
the link (default ``None``). If the title includes non-ASCII
characters, you will need to use `title_star` instead, or
provide both a US-ASCII version using `title` and a
Unicode version using `title_star`.
title_star (tuple of str): Localized title describing the
destination of the link (default ``None``). The value must be a
two-member tuple in the form of (*language-tag*, *text*),
where *language-tag* is a standard language identifier as
defined in RFC 5646, Section 2.1, and *text* is a Unicode
string.
Note:
*language-tag* may be an empty string, in which case the
client will assume the language from the general context
of the current request.
Note:
*text* will always be encoded as UTF-8. If the string
contains non-ASCII characters, it should be passed as
a ``unicode`` type string (requires the 'u' prefix in
Python 2).
anchor (str): Override the context IRI with a different URI
(default None). By default, the context IRI for the link is
simply the IRI of the requested resource. The value
provided may be a relative URI.
hreflang (str or iterable): Either a single *language-tag*, or
a ``list`` or ``tuple`` of such tags to provide a hint to the
client as to the language of the result of following the link.
A list of tags may be given in order to indicate to the
client that the target resource is available in multiple
languages.
type_hint(str): Provides a hint as to the media type of the
result of dereferencing the link (default ``None``). As noted
in RFC 5988, this is only a hint and does not override the
Content-Type header returned when the link is followed.
"""
# PERF(kgriffs): Heuristic to detect possiblity of an extension
# relation type, in which case it will be a URL that may contain
# reserved characters. Otherwise, don't waste time running the
# string through uri.encode
#
# Example values for rel:
#
# "next"
# "http://example.com/ext-type"
# "https://example.com/ext-type"
# "alternate http://example.com/ext-type"
# "http://example.com/ext-type alternate"
#
if "//" in rel:
if " " in rel:
rel = '"' + " ".join([uri_encode(r) for r in rel.split()]) + '"'
else:
rel = '"' + uri_encode(rel) + '"'
value = "<" + uri_encode(target) + ">; rel=" + rel
if title is not None:
value += '; title="' + title + '"'
if title_star is not None:
value += (
"; title*=UTF-8'" + title_star[0] + "'" + uri_encode_value(title_star[1])
)
if type_hint is not None:
value += '; type="' + type_hint + '"'
if hreflang is not None:
if isinstance(hreflang, STRING_TYPES):
value += "; hreflang=" + hreflang
else:
value += "; "
value += "; ".join(["hreflang=" + lang for lang in hreflang])
if anchor is not None:
value += '; anchor="' + uri_encode(anchor) + '"'
_headers = self._headers
if "link" in _headers:
_headers["link"] += ", " + value
else:
_headers["link"] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def header_property(name, doc, transform=None):
"""Creates a header getter/setter.
Args:
name: Header name, e.g., "Content-Type"
doc: Docstring for the property
transform: Transformation function to use when setting the
property. The value will be passed to the function, and
the function should return the transformed value to use
as the value of the header (default ``None``).
"""
normalized_name = name.lower()
def fget(self):
try:
return self._headers[normalized_name]
except KeyError:
return None
if transform is None:
if six.PY2:
def fset(self, value):
self._headers[normalized_name] = str(value)
else:
def fset(self, value):
self._headers[normalized_name] = value
else:
def fset(self, value):
self._headers[normalized_name] = transform(value)
def fdel(self):
del self._headers[normalized_name]
return property(fget, fset, fdel, doc)
|
def header_property(name, doc, transform=None):
"""Creates a header getter/setter.
Args:
name: Header name, e.g., "Content-Type"
doc: Docstring for the property
transform: Transformation function to use when setting the
property. The value will be passed to the function, and
the function should return the transformed value to use
as the value of the header (default ``None``).
"""
normalized_name = name.lower()
def fget(self):
try:
return self._headers[normalized_name]
except KeyError:
return None
if transform is None:
def fset(self, value):
self._headers[normalized_name] = value
else:
def fset(self, value):
self._headers[normalized_name] = transform(value)
def fdel(self):
del self._headers[normalized_name]
return property(fget, fset, fdel, doc)
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def fset(self, value):
self._headers[normalized_name] = value
|
def fset(self, value):
self._headers[normalized_name] = transform(value)
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def format_range(value):
"""Formats a range header tuple per the HTTP spec.
Args:
value: ``tuple`` passed to `req.range`
"""
# PERF(kgriffs): % was found to be faster than str.format(),
# string concatenation, and str.join() in this case.
if len(value) == 4:
result = "%s %s-%s/%s" % (value[3], value[0], value[1], value[2])
else:
result = "bytes %s-%s/%s" % (value[0], value[1], value[2])
if six.PY2:
# NOTE(kgriffs): In case one of the values was a unicode
# string, convert back to str
result = str(result)
return result
|
def format_range(value):
"""Formats a range header tuple per the HTTP spec.
Args:
value: ``tuple`` passed to `req.range`
"""
# PERF(kgriffs): % was found to be faster than str.format(),
# string concatenation, and str.join() in this case.
if len(value) == 4:
return "%s %s-%s/%s" % (value[3], value[0], value[1], value[2])
return "bytes %s-%s/%s" % (value[0], value[1], value[2])
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only ISO-8859-1 characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to ISO-8859-1.
"""
name, value = self._encode_header(name, value)
# NOTE(kgriffs): normalize name by lowercasing it
self._headers[name.lower()] = value
|
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name to set (case-insensitive). Must be of
type ``str`` or ``StringType``, and only character values 0x00
through 0xFF may be used on platforms that use wide
characters.
value (str): Value for the header. Must be of type ``str`` or
``StringType``, and only character values 0x00 through 0xFF
may be used on platforms that use wide characters.
"""
# NOTE(kgriffs): normalize name by lowercasing it
self._headers[name.lower()] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Cookie and Set-Cookie being the notable exceptions.
Warning:
For setting cookies, see :py:meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only ISO-8859-1 characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to ISO-8859-1.
"""
name, value = self._encode_header(name, value)
name = name.lower()
if name in self._headers:
value = self._headers[name] + "," + value
self._headers[name] = value
|
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Cookie and Set-Cookie being the notable exceptions.
Warning:
For setting cookies, see :py:meth:`~.set_cookie`
Args:
name (str): Header name to set (case-insensitive). Must be of
type ``str`` or ``StringType``, and only character values 0x00
through 0xFF may be used on platforms that use wide
characters.
value (str): Value for the header. Must be of type ``str`` or
``StringType``, and only character values 0x00 through 0xFF
may be used on platforms that use wide characters.
"""
name = name.lower()
if name in self._headers:
value = self._headers[name] + "," + value
self._headers[name] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and *value* must be of type ``str`` or ``StringType`` and
contain only ISO-8859-1 characters. Under Python 2.x, the
``unicode`` type is also accepted, although such strings are
also limited to ISO-8859-1.
Note:
Falcon can process a list of tuples slightly faster
than a dict.
Raises:
ValueError: `headers` was not a ``dict`` or ``list`` of ``tuple``.
"""
if isinstance(headers, dict):
headers = headers.items()
# NOTE(kgriffs): We can't use dict.update because we have to
# normalize the header names.
_headers = self._headers
for name, value in headers:
name, value = self._encode_header(name, value)
_headers[name.lower()] = value
|
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or ``list`` of (*name*, *value*) tuples. Both *name*
and *value* must be of type ``str`` or ``StringType``, and
only character values 0x00 through 0xFF may be used on
platforms that use wide characters.
Note:
Falcon can process a list of tuples slightly faster
than a dict.
Raises:
ValueError: `headers` was not a ``dict`` or ``list`` of ``tuple``.
"""
if isinstance(headers, dict):
headers = headers.items()
# NOTE(kgriffs): We can't use dict.update because we have to
# normalize the header names.
_headers = self._headers
for name, value in headers:
_headers[name.lower()] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def __call__(self, env, start_response):
"""WSGI `app` method.
Makes instances of API callable from a WSGI server. May be used to
host an API or called directly in order to simulate requests when
testing the API.
See also PEP 3333.
Args:
env (dict): A WSGI environment dictionary
start_response (callable): A WSGI helper function for setting
status and headers on a response.
"""
req = self._request_type(env, options=self.req_options)
resp = self._response_type()
resource = None
middleware_stack = [] # Keep track of executed components
params = {}
try:
# NOTE(kgriffs): Using an inner try..except in order to
# address the case when err_handler raises HTTPError.
# NOTE(kgriffs): Coverage is giving false negatives,
# so disabled on relevant lines. All paths are tested
# afaict.
try:
# NOTE(ealogar): The execution of request middleware should be
# before routing. This will allow request mw to modify path.
self._call_req_mw(middleware_stack, req, resp)
# NOTE(warsaw): Moved this to inside the try except because it
# is possible when using object-based traversal for
# _get_responder() to fail. An example is a case where an
# object does not have the requested next-hop child resource.
# In that case, the object being asked to dispatch to its
# child will raise an HTTP exception signalling the problem,
# e.g. a 404.
responder, params, resource = self._get_responder(req)
self._call_rsrc_mw(middleware_stack, req, resp, resource, params)
responder(req, resp, **params)
self._call_resp_mw(middleware_stack, req, resp, resource)
except Exception as ex:
for err_type, err_handler in self._error_handlers:
if isinstance(ex, err_type):
err_handler(ex, req, resp, params)
self._call_after_hooks(req, resp, resource)
self._call_resp_mw(middleware_stack, req, resp, resource)
break
else:
# PERF(kgriffs): This will propagate HTTPError to
# the handler below. It makes handling HTTPError
# less efficient, but that is OK since error cases
# don't need to be as fast as the happy path, and
# indeed, should perhaps be slower to create
# backpressure on clients that are issuing bad
# requests.
# NOTE(ealogar): This will executed remaining
# process_response when no error_handler is given
# and for whatever exception. If an HTTPError is raised
# remaining process_response will be executed later.
self._call_resp_mw(middleware_stack, req, resp, resource)
raise
except HTTPStatus as ex:
self._compose_status_response(req, resp, ex)
self._call_after_hooks(req, resp, resource)
self._call_resp_mw(middleware_stack, req, resp, resource)
except HTTPError as ex:
self._compose_error_response(req, resp, ex)
self._call_after_hooks(req, resp, resource)
self._call_resp_mw(middleware_stack, req, resp, resource)
#
# Set status and headers
#
if req.method == "HEAD" or resp.status in self._BODILESS_STATUS_CODES:
body = []
else:
body, length = self._get_body(resp, env.get("wsgi.file_wrapper"))
if length is not None:
resp._headers["content-length"] = str(length)
# NOTE(kgriffs): Based on wsgiref.validate's interpretation of
# RFC 2616, as commented in that module's source code. The
# presence of the Content-Length header is not similarly
# enforced.
if resp.status in (status.HTTP_204, status.HTTP_304):
media_type = None
else:
media_type = self._media_type
headers = resp._wsgi_headers(media_type)
# Return the response per the WSGI spec
start_response(resp.status, headers)
return body
|
def __call__(self, env, start_response):
"""WSGI `app` method.
Makes instances of API callable from a WSGI server. May be used to
host an API or called directly in order to simulate requests when
testing the API.
See also PEP 3333.
Args:
env (dict): A WSGI environment dictionary
start_response (callable): A WSGI helper function for setting
status and headers on a response.
"""
req = self._request_type(env, options=self.req_options)
resp = self._response_type()
resource = None
middleware_stack = [] # Keep track of executed components
params = {}
try:
# NOTE(kgriffs): Using an inner try..except in order to
# address the case when err_handler raises HTTPError.
# NOTE(kgriffs): Coverage is giving false negatives,
# so disabled on relevant lines. All paths are tested
# afaict.
try:
# NOTE(ealogar): The execution of request middleware should be
# before routing. This will allow request mw to modify path.
self._call_req_mw(middleware_stack, req, resp)
# NOTE(warsaw): Moved this to inside the try except because it
# is possible when using object-based traversal for
# _get_responder() to fail. An example is a case where an
# object does not have the requested next-hop child resource.
# In that case, the object being asked to dispatch to its
# child will raise an HTTP exception signalling the problem,
# e.g. a 404.
responder, params, resource = self._get_responder(req)
self._call_rsrc_mw(middleware_stack, req, resp, resource, params)
responder(req, resp, **params)
self._call_resp_mw(middleware_stack, req, resp, resource)
except Exception as ex:
for err_type, err_handler in self._error_handlers:
if isinstance(ex, err_type):
err_handler(ex, req, resp, params)
self._call_after_hooks(req, resp, resource)
self._call_resp_mw(middleware_stack, req, resp, resource)
break
else:
# PERF(kgriffs): This will propagate HTTPError to
# the handler below. It makes handling HTTPError
# less efficient, but that is OK since error cases
# don't need to be as fast as the happy path, and
# indeed, should perhaps be slower to create
# backpressure on clients that are issuing bad
# requests.
# NOTE(ealogar): This will executed remaining
# process_response when no error_handler is given
# and for whatever exception. If an HTTPError is raised
# remaining process_response will be executed later.
self._call_resp_mw(middleware_stack, req, resp, resource)
raise
except HTTPStatus as ex:
self._compose_status_response(req, resp, ex)
self._call_after_hooks(req, resp, resource)
self._call_resp_mw(middleware_stack, req, resp, resource)
except HTTPError as ex:
self._compose_error_response(req, resp, ex)
self._call_after_hooks(req, resp, resource)
self._call_resp_mw(middleware_stack, req, resp, resource)
#
# Set status and headers
#
if req.method == "HEAD" or resp.status in self._BODILESS_STATUS_CODES:
body = []
else:
body, length = self._get_body(resp, env.get("wsgi.file_wrapper"))
if length is not None:
resp._headers["content-length"] = str(length)
# Set content type if needed
use_content_type = body or req.method == "HEAD" or resp.status == status.HTTP_416
if use_content_type:
media_type = self._media_type
else:
media_type = None
headers = resp._wsgi_headers(media_type)
# Return the response per the WSGI spec
start_response(resp.status, headers)
return body
|
https://github.com/falconry/falcon/issues/689
|
Traceback (most recent call last):
<...snip...>
File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__
start_response(resp.status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper
check_content_type(status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 419, in check_content_type
assert_(0, "No Content-Type header found in headers (%s)" % headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 129, in assert_
raise AssertionError(*args)
AssertionError: No Content-Type header found in headers ([('content-length', '0')])
|
AssertionError
|
def default_serialize_error(req, resp, exception):
"""Serialize the given instance of HTTPError.
This function determines which of the supported media types, if
any, are acceptable by the client, and serializes the error
to the preferred type.
Currently, JSON and XML are the only supported media types. If the
client accepts both JSON and XML with equal weight, JSON will be
chosen.
Other media types can be supported by using a custom error serializer.
Note:
If a custom media type is used and the type includes a
"+json" or "+xml" suffix, the error will be serialized
to JSON or XML, respectively. If this behavior is not
desirable, a custom error serializer may be used to
override this one.
Args:
req: Instance of ``falcon.Request``
resp: Instance of ``falcon.Response``
exception: Instance of ``falcon.HTTPError``
"""
representation = None
preferred = req.client_prefers(("application/xml", "text/xml", "application/json"))
if preferred is None:
# NOTE(kgriffs): See if the client expects a custom media
# type based on something Falcon supports. Returning something
# is probably better than nothing, but if that is not
# desired, this behavior can be customized by adding a
# custom HTTPError serializer for the custom type.
accept = req.accept.lower()
# NOTE(kgriffs): Simple heuristic, but it's fast, and
# should be sufficiently accurate for our purposes. Does
# not take into account weights if both types are
# acceptable (simply chooses JSON). If it turns out we
# need to be more sophisticated, we can always change it
# later (YAGNI).
if "+json" in accept:
preferred = "application/json"
elif "+xml" in accept:
preferred = "application/xml"
if preferred is not None:
resp.append_header("Vary", "Accept")
if preferred == "application/json":
representation = exception.to_json()
else:
representation = exception.to_xml()
resp.body = representation
resp.content_type = preferred + "; charset=UTF-8"
|
def default_serialize_error(req, resp, exception):
"""Serialize the given instance of HTTPError.
This function determines which of the supported media types, if
any, are acceptable by the client, and serializes the error
to the preferred type.
Currently, JSON and XML are the only supported media types. If the
client accepts both JSON and XML with equal weight, JSON will be
chosen.
Other media types can be supported by using a custom error serializer.
Note:
If a custom media type is used and the type includes a
"+json" or "+xml" suffix, the error will be serialized
to JSON or XML, respectively. If this behavior is not
desirable, a custom error serializer may be used to
override this one.
Args:
req: Instance of ``falcon.Request``
resp: Instance of ``falcon.Response``
exception: Instance of ``falcon.HTTPError``
"""
representation = None
preferred = req.client_prefers(("application/xml", "text/xml", "application/json"))
if preferred is None:
# NOTE(kgriffs): See if the client expects a custom media
# type based on something Falcon supports. Returning something
# is probably better than nothing, but if that is not
# desired, this behavior can be customized by adding a
# custom HTTPError serializer for the custom type.
accept = req.accept.lower()
# NOTE(kgriffs): Simple heuristic, but it's fast, and
# should be sufficiently accurate for our purposes. Does
# not take into account weights if both types are
# acceptable (simply chooses JSON). If it turns out we
# need to be more sophisticated, we can always change it
# later (YAGNI).
if "+json" in accept:
preferred = "application/json"
elif "+xml" in accept:
preferred = "application/xml"
if preferred is not None:
resp.append_header("Vary", "Accept")
if preferred == "application/json":
representation = exception.to_json()
else:
representation = exception.to_xml()
resp.body = representation
resp.content_type = preferred
|
https://github.com/falconry/falcon/issues/689
|
Traceback (most recent call last):
<...snip...>
File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__
start_response(resp.status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper
check_content_type(status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 419, in check_content_type
assert_(0, "No Content-Type header found in headers (%s)" % headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 129, in assert_
raise AssertionError(*args)
AssertionError: No Content-Type header found in headers ([('content-length', '0')])
|
AssertionError
|
def _wsgi_headers(self, media_type=None, py2=PY2):
"""Convert headers into the format expected by WSGI servers.
Args:
media_type: Default media type to use for the Content-Type
header if the header was not set explicitly (default ``None``).
"""
headers = self._headers
# PERF(kgriffs): Using "in" like this is faster than using
# dict.setdefault (tested on py27).
set_content_type = media_type is not None and "content-type" not in headers
if set_content_type:
headers["content-type"] = media_type
if py2:
# PERF(kgriffs): Don't create an extra list object if
# it isn't needed.
items = headers.items()
else:
items = list(headers.items())
if self._cookies is not None:
# PERF(tbug):
# The below implementation is ~23% faster than
# the alternative:
#
# self._cookies.output().split("\\r\\n")
#
# Even without the .split("\\r\\n"), the below
# is still ~17% faster, so don't use .output()
items += [("set-cookie", c.OutputString()) for c in self._cookies.values()]
return items
|
def _wsgi_headers(self, media_type=None, py2=PY2):
"""Convert headers into the format expected by WSGI servers.
Args:
media_type: Default media type to use for the Content-Type
header if the header was not set explicitly (default ``None``).
"""
headers = self._headers
# PERF(kgriffs): Using "in" like this is faster than using
# dict.setdefault (tested on py27).
set_content_type = media_type is not None and "content-type" not in headers
if set_content_type:
headers["content-type"] = media_type
if py2:
# PERF(kgriffs): Don't create an extra list object if
# it isn't needed.
items = headers.items()
else:
items = list(headers.items()) # pragma: no cover
if self._cookies is not None:
# PERF(tbug):
# The below implementation is ~23% faster than
# the alternative:
#
# self._cookies.output().split("\\r\\n")
#
# Even without the .split("\\r\\n"), the below
# is still ~17% faster, so don't use .output()
items += [("set-cookie", c.OutputString()) for c in self._cookies.values()]
return items
|
https://github.com/falconry/falcon/issues/689
|
Traceback (most recent call last):
<...snip...>
File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__
start_response(resp.status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper
check_content_type(status, headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 419, in check_content_type
assert_(0, "No Content-Type header found in headers (%s)" % headers)
File "/usr/lib/python2.7/wsgiref/validate.py", line 129, in assert_
raise AssertionError(*args)
AssertionError: No Content-Type header found in headers ([('content-length', '0')])
|
AssertionError
|
def decode(encoded_uri):
"""Decodes percent-encoded characters in a URI or query string.
This function models the behavior of `urllib.parse.unquote_plus`,
albeit in a faster, more straightforward manner.
Args:
encoded_uri (str): An encoded URI (full or partial).
Returns:
str: A decoded URL. If the URL contains escaped non-ASCII
characters, UTF-8 is assumed per RFC 3986.
"""
decoded_uri = encoded_uri
# PERF(kgriffs): Don't take the time to instantiate a new
# string unless we have to.
if "+" in decoded_uri:
decoded_uri = decoded_uri.replace("+", " ")
# Short-circuit if we can
if "%" not in decoded_uri:
return decoded_uri
# NOTE(kgriffs): Clients should never submit a URI that has
# unescaped non-ASCII chars in them, but just in case they
# do, let's encode into a non-lossy format.
decoded_uri = decoded_uri.encode("utf-8")
# PERF(kgriffs): This was found to be faster than using
# a regex sub call or list comprehension with a join.
tokens = decoded_uri.split(b"%")
decoded_uri = tokens[0]
for token in tokens[1:]:
token_partial = token[:2]
if token_partial in _HEX_TO_BYTE:
decoded_uri += _HEX_TO_BYTE[token_partial] + token[2:]
else:
# malformed percentage like "x=%" or "y=%+"
decoded_uri += b"%" + token
# Convert back to str
return decoded_uri.decode("utf-8", "replace")
|
def decode(encoded_uri):
"""Decodes percent-encoded characters in a URI or query string.
This function models the behavior of `urllib.parse.unquote_plus`,
albeit in a faster, more straightforward manner.
Args:
encoded_uri (str): An encoded URI (full or partial).
Returns:
str: A decoded URL. If the URL contains escaped non-ASCII
characters, UTF-8 is assumed per RFC 3986.
"""
decoded_uri = encoded_uri
# PERF(kgriffs): Don't take the time to instantiate a new
# string unless we have to.
if "+" in decoded_uri:
decoded_uri = decoded_uri.replace("+", " ")
# Short-circuit if we can
if "%" not in decoded_uri:
return decoded_uri
# NOTE(kgriffs): Clients should never submit a URI that has
# unescaped non-ASCII chars in them, but just in case they
# do, let's encode into a non-lossy format.
decoded_uri = decoded_uri.encode("utf-8")
# PERF(kgriffs): This was found to be faster than using
# a regex sub call or list comprehension with a join.
tokens = decoded_uri.split(b"%")
decoded_uri = tokens[0]
for token in tokens[1:]:
decoded_uri += _HEX_TO_BYTE[token[:2]] + token[2:]
# Convert back to str
return decoded_uri.decode("utf-8", "replace")
|
https://github.com/falconry/falcon/issues/588
|
Traceback (most recent call last):
File "/opt/ads/venv/local/lib/python2.7/site-packages/falcon/api.py", line 154, in __call__
req = self._request_type(env, options=self.req_options)
File "/opt/ads/venv/local/lib/python2.7/site-packages/falcon/request.py", line 237, in __init__
keep_blank_qs_values=self.options.keep_blank_qs_values,
File "/opt/ads/venv/local/lib/python2.7/site-packages/falcon/util/uri.py", line 327, in parse_query_string
params[k] = decode(v)
File "/opt/ads/venv/local/lib/python2.7/site-packages/falcon/util/uri.py", line 181, in decode
char, byte = _HEX_TO_BYTE[token[:2]]
KeyError: ''
|
KeyError
|
def __init__(self, env, options=None):
global _maybe_wrap_wsgi_stream
self.env = env
self.options = options if options else RequestOptions()
self._wsgierrors = env["wsgi.errors"]
self.stream = env["wsgi.input"]
self.method = env["REQUEST_METHOD"]
# Normalize path
path = env["PATH_INFO"]
if path:
if six.PY3: # pragma: no cover
# PEP 3333 specifies that PATH_INFO variable are always
# "bytes tunneled as latin-1" and must be encoded back
path = path.encode("latin1").decode("utf-8", "replace")
if len(path) != 1 and path.endswith("/"):
self.path = path[:-1]
else:
self.path = path
else:
self.path = "/"
# PERF(kgriffs): if...in is faster than using env.get(...)
if "QUERY_STRING" in env:
self.query_string = env["QUERY_STRING"]
if self.query_string:
self._params = parse_query_string(
self.query_string,
keep_blank_qs_values=self.options.keep_blank_qs_values,
)
else:
self._params = {}
else:
self.query_string = ""
self._params = {}
self._cookies = None
self._cached_headers = None
self._cached_uri = None
self._cached_relative_uri = None
try:
self.content_type = self.env["CONTENT_TYPE"]
except KeyError:
self.content_type = None
# NOTE(kgriffs): Wrap wsgi.input if needed to make read() more robust,
# normalizing semantics between, e.g., gunicorn and wsgiref.
if _maybe_wrap_wsgi_stream:
if isinstance(self.stream, NativeStream):
# NOTE(kgriffs): This is covered by tests, it's just that
# coverage can't figure this out for some reason (TBD).
self._wrap_stream() # pragma nocover
else:
# PERF(kgriffs): If self.stream does not need to be wrapped
# this time, it never needs to be wrapped since the server
# will continue using the same type for wsgi.input.
_maybe_wrap_wsgi_stream = False
# PERF(kgriffs): Technically, we should spend a few more
# cycles and parse the content type for real, but
# this heuristic will work virtually all the time.
if (
self.content_type is not None
and "application/x-www-form-urlencoded" in self.content_type
):
self._parse_form_urlencoded()
if self.context_type is None:
# Literal syntax is more efficient than using dict()
self.context = {}
else:
# pylint will detect this as not-callable because it only sees the
# declaration of None, not whatever type a subclass may have set.
self.context = self.context_type() # pylint: disable=not-callable
|
def __init__(self, env, options=None):
global _maybe_wrap_wsgi_stream
self.env = env
self.options = options if options else RequestOptions()
if self.context_type is None:
# Literal syntax is more efficient than using dict()
self.context = {}
else:
# pylint will detect this as not-callable because it only sees the
# declaration of None, not whatever type a subclass may have set.
self.context = self.context_type() # pylint: disable=not-callable
self._wsgierrors = env["wsgi.errors"]
self.stream = env["wsgi.input"]
self.method = env["REQUEST_METHOD"]
# Normalize path
path = env["PATH_INFO"]
if path:
if six.PY3: # pragma: no cover
# PEP 3333 specifies that PATH_INFO variable are always
# "bytes tunneled as latin-1" and must be encoded back
path = path.encode("latin1").decode("utf-8", "replace")
if len(path) != 1 and path.endswith("/"):
self.path = path[:-1]
else:
self.path = path
else:
self.path = "/"
# PERF(kgriffs): if...in is faster than using env.get(...)
if "QUERY_STRING" in env:
self.query_string = env["QUERY_STRING"]
if self.query_string:
self._params = parse_query_string(
self.query_string,
keep_blank_qs_values=self.options.keep_blank_qs_values,
)
else:
self._params = {}
else:
self.query_string = ""
self._params = {}
self._cookies = None
self._cached_headers = None
self._cached_uri = None
self._cached_relative_uri = None
try:
self.content_type = self.env["CONTENT_TYPE"]
except KeyError:
self.content_type = None
# NOTE(kgriffs): Wrap wsgi.input if needed to make read() more robust,
# normalizing semantics between, e.g., gunicorn and wsgiref.
if _maybe_wrap_wsgi_stream:
if isinstance(self.stream, NativeStream):
# NOTE(kgriffs): This is covered by tests, it's just that
# coverage can't figure this out for some reason (TBD).
self._wrap_stream() # pragma nocover
else:
# PERF(kgriffs): If self.stream does not need to be wrapped
# this time, it never needs to be wrapped since the server
# will continue using the same type for wsgi.input.
_maybe_wrap_wsgi_stream = False
# PERF(kgriffs): Technically, we should spend a few more
# cycles and parse the content type for real, but
# this heuristic will work virtually all the time.
if (
self.content_type is not None
and "application/x-www-form-urlencoded" in self.content_type
):
self._parse_form_urlencoded()
|
https://github.com/falconry/falcon/issues/567
|
Traceback (most recent call last):
File "falcon/api.py", line 154, in falcon.api.API.__call__ (falcon/api.c:1809)
File "falcon/request.py", line 214, in falcon.request.Request.__init__ (falcon/request.c:1629)
File "./dkuim/api.py", line 29, in context
print("Create CONTEXT", req.url)
File "falcon/request.py", line 399, in falcon.request.Request.uri (falcon/request.c:4006)
AttributeError: _cached_uri
|
AttributeError
|
def _add_noise_shaping(dithered_waveform: Tensor, waveform: Tensor) -> Tensor:
r"""Noise shaping is calculated by error:
error[n] = dithered[n] - original[n]
noise_shaped_waveform[n] = dithered[n] + error[n-1]
"""
wf_shape = waveform.size()
waveform = waveform.reshape(-1, wf_shape[-1])
dithered_shape = dithered_waveform.size()
dithered_waveform = dithered_waveform.reshape(-1, dithered_shape[-1])
error = dithered_waveform - waveform
# add error[n-1] to dithered_waveform[n], so offset the error by 1 index
zeros = torch.zeros(1, dtype=error.dtype, device=error.device)
for index in range(error.size()[0]):
err = error[index]
error_offset = torch.cat((zeros, err))
error[index] = error_offset[: waveform.size()[1]]
noise_shaped = dithered_waveform + error
return noise_shaped.reshape(dithered_shape[:-1] + noise_shaped.shape[-1:])
|
def _add_noise_shaping(dithered_waveform: Tensor, waveform: Tensor) -> Tensor:
r"""Noise shaping is calculated by error:
error[n] = dithered[n] - original[n]
noise_shaped_waveform[n] = dithered[n] + error[n-1]
"""
wf_shape = waveform.size()
waveform = waveform.reshape(-1, wf_shape[-1])
dithered_shape = dithered_waveform.size()
dithered_waveform = dithered_waveform.reshape(-1, dithered_shape[-1])
error = dithered_waveform - waveform
# add error[n-1] to dithered_waveform[n], so offset the error by 1 index
for index in range(error.size()[0]):
err = error[index]
error_offset = torch.cat((torch.zeros(1), err))
error[index] = error_offset[: waveform.size()[1]]
noise_shaped = dithered_waveform + error
return noise_shaped.reshape(dithered_shape[:-1] + noise_shaped.shape[-1:])
|
https://github.com/pytorch/audio/issues/862
|
In [5]: F.dither(torch.randn(2, 441000).cuda())
Out[5]:
tensor([[ 0.3492, -1.8493, -0.4794, ..., 0.3907, -0.4662, -0.2815],
[-1.3821, 0.0401, 0.0023, ..., -1.1417, 0.6411, -0.6851]],
device='cuda:0')
In [6]: F.dither(torch.randn(2, 441000).cuda(), noise_shaping=True)
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-6-5dfd051b2e21> in <module>
----> 1 F.dither(torch.randn(2, 441000).cuda(), noise_shaping=True)
~/anaconda3/lib/python3.7/site-packages/torchaudio-0.7.0a0+8fdb8df-py3.7-linux-x86_64.egg/torchaudio/functional.py in dither(waveform, density_function, noise_shaping)
1836
1837 if noise_shaping:
-> 1838 return _add_noise_shaping(dithered, waveform)
1839 else:
1840 return dithered
~/anaconda3/lib/python3.7/site-packages/torchaudio-0.7.0a0+8fdb8df-py3.7-linux-x86_64.egg/torchaudio/functional.py in _add_noise_shaping(dithered_waveform, waveform)
1732 for index in range(error.size()[0]):
1733 err = error[index]
-> 1734 error_offset = torch.cat((torch.zeros(1), err))
1735 error[index] = error_offset[:waveform.size()[1]]
1736
|
RuntimeError
|
def _get_strided(
waveform: Tensor, window_size: int, window_shift: int, snip_edges: bool
) -> Tensor:
r"""Given a waveform (1D tensor of size ``num_samples``), it returns a 2D tensor (m, ``window_size``)
representing how the window is shifted along the waveform. Each row is a frame.
Args:
waveform (Tensor): Tensor of size ``num_samples``
window_size (int): Frame length
window_shift (int): Frame shift
snip_edges (bool): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends.
Returns:
Tensor: 2D tensor of size (m, ``window_size``) where each row is a frame
"""
assert waveform.dim() == 1
num_samples = waveform.size(0)
strides = (window_shift * waveform.stride(0), waveform.stride(0))
if snip_edges:
if num_samples < window_size:
return torch.empty((0, 0), dtype=waveform.dtype, device=waveform.device)
else:
m = 1 + (num_samples - window_size) // window_shift
else:
reversed_waveform = torch.flip(waveform, [0])
m = (num_samples + (window_shift // 2)) // window_shift
pad = window_size // 2 - window_shift // 2
pad_right = reversed_waveform
if pad > 0:
# torch.nn.functional.pad returns [2,1,0,1,2] for 'reflect'
# but we want [2, 1, 0, 0, 1, 2]
pad_left = reversed_waveform[-pad:]
waveform = torch.cat((pad_left, waveform, pad_right), dim=0)
else:
# pad is negative so we want to trim the waveform at the front
waveform = torch.cat((waveform[-pad:], pad_right), dim=0)
sizes = (m, window_size)
return waveform.as_strided(sizes, strides)
|
def _get_strided(
waveform: Tensor, window_size: int, window_shift: int, snip_edges: bool
) -> Tensor:
r"""Given a waveform (1D tensor of size ``num_samples``), it returns a 2D tensor (m, ``window_size``)
representing how the window is shifted along the waveform. Each row is a frame.
Args:
waveform (Tensor): Tensor of size ``num_samples``
window_size (int): Frame length
window_shift (int): Frame shift
snip_edges (bool): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends.
Returns:
Tensor: 2D tensor of size (m, ``window_size``) where each row is a frame
"""
assert waveform.dim() == 1
num_samples = waveform.size(0)
strides = (window_shift * waveform.stride(0), waveform.stride(0))
if snip_edges:
if num_samples < window_size:
return torch.empty((0, 0))
else:
m = 1 + (num_samples - window_size) // window_shift
else:
reversed_waveform = torch.flip(waveform, [0])
m = (num_samples + (window_shift // 2)) // window_shift
pad = window_size // 2 - window_shift // 2
pad_right = reversed_waveform
if pad > 0:
# torch.nn.functional.pad returns [2,1,0,1,2] for 'reflect'
# but we want [2, 1, 0, 0, 1, 2]
pad_left = reversed_waveform[-pad:]
waveform = torch.cat((pad_left, waveform, pad_right), dim=0)
else:
# pad is negative so we want to trim the waveform at the front
waveform = torch.cat((waveform[-pad:], pad_right), dim=0)
sizes = (m, window_size)
return waveform.as_strided(sizes, strides)
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
def _feature_window_function(
window_type: str,
window_size: int,
blackman_coeff: float,
device: torch.device,
dtype: int,
) -> Tensor:
r"""Returns a window function with the given type and size"""
if window_type == HANNING:
return torch.hann_window(
window_size, periodic=False, device=device, dtype=dtype
)
elif window_type == HAMMING:
return torch.hamming_window(
window_size,
periodic=False,
alpha=0.54,
beta=0.46,
device=device,
dtype=dtype,
)
elif window_type == POVEY:
# like hanning but goes to zero at edges
return torch.hann_window(
window_size, periodic=False, device=device, dtype=dtype
).pow(0.85)
elif window_type == RECTANGULAR:
return torch.ones(window_size, device=device, dtype=dtype)
elif window_type == BLACKMAN:
a = 2 * math.pi / (window_size - 1)
window_function = torch.arange(window_size, device=device, dtype=dtype)
# can't use torch.blackman_window as they use different coefficients
return (
blackman_coeff
- 0.5 * torch.cos(a * window_function)
+ (0.5 - blackman_coeff) * torch.cos(2 * a * window_function)
).to(device=device, dtype=dtype)
else:
raise Exception("Invalid window type " + window_type)
|
def _feature_window_function(
window_type: str, window_size: int, blackman_coeff: float
) -> Tensor:
r"""Returns a window function with the given type and size"""
if window_type == HANNING:
return torch.hann_window(window_size, periodic=False)
elif window_type == HAMMING:
return torch.hamming_window(window_size, periodic=False, alpha=0.54, beta=0.46)
elif window_type == POVEY:
# like hanning but goes to zero at edges
return torch.hann_window(window_size, periodic=False).pow(0.85)
elif window_type == RECTANGULAR:
return torch.ones(window_size)
elif window_type == BLACKMAN:
a = 2 * math.pi / (window_size - 1)
window_function = torch.arange(window_size)
# can't use torch.blackman_window as they use different coefficients
return (
blackman_coeff
- 0.5 * torch.cos(a * window_function)
+ (0.5 - blackman_coeff) * torch.cos(2 * a * window_function)
)
else:
raise Exception("Invalid window type " + window_type)
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
def _get_log_energy(
strided_input: Tensor, epsilon: Tensor, energy_floor: float
) -> Tensor:
r"""Returns the log energy of size (m) for a strided_input (m,*)"""
device, dtype = strided_input.device, strided_input.dtype
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
if energy_floor == 0.0:
return log_energy
return torch.max(
log_energy, torch.tensor(math.log(energy_floor), device=device, dtype=dtype)
)
|
def _get_log_energy(
strided_input: Tensor, epsilon: Tensor, energy_floor: float
) -> Tensor:
r"""Returns the log energy of size (m) for a strided_input (m,*)"""
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
if energy_floor == 0.0:
return log_energy
else:
return torch.max(log_energy, torch.tensor(math.log(energy_floor)))
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
def _get_window(
waveform: Tensor,
padded_window_size: int,
window_size: int,
window_shift: int,
window_type: str,
blackman_coeff: float,
snip_edges: bool,
raw_energy: bool,
energy_floor: float,
dither: float,
remove_dc_offset: bool,
preemphasis_coefficient: float,
) -> Tuple[Tensor, Tensor]:
r"""Gets a window and its log energy
Returns:
(Tensor, Tensor): strided_input of size (m, ``padded_window_size``) and signal_log_energy of size (m)
"""
device, dtype = waveform.device, waveform.dtype
epsilon = _get_epsilon(device, dtype)
# size (m, window_size)
strided_input = _get_strided(waveform, window_size, window_shift, snip_edges)
if dither != 0.0:
# Returns a random number strictly between 0 and 1
x = torch.max(
epsilon, torch.rand(strided_input.shape, device=device, dtype=dtype)
)
rand_gauss = torch.sqrt(-2 * x.log()) * torch.cos(2 * math.pi * x)
strided_input = strided_input + rand_gauss * dither
if remove_dc_offset:
# Subtract each row/frame by its mean
row_means = torch.mean(strided_input, dim=1).unsqueeze(1) # size (m, 1)
strided_input = strided_input - row_means
if raw_energy:
# Compute the log energy of each row/frame before applying preemphasis and
# window function
signal_log_energy = _get_log_energy(
strided_input, epsilon, energy_floor
) # size (m)
if preemphasis_coefficient != 0.0:
# strided_input[i,j] -= preemphasis_coefficient * strided_input[i, max(0, j-1)] for all i,j
offset_strided_input = torch.nn.functional.pad(
strided_input.unsqueeze(0), (1, 0), mode="replicate"
).squeeze(0) # size (m, window_size + 1)
strided_input = (
strided_input - preemphasis_coefficient * offset_strided_input[:, :-1]
)
# Apply window_function to each row/frame
window_function = _feature_window_function(
window_type, window_size, blackman_coeff, device, dtype
).unsqueeze(0) # size (1, window_size)
strided_input = strided_input * window_function # size (m, window_size)
# Pad columns with zero until we reach size (m, padded_window_size)
if padded_window_size != window_size:
padding_right = padded_window_size - window_size
strided_input = torch.nn.functional.pad(
strided_input.unsqueeze(0), (0, padding_right), mode="constant", value=0
).squeeze(0)
# Compute energy after window function (not the raw one)
if not raw_energy:
signal_log_energy = _get_log_energy(
strided_input, epsilon, energy_floor
) # size (m)
return strided_input, signal_log_energy
|
def _get_window(
waveform: Tensor,
padded_window_size: int,
window_size: int,
window_shift: int,
window_type: str,
blackman_coeff: float,
snip_edges: bool,
raw_energy: bool,
energy_floor: float,
dither: float,
remove_dc_offset: bool,
preemphasis_coefficient: float,
) -> Tuple[Tensor, Tensor]:
r"""Gets a window and its log energy
Returns:
(Tensor, Tensor): strided_input of size (m, ``padded_window_size``) and signal_log_energy of size (m)
"""
# size (m, window_size)
strided_input = _get_strided(waveform, window_size, window_shift, snip_edges)
if dither != 0.0:
# Returns a random number strictly between 0 and 1
x = torch.max(EPSILON, torch.rand(strided_input.shape))
rand_gauss = torch.sqrt(-2 * x.log()) * torch.cos(2 * math.pi * x)
strided_input = strided_input + rand_gauss * dither
if remove_dc_offset:
# Subtract each row/frame by its mean
row_means = torch.mean(strided_input, dim=1).unsqueeze(1) # size (m, 1)
strided_input = strided_input - row_means
if raw_energy:
# Compute the log energy of each row/frame before applying preemphasis and
# window function
signal_log_energy = _get_log_energy(
strided_input, EPSILON, energy_floor
) # size (m)
if preemphasis_coefficient != 0.0:
# strided_input[i,j] -= preemphasis_coefficient * strided_input[i, max(0, j-1)] for all i,j
offset_strided_input = torch.nn.functional.pad(
strided_input.unsqueeze(0), (1, 0), mode="replicate"
).squeeze(0) # size (m, window_size + 1)
strided_input = (
strided_input - preemphasis_coefficient * offset_strided_input[:, :-1]
)
# Apply window_function to each row/frame
window_function = _feature_window_function(
window_type, window_size, blackman_coeff
).unsqueeze(0) # size (1, window_size)
strided_input = strided_input * window_function # size (m, window_size)
# Pad columns with zero until we reach size (m, padded_window_size)
if padded_window_size != window_size:
padding_right = padded_window_size - window_size
strided_input = torch.nn.functional.pad(
strided_input.unsqueeze(0), (0, padding_right), mode="constant", value=0
).squeeze(0)
# Compute energy after window function (not the raw one)
if not raw_energy:
signal_log_energy = _get_log_energy(
strided_input, EPSILON, energy_floor
) # size (m)
return strided_input, signal_log_energy
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
def fbank(
waveform: Tensor,
blackman_coeff: float = 0.42,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
high_freq: float = 0.0,
htk_compat: bool = False,
low_freq: float = 20.0,
min_duration: float = 0.0,
num_mel_bins: int = 23,
preemphasis_coefficient: float = 0.97,
raw_energy: bool = True,
remove_dc_offset: bool = True,
round_to_power_of_two: bool = True,
sample_frequency: float = 16000.0,
snip_edges: bool = True,
subtract_mean: bool = False,
use_energy: bool = False,
use_log_fbank: bool = True,
use_power: bool = True,
vtln_high: float = -500.0,
vtln_low: float = 100.0,
vtln_warp: float = 1.0,
window_type: str = POVEY,
) -> Tensor:
r"""Create a fbank from a raw audio signal. This matches the input/output of Kaldi's
compute-fbank-feats.
Args:
waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2)
blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``)
channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``)
dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set
the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``)
energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution:
this floor is applied to the zeroth component, representing the total signal energy. The floor on the
individual spectrogram elements is fixed at std::numeric_limits<float>::epsilon(). (Default: ``1.0``)
frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``)
frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``)
high_freq (float, optional): High cutoff frequency for mel bins (if <= 0, offset from Nyquist)
(Default: ``0.0``)
htk_compat (bool, optional): If true, put energy last. Warning: not sufficient to get HTK compatible features
(need to change other parameters). (Default: ``False``)
low_freq (float, optional): Low cutoff frequency for mel bins (Default: ``20.0``)
min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``)
num_mel_bins (int, optional): Number of triangular mel-frequency bins (Default: ``23``)
preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``)
raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``)
remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``)
round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input
to FFT. (Default: ``True``)
sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if
specified there) (Default: ``16000.0``)
snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``)
subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do
it this way. (Default: ``False``)
use_energy (bool, optional): Add an extra dimension with energy to the FBANK output. (Default: ``False``)
use_log_fbank (bool, optional):If true, produce log-filterbank, else produce linear. (Default: ``True``)
use_power (bool, optional): If true, use power, else use magnitude. (Default: ``True``)
vtln_high (float, optional): High inflection point in piecewise linear VTLN warping function (if
negative, offset from high-mel-freq (Default: ``-500.0``)
vtln_low (float, optional): Low inflection point in piecewise linear VTLN warping function (Default: ``100.0``)
vtln_warp (float, optional): Vtln warp factor (only applicable if vtln_map not specified) (Default: ``1.0``)
window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman')
(Default: ``'povey'``)
Returns:
Tensor: A fbank identical to what Kaldi would output. The shape is (m, ``num_mel_bins + use_energy``)
where m is calculated in _get_strided
"""
device, dtype = waveform.device, waveform.dtype
waveform, window_shift, window_size, padded_window_size = (
_get_waveform_and_window_properties(
waveform,
channel,
sample_frequency,
frame_shift,
frame_length,
round_to_power_of_two,
preemphasis_coefficient,
)
)
if len(waveform) < min_duration * sample_frequency:
# signal is too short
return torch.empty(0, device=device, dtype=dtype)
# strided_input, size (m, padded_window_size) and signal_log_energy, size (m)
strided_input, signal_log_energy = _get_window(
waveform,
padded_window_size,
window_size,
window_shift,
window_type,
blackman_coeff,
snip_edges,
raw_energy,
energy_floor,
dither,
remove_dc_offset,
preemphasis_coefficient,
)
# size (m, padded_window_size // 2 + 1, 2)
fft = torch.rfft(strided_input, 1, normalized=False, onesided=True)
power_spectrum = (
fft.pow(2).sum(2).unsqueeze(1)
) # size (m, 1, padded_window_size // 2 + 1)
if not use_power:
power_spectrum = power_spectrum.pow(0.5)
# size (num_mel_bins, padded_window_size // 2)
mel_energies, _ = get_mel_banks(
num_mel_bins,
padded_window_size,
sample_frequency,
low_freq,
high_freq,
vtln_low,
vtln_high,
vtln_warp,
)
mel_energies = mel_energies.to(device=device, dtype=dtype)
# pad right column with zeros and add dimension, size (1, num_mel_bins, padded_window_size // 2 + 1)
mel_energies = torch.nn.functional.pad(
mel_energies, (0, 1), mode="constant", value=0
).unsqueeze(0)
# sum with mel fiterbanks over the power spectrum, size (m, num_mel_bins)
mel_energies = (power_spectrum * mel_energies).sum(dim=2)
if use_log_fbank:
# avoid log of zero (which should be prevented anyway by dithering)
mel_energies = torch.max(mel_energies, _get_epsilon(device, dtype)).log()
# if use_energy then add it as the last column for htk_compat == true else first column
if use_energy:
signal_log_energy = signal_log_energy.unsqueeze(1) # size (m, 1)
# returns size (m, num_mel_bins + 1)
if htk_compat:
mel_energies = torch.cat((mel_energies, signal_log_energy), dim=1)
else:
mel_energies = torch.cat((signal_log_energy, mel_energies), dim=1)
mel_energies = _subtract_column_mean(mel_energies, subtract_mean)
return mel_energies
|
def fbank(
waveform: Tensor,
blackman_coeff: float = 0.42,
channel: int = -1,
dither: float = 0.0,
energy_floor: float = 1.0,
frame_length: float = 25.0,
frame_shift: float = 10.0,
high_freq: float = 0.0,
htk_compat: bool = False,
low_freq: float = 20.0,
min_duration: float = 0.0,
num_mel_bins: int = 23,
preemphasis_coefficient: float = 0.97,
raw_energy: bool = True,
remove_dc_offset: bool = True,
round_to_power_of_two: bool = True,
sample_frequency: float = 16000.0,
snip_edges: bool = True,
subtract_mean: bool = False,
use_energy: bool = False,
use_log_fbank: bool = True,
use_power: bool = True,
vtln_high: float = -500.0,
vtln_low: float = 100.0,
vtln_warp: float = 1.0,
window_type: str = POVEY,
) -> Tensor:
r"""Create a fbank from a raw audio signal. This matches the input/output of Kaldi's
compute-fbank-feats.
Args:
waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2)
blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``)
channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``)
dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set
the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``)
energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution:
this floor is applied to the zeroth component, representing the total signal energy. The floor on the
individual spectrogram elements is fixed at std::numeric_limits<float>::epsilon(). (Default: ``1.0``)
frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``)
frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``)
high_freq (float, optional): High cutoff frequency for mel bins (if <= 0, offset from Nyquist)
(Default: ``0.0``)
htk_compat (bool, optional): If true, put energy last. Warning: not sufficient to get HTK compatible features
(need to change other parameters). (Default: ``False``)
low_freq (float, optional): Low cutoff frequency for mel bins (Default: ``20.0``)
min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``)
num_mel_bins (int, optional): Number of triangular mel-frequency bins (Default: ``23``)
preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``)
raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``)
remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``)
round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input
to FFT. (Default: ``True``)
sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if
specified there) (Default: ``16000.0``)
snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit
in the file, and the number of frames depends on the frame_length. If False, the number of frames
depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``)
subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do
it this way. (Default: ``False``)
use_energy (bool, optional): Add an extra dimension with energy to the FBANK output. (Default: ``False``)
use_log_fbank (bool, optional):If true, produce log-filterbank, else produce linear. (Default: ``True``)
use_power (bool, optional): If true, use power, else use magnitude. (Default: ``True``)
vtln_high (float, optional): High inflection point in piecewise linear VTLN warping function (if
negative, offset from high-mel-freq (Default: ``-500.0``)
vtln_low (float, optional): Low inflection point in piecewise linear VTLN warping function (Default: ``100.0``)
vtln_warp (float, optional): Vtln warp factor (only applicable if vtln_map not specified) (Default: ``1.0``)
window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman')
(Default: ``'povey'``)
Returns:
Tensor: A fbank identical to what Kaldi would output. The shape is (m, ``num_mel_bins + use_energy``)
where m is calculated in _get_strided
"""
waveform, window_shift, window_size, padded_window_size = (
_get_waveform_and_window_properties(
waveform,
channel,
sample_frequency,
frame_shift,
frame_length,
round_to_power_of_two,
preemphasis_coefficient,
)
)
if len(waveform) < min_duration * sample_frequency:
# signal is too short
return torch.empty(0)
# strided_input, size (m, padded_window_size) and signal_log_energy, size (m)
strided_input, signal_log_energy = _get_window(
waveform,
padded_window_size,
window_size,
window_shift,
window_type,
blackman_coeff,
snip_edges,
raw_energy,
energy_floor,
dither,
remove_dc_offset,
preemphasis_coefficient,
)
# size (m, padded_window_size // 2 + 1, 2)
fft = torch.rfft(strided_input, 1, normalized=False, onesided=True)
power_spectrum = (
fft.pow(2).sum(2).unsqueeze(1)
) # size (m, 1, padded_window_size // 2 + 1)
if not use_power:
power_spectrum = power_spectrum.pow(0.5)
# size (num_mel_bins, padded_window_size // 2)
mel_energies, _ = get_mel_banks(
num_mel_bins,
padded_window_size,
sample_frequency,
low_freq,
high_freq,
vtln_low,
vtln_high,
vtln_warp,
)
# pad right column with zeros and add dimension, size (1, num_mel_bins, padded_window_size // 2 + 1)
mel_energies = torch.nn.functional.pad(
mel_energies, (0, 1), mode="constant", value=0
).unsqueeze(0)
# sum with mel fiterbanks over the power spectrum, size (m, num_mel_bins)
mel_energies = (power_spectrum * mel_energies).sum(dim=2)
if use_log_fbank:
# avoid log of zero (which should be prevented anyway by dithering)
mel_energies = torch.max(mel_energies, EPSILON).log()
# if use_energy then add it as the last column for htk_compat == true else first column
if use_energy:
signal_log_energy = signal_log_energy.unsqueeze(1) # size (m, 1)
# returns size (m, num_mel_bins + 1)
if htk_compat:
mel_energies = torch.cat((mel_energies, signal_log_energy), dim=1)
else:
mel_energies = torch.cat((signal_log_energy, mel_energies), dim=1)
mel_energies = _subtract_column_mean(mel_energies, subtract_mean)
return mel_energies
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
def _get_LR_indices_and_weights(
orig_freq: float,
new_freq: float,
output_samples_in_unit: int,
window_width: float,
lowpass_cutoff: float,
lowpass_filter_width: int,
device: torch.device,
dtype: int,
) -> Tuple[Tensor, Tensor]:
r"""Based on LinearResample::SetIndexesAndWeights where it retrieves the weights for
resampling as well as the indices in which they are valid. LinearResample (LR) means
that the output signal is at linearly spaced intervals (i.e the output signal has a
frequency of ``new_freq``). It uses sinc/bandlimited interpolation to upsample/downsample
the signal.
The reason why the same filter is not used for multiple convolutions is because the
sinc function could sampled at different points in time. For example, suppose
a signal is sampled at the timestamps (seconds)
0 16 32
and we want it to be sampled at the timestamps (seconds)
0 5 10 15 20 25 30 35
at the timestamp of 16, the delta timestamps are
16 11 6 1 4 9 14 19
at the timestamp of 32, the delta timestamps are
32 27 22 17 12 8 2 3
As we can see from deltas, the sinc function is sampled at different points of time
assuming the center of the sinc function is at 0, 16, and 32 (the deltas [..., 6, 1, 4, ....]
for 16 vs [...., 2, 3, ....] for 32)
Example, one case is when the ``orig_freq`` and ``new_freq`` are multiples of each other then
there needs to be one filter.
A windowed filter function (i.e. Hanning * sinc) because the ideal case of sinc function
has infinite support (non-zero for all values) so instead it is truncated and multiplied by
a window function which gives it less-than-perfect rolloff [1].
[1] Chapter 16: Windowed-Sinc Filters, https://www.dspguide.com/ch16/1.htm
Args:
orig_freq (float): The original frequency of the signal
new_freq (float): The desired frequency
output_samples_in_unit (int): The number of output samples in the smallest repeating unit:
num_samp_out = new_freq / Gcd(orig_freq, new_freq)
window_width (float): The width of the window which is nonzero
lowpass_cutoff (float): The filter cutoff in Hz. The filter cutoff needs to be less
than samp_rate_in_hz/2 and less than samp_rate_out_hz/2.
lowpass_filter_width (int): Controls the sharpness of the filter, more == sharper but less
efficient. We suggest around 4 to 10 for normal use
Returns:
(Tensor, Tensor): A tuple of ``min_input_index`` (which is the minimum indices
where the window is valid, size (``output_samples_in_unit``)) and ``weights`` (which is the weights
which correspond with min_input_index, size (``output_samples_in_unit``, ``max_weight_width``)).
"""
assert lowpass_cutoff < min(orig_freq, new_freq) / 2
output_t = (
torch.arange(0.0, output_samples_in_unit, device=device, dtype=dtype) / new_freq
)
min_t = output_t - window_width
max_t = output_t + window_width
min_input_index = torch.ceil(min_t * orig_freq) # size (output_samples_in_unit)
max_input_index = torch.floor(max_t * orig_freq) # size (output_samples_in_unit)
num_indices = max_input_index - min_input_index + 1 # size (output_samples_in_unit)
max_weight_width = num_indices.max()
# create a group of weights of size (output_samples_in_unit, max_weight_width)
j = torch.arange(max_weight_width, device=device, dtype=dtype).unsqueeze(0)
input_index = min_input_index.unsqueeze(1) + j
delta_t = (input_index / orig_freq) - output_t.unsqueeze(1)
weights = torch.zeros_like(delta_t)
inside_window_indices = delta_t.abs().lt(window_width)
# raised-cosine (Hanning) window with width `window_width`
weights[inside_window_indices] = 0.5 * (
1
+ torch.cos(
2
* math.pi
* lowpass_cutoff
/ lowpass_filter_width
* delta_t[inside_window_indices]
)
)
t_eq_zero_indices = delta_t.eq(0.0)
t_not_eq_zero_indices = ~t_eq_zero_indices
# sinc filter function
weights[t_not_eq_zero_indices] *= torch.sin(
2 * math.pi * lowpass_cutoff * delta_t[t_not_eq_zero_indices]
) / (math.pi * delta_t[t_not_eq_zero_indices])
# limit of the function at t = 0
weights[t_eq_zero_indices] *= 2 * lowpass_cutoff
weights /= orig_freq # size (output_samples_in_unit, max_weight_width)
return min_input_index, weights
|
def _get_LR_indices_and_weights(
orig_freq: float,
new_freq: float,
output_samples_in_unit: int,
window_width: float,
lowpass_cutoff: float,
lowpass_filter_width: int,
) -> Tuple[Tensor, Tensor]:
r"""Based on LinearResample::SetIndexesAndWeights where it retrieves the weights for
resampling as well as the indices in which they are valid. LinearResample (LR) means
that the output signal is at linearly spaced intervals (i.e the output signal has a
frequency of ``new_freq``). It uses sinc/bandlimited interpolation to upsample/downsample
the signal.
The reason why the same filter is not used for multiple convolutions is because the
sinc function could sampled at different points in time. For example, suppose
a signal is sampled at the timestamps (seconds)
0 16 32
and we want it to be sampled at the timestamps (seconds)
0 5 10 15 20 25 30 35
at the timestamp of 16, the delta timestamps are
16 11 6 1 4 9 14 19
at the timestamp of 32, the delta timestamps are
32 27 22 17 12 8 2 3
As we can see from deltas, the sinc function is sampled at different points of time
assuming the center of the sinc function is at 0, 16, and 32 (the deltas [..., 6, 1, 4, ....]
for 16 vs [...., 2, 3, ....] for 32)
Example, one case is when the ``orig_freq`` and ``new_freq`` are multiples of each other then
there needs to be one filter.
A windowed filter function (i.e. Hanning * sinc) because the ideal case of sinc function
has infinite support (non-zero for all values) so instead it is truncated and multiplied by
a window function which gives it less-than-perfect rolloff [1].
[1] Chapter 16: Windowed-Sinc Filters, https://www.dspguide.com/ch16/1.htm
Args:
orig_freq (float): The original frequency of the signal
new_freq (float): The desired frequency
output_samples_in_unit (int): The number of output samples in the smallest repeating unit:
num_samp_out = new_freq / Gcd(orig_freq, new_freq)
window_width (float): The width of the window which is nonzero
lowpass_cutoff (float): The filter cutoff in Hz. The filter cutoff needs to be less
than samp_rate_in_hz/2 and less than samp_rate_out_hz/2.
lowpass_filter_width (int): Controls the sharpness of the filter, more == sharper but less
efficient. We suggest around 4 to 10 for normal use
Returns:
(Tensor, Tensor): A tuple of ``min_input_index`` (which is the minimum indices
where the window is valid, size (``output_samples_in_unit``)) and ``weights`` (which is the weights
which correspond with min_input_index, size (``output_samples_in_unit``, ``max_weight_width``)).
"""
assert lowpass_cutoff < min(orig_freq, new_freq) / 2
output_t = torch.arange(0.0, output_samples_in_unit) / new_freq
min_t = output_t - window_width
max_t = output_t + window_width
min_input_index = torch.ceil(min_t * orig_freq) # size (output_samples_in_unit)
max_input_index = torch.floor(max_t * orig_freq) # size (output_samples_in_unit)
num_indices = max_input_index - min_input_index + 1 # size (output_samples_in_unit)
max_weight_width = num_indices.max()
# create a group of weights of size (output_samples_in_unit, max_weight_width)
j = torch.arange(max_weight_width).unsqueeze(0)
input_index = min_input_index.unsqueeze(1) + j
delta_t = (input_index / orig_freq) - output_t.unsqueeze(1)
weights = torch.zeros_like(delta_t)
inside_window_indices = delta_t.abs().lt(window_width)
# raised-cosine (Hanning) window with width `window_width`
weights[inside_window_indices] = 0.5 * (
1
+ torch.cos(
2
* math.pi
* lowpass_cutoff
/ lowpass_filter_width
* delta_t[inside_window_indices]
)
)
t_eq_zero_indices = delta_t.eq(0.0)
t_not_eq_zero_indices = ~t_eq_zero_indices
# sinc filter function
weights[t_not_eq_zero_indices] *= torch.sin(
2 * math.pi * lowpass_cutoff * delta_t[t_not_eq_zero_indices]
) / (math.pi * delta_t[t_not_eq_zero_indices])
# limit of the function at t = 0
weights[t_eq_zero_indices] *= 2 * lowpass_cutoff
weights /= orig_freq # size (output_samples_in_unit, max_weight_width)
return min_input_index, weights
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
def resample_waveform(
waveform: Tensor, orig_freq: float, new_freq: float, lowpass_filter_width: int = 6
) -> Tensor:
r"""Resamples the waveform at the new frequency. This matches Kaldi's OfflineFeatureTpl ResampleWaveform
which uses a LinearResample (resample a signal at linearly spaced intervals to upsample/downsample
a signal). LinearResample (LR) means that the output signal is at linearly spaced intervals (i.e
the output signal has a frequency of ``new_freq``). It uses sinc/bandlimited interpolation to
upsample/downsample the signal.
https://ccrma.stanford.edu/~jos/resample/Theory_Ideal_Bandlimited_Interpolation.html
https://github.com/kaldi-asr/kaldi/blob/master/src/feat/resample.h#L56
Args:
waveform (Tensor): The input signal of size (c, n)
orig_freq (float): The original frequency of the signal
new_freq (float): The desired frequency
lowpass_filter_width (int, optional): Controls the sharpness of the filter, more == sharper
but less efficient. We suggest around 4 to 10 for normal use. (Default: ``6``)
Returns:
Tensor: The waveform at the new frequency
"""
device, dtype = waveform.device, waveform.dtype
assert waveform.dim() == 2
assert orig_freq > 0.0 and new_freq > 0.0
min_freq = min(orig_freq, new_freq)
lowpass_cutoff = 0.99 * 0.5 * min_freq
assert lowpass_cutoff * 2 <= min_freq
base_freq = math.gcd(int(orig_freq), int(new_freq))
input_samples_in_unit = int(orig_freq) // base_freq
output_samples_in_unit = int(new_freq) // base_freq
window_width = lowpass_filter_width / (2.0 * lowpass_cutoff)
first_indices, weights = _get_LR_indices_and_weights(
orig_freq,
new_freq,
output_samples_in_unit,
window_width,
lowpass_cutoff,
lowpass_filter_width,
device,
dtype,
)
assert first_indices.dim() == 1
# TODO figure a better way to do this. conv1d reaches every element i*stride + padding
# all the weights have the same stride but have different padding.
# Current implementation takes the input and applies the various padding before
# doing a conv1d for that specific weight.
conv_stride = input_samples_in_unit
conv_transpose_stride = output_samples_in_unit
num_channels, wave_len = waveform.size()
window_size = weights.size(1)
tot_output_samp = _get_num_LR_output_samples(wave_len, orig_freq, new_freq)
output = torch.zeros((num_channels, tot_output_samp), device=device, dtype=dtype)
# eye size: (num_channels, num_channels, 1)
eye = torch.eye(num_channels, device=device, dtype=dtype).unsqueeze(2)
for i in range(first_indices.size(0)):
wave_to_conv = waveform
first_index = int(first_indices[i].item())
if first_index >= 0:
# trim the signal as the filter will not be applied before the first_index
wave_to_conv = wave_to_conv[..., first_index:]
# pad the right of the signal to allow partial convolutions meaning compute
# values for partial windows (e.g. end of the window is outside the signal length)
max_unit_index = (tot_output_samp - 1) // output_samples_in_unit
end_index_of_last_window = max_unit_index * conv_stride + window_size
current_wave_len = wave_len - first_index
right_padding = max(0, end_index_of_last_window + 1 - current_wave_len)
left_padding = max(0, -first_index)
if left_padding != 0 or right_padding != 0:
wave_to_conv = torch.nn.functional.pad(
wave_to_conv, (left_padding, right_padding)
)
conv_wave = torch.nn.functional.conv1d(
wave_to_conv.unsqueeze(0),
weights[i].repeat(num_channels, 1, 1),
stride=conv_stride,
groups=num_channels,
)
# we want conv_wave[:, i] to be at output[:, i + n*conv_transpose_stride]
dilated_conv_wave = torch.nn.functional.conv_transpose1d(
conv_wave, eye, stride=conv_transpose_stride
).squeeze(0)
# pad dilated_conv_wave so it reaches the output length if needed.
dialated_conv_wave_len = dilated_conv_wave.size(-1)
left_padding = i
right_padding = max(
0, tot_output_samp - (left_padding + dialated_conv_wave_len)
)
dilated_conv_wave = torch.nn.functional.pad(
dilated_conv_wave, (left_padding, right_padding)
)[..., :tot_output_samp]
output += dilated_conv_wave
return output
|
def resample_waveform(
waveform: Tensor, orig_freq: float, new_freq: float, lowpass_filter_width: int = 6
) -> Tensor:
r"""Resamples the waveform at the new frequency. This matches Kaldi's OfflineFeatureTpl ResampleWaveform
which uses a LinearResample (resample a signal at linearly spaced intervals to upsample/downsample
a signal). LinearResample (LR) means that the output signal is at linearly spaced intervals (i.e
the output signal has a frequency of ``new_freq``). It uses sinc/bandlimited interpolation to
upsample/downsample the signal.
https://ccrma.stanford.edu/~jos/resample/Theory_Ideal_Bandlimited_Interpolation.html
https://github.com/kaldi-asr/kaldi/blob/master/src/feat/resample.h#L56
Args:
waveform (Tensor): The input signal of size (c, n)
orig_freq (float): The original frequency of the signal
new_freq (float): The desired frequency
lowpass_filter_width (int, optional): Controls the sharpness of the filter, more == sharper
but less efficient. We suggest around 4 to 10 for normal use. (Default: ``6``)
Returns:
Tensor: The waveform at the new frequency
"""
device, dtype = waveform.device, waveform.dtype
assert waveform.dim() == 2
assert orig_freq > 0.0 and new_freq > 0.0
min_freq = min(orig_freq, new_freq)
lowpass_cutoff = 0.99 * 0.5 * min_freq
assert lowpass_cutoff * 2 <= min_freq
base_freq = math.gcd(int(orig_freq), int(new_freq))
input_samples_in_unit = int(orig_freq) // base_freq
output_samples_in_unit = int(new_freq) // base_freq
window_width = lowpass_filter_width / (2.0 * lowpass_cutoff)
first_indices, weights = _get_LR_indices_and_weights(
orig_freq,
new_freq,
output_samples_in_unit,
window_width,
lowpass_cutoff,
lowpass_filter_width,
)
weights = weights.to(
device=device, dtype=dtype
) # TODO Create weights on device directly
assert first_indices.dim() == 1
# TODO figure a better way to do this. conv1d reaches every element i*stride + padding
# all the weights have the same stride but have different padding.
# Current implementation takes the input and applies the various padding before
# doing a conv1d for that specific weight.
conv_stride = input_samples_in_unit
conv_transpose_stride = output_samples_in_unit
num_channels, wave_len = waveform.size()
window_size = weights.size(1)
tot_output_samp = _get_num_LR_output_samples(wave_len, orig_freq, new_freq)
output = torch.zeros((num_channels, tot_output_samp), device=device, dtype=dtype)
# eye size: (num_channels, num_channels, 1)
eye = torch.eye(num_channels, device=device, dtype=dtype).unsqueeze(2)
for i in range(first_indices.size(0)):
wave_to_conv = waveform
first_index = int(first_indices[i].item())
if first_index >= 0:
# trim the signal as the filter will not be applied before the first_index
wave_to_conv = wave_to_conv[..., first_index:]
# pad the right of the signal to allow partial convolutions meaning compute
# values for partial windows (e.g. end of the window is outside the signal length)
max_unit_index = (tot_output_samp - 1) // output_samples_in_unit
end_index_of_last_window = max_unit_index * conv_stride + window_size
current_wave_len = wave_len - first_index
right_padding = max(0, end_index_of_last_window + 1 - current_wave_len)
left_padding = max(0, -first_index)
if left_padding != 0 or right_padding != 0:
wave_to_conv = torch.nn.functional.pad(
wave_to_conv, (left_padding, right_padding)
)
conv_wave = torch.nn.functional.conv1d(
wave_to_conv.unsqueeze(0),
weights[i].repeat(num_channels, 1, 1),
stride=conv_stride,
groups=num_channels,
)
# we want conv_wave[:, i] to be at output[:, i + n*conv_transpose_stride]
dilated_conv_wave = torch.nn.functional.conv_transpose1d(
conv_wave, eye, stride=conv_transpose_stride
).squeeze(0)
# pad dilated_conv_wave so it reaches the output length if needed.
dialated_conv_wave_len = dilated_conv_wave.size(-1)
left_padding = i
right_padding = max(
0, tot_output_samp - (left_padding + dialated_conv_wave_len)
)
dilated_conv_wave = torch.nn.functional.pad(
dilated_conv_wave, (left_padding, right_padding)
)[..., :tot_output_samp]
output += dilated_conv_wave
return output
|
https://github.com/pytorch/audio/issues/613
|
Traceback (most recent call last):
File "./fbank_test.py", line 27, in <module>
main()
File "./fbank_test.py", line 23, in main
fbank = torchaudio.compliance.kaldi.fbank(waveform)
File "/xxxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 554, in fbank
snip_edges, raw_energy, energy_floor, dither, remove_dc_offset, preemphasis_coefficient)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 180, in _get_window
signal_log_energy = _get_log_energy(strided_input, EPSILON, energy_floor) # size (m)
File "/xxxxx/py35/lib/python3.5/site-packages/torchaudio/compliance/kaldi.py",
line 113, in _get_log_energy
log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m)
RuntimeError: iter.device(arg).is_cuda() INTERNAL ASSERT FAILED at /pytorch/aten/src/ATen/native/cuda/Loops.cuh:56,
please report a bug to PyTorch.
|
RuntimeError
|
async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
print: Optional[Callable[..., None]] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
# An internal function to actually do all dirty job for application running
if asyncio.iscoroutine(app):
app = await app # type: ignore
app = cast(Application, app)
runner = AppRunner(
app,
handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
)
await runner.setup()
sites = [] # type: List[BaseSite]
try:
if host is not None:
if isinstance(host, (str, bytes, bytearray, memoryview)):
sites.append(
TCPSite(
runner,
host,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
else:
for h in host:
sites.append(
TCPSite(
runner,
h,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
elif path is None and sock is None or port is not None:
sites.append(
TCPSite(
runner,
port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
if path is not None:
if isinstance(path, (str, bytes, bytearray, memoryview)):
sites.append(
UnixSite(
runner,
path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for p in path:
sites.append(
UnixSite(
runner,
p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
if sock is not None:
if not isinstance(sock, Iterable):
sites.append(
SockSite(
runner,
sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for s in sock:
sites.append(
SockSite(
runner,
s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
for site in sites:
await site.start()
if print: # pragma: no branch
names = sorted(str(s.name) for s in runner.sites)
print(
"======== Running on {} ========\n(Press CTRL+C to quit)".format(
", ".join(names)
)
)
# sleep forever by 1 hour intervals,
# on Windows before Python 3.8 wake up every 1 second to handle
# Ctrl+C smoothly
if sys.platform == "win32" and sys.version_info < (3, 8):
delay = 1
else:
delay = 3600
while True:
await asyncio.sleep(delay)
finally:
await runner.cleanup()
|
async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
print: Optional[Callable[..., None]] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
# An internal function to actually do all dirty job for application running
if asyncio.iscoroutine(app):
app = await app # type: ignore
app = cast(Application, app)
runner = AppRunner(
app,
handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
)
await runner.setup()
sites = [] # type: List[BaseSite]
try:
if host is not None:
if isinstance(host, (str, bytes, bytearray, memoryview)):
sites.append(
TCPSite(
runner,
host,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
else:
for h in host:
sites.append(
TCPSite(
runner,
h,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
elif path is None and sock is None or port is not None:
sites.append(
TCPSite(
runner,
port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
if path is not None:
if isinstance(path, (str, bytes, bytearray, memoryview)):
sites.append(
UnixSite(
runner,
path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for p in path:
sites.append(
UnixSite(
runner,
p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
if sock is not None:
if not isinstance(sock, Iterable):
sites.append(
SockSite(
runner,
sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for s in sock:
sites.append(
SockSite(
runner,
s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
for site in sites:
await site.start()
if print: # pragma: no branch
names = sorted(str(s.name) for s in runner.sites)
print(
"======== Running on {} ========\n(Press CTRL+C to quit)".format(
", ".join(names)
)
)
# sleep forever by 1 hour intervals,
# on Windows before Python 3.8 wake up every 1 second to handle
# Ctrl+C smoothly
if sys.platform == "win32" and sys.version_info < 3.8:
delay = 1
else:
delay = 3600
while True:
await asyncio.sleep(delay)
finally:
await runner.cleanup()
|
https://github.com/aio-libs/aiohttp/issues/5127
|
File "C:\_dev\.venv\lib\site-packages\aiohttp\web.py", line 379, in _run_app
if sys.platform == "win32" and sys.version_info < 3.8:
TypeError: '<' not supported between instances of 'sys.version_info' and 'float'
|
TypeError
|
async def sendfile(self) -> None:
assert self.transport is not None
loop = self.loop
data = b"".join(self._sendfile_buffer)
if hasattr(loop, "sendfile"):
# Python 3.7+
self.transport.write(data)
if self._count != 0:
await loop.sendfile(self.transport, self._fobj, self._offset, self._count)
await super().write_eof()
return
self._fobj.seek(self._offset)
out_socket = self.transport.get_extra_info("socket").dup()
out_socket.setblocking(False)
out_fd = out_socket.fileno()
try:
await loop.sock_sendall(out_socket, data)
if not self._do_sendfile(out_fd):
fut = loop.create_future()
fut.add_done_callback(partial(self._done_fut, out_fd))
loop.add_writer(out_fd, self._sendfile_cb, fut, out_fd)
await fut
except asyncio.CancelledError:
raise
except Exception:
server_logger.debug("Socket error")
self.transport.close()
finally:
out_socket.close()
await super().write_eof()
|
async def sendfile(self) -> None:
assert self.transport is not None
loop = self.loop
data = b"".join(self._sendfile_buffer)
if hasattr(loop, "sendfile"):
# Python 3.7+
self.transport.write(data)
await loop.sendfile(self.transport, self._fobj, self._offset, self._count)
await super().write_eof()
return
self._fobj.seek(self._offset)
out_socket = self.transport.get_extra_info("socket").dup()
out_socket.setblocking(False)
out_fd = out_socket.fileno()
try:
await loop.sock_sendall(out_socket, data)
if not self._do_sendfile(out_fd):
fut = loop.create_future()
fut.add_done_callback(partial(self._done_fut, out_fd))
loop.add_writer(out_fd, self._sendfile_cb, fut, out_fd)
await fut
except asyncio.CancelledError:
raise
except Exception:
server_logger.debug("Socket error")
self.transport.close()
finally:
out_socket.close()
await super().write_eof()
|
https://github.com/aio-libs/aiohttp/issues/5124
|
Traceback (most recent call last):
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 461, in start
resp, reset = await task
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 414, in _handle_request
reset = await self.finish_response(request, resp, start_time)
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 568, in finish_response
await prepare_meth(request)
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py", line 373, in prepare
return await self._sendfile(request, fobj, offset, count)
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py", line 191, in _sendfile_system
await writer.sendfile()
File "/__w/core/core/venv/lib/python3.7/site-packages/aiohttp/web_fileresponse.py", line 107, in sendfile
self._count
File "/usr/local/lib/python3.7/asyncio/base_events.py", line 1044, in sendfile
offset, count)
File "/usr/local/lib/python3.7/asyncio/selector_events.py", line 551, in _sendfile_native
fallback=False)
File "/usr/local/lib/python3.7/asyncio/base_events.py", line 788, in sock_sendfile
self._check_sendfile_params(sock, file, offset, count)
File "/usr/local/lib/python3.7/asyncio/base_events.py", line 842, in _check_sendfile_params
"count must be a positive integer (got {!r})".format(count))
ValueError: count must be a positive integer (got 0)
|
ValueError
|
async def resolve(
self, hostname: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
hostname, port, type=socket.SOCK_STREAM, family=family
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6 and address[3]: # type: ignore
# This is essential for link-local IPv6 addresses.
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
# getnameinfo() unconditionally, but performance makes sense.
host, _port = socket.getnameinfo(
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
)
port = int(_port)
else:
host, port = address[:2]
hosts.append(
{
"hostname": hostname,
"host": host,
"port": port,
"family": family,
"proto": proto,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
return hosts
|
async def resolve(
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
host, port, type=socket.SOCK_STREAM, family=family
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6 and address[3]: # type: ignore
# This is essential for link-local IPv6 addresses.
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
# getnameinfo() unconditionally, but performance makes sense.
host, _port = socket.getnameinfo(
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
)
port = int(_port)
else:
host, port = address[:2]
hosts.append(
{
"hostname": host,
"host": host,
"port": port,
"family": family,
"proto": proto,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
return hosts
|
https://github.com/aio-libs/aiohttp/issues/5110
|
Traceback (most recent call last):
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 946, in _wrap_create_connection
return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa
File "/usr/lib/python3.8/asyncio/base_events.py", line 1050, in create_connection
transport, protocol = await self._create_connection_transport(
File "/usr/lib/python3.8/asyncio/base_events.py", line 1080, in _create_connection_transport await waiter File "/usr/lib/python3.8/asyncio/sslproto.py", line 529, in data_received
ssldata, appdata = self._sslpipe.feed_ssldata(data)
File "/usr/lib/python3.8/asyncio/sslproto.py", line 189, in feed_ssldata
self._sslobj.do_handshake()
File "/usr/lib/python3.8/ssl.py", line 944, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: IP address mismatch, certificate is not valid
for '151.101.128.223'. (_ssl.c:1124)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "t.py", line 12, in <module>
asyncio.run(main())
File "/usr/lib/python3.8/asyncio/runners.py", line 44, in run
return loop.run_until_complete(main)
File "/usr/lib/python3.8/asyncio/base_events.py", line 616, in run_until_complete
return future.result()
File "t.py", line 8, in main
r = await session.get('https://pypi.org/')
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/client.py", line 490, in _request
conn = await self._connector.connect(
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 528, in connect
proto = await self._create_connection(req, traces, timeout)
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 868, in _create_connection
_, proto = await self._create_direct_connection(
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 1023, in _create_direct_connection
raise last_exc
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 999, in _create_direct_connection
transp, proto = await self._wrap_create_connection(
File "/home/lilydjwg/tmpfs/venv/lib/python3.8/site-packages/aiohttp/connector.py", line 948, in _wrap_create_connection
raise ClientConnectorCertificateError(
aiohttp.client_exceptions.ClientConnectorCertificateError: Cannot connect to host pypi.org:443 ssl:True [SSLCertVerificationError: (1, "[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: IP address mismatch, certificate is not valid for '151.101.128.223'. (_ssl.c:1124)")]
|
ssl.SSLCertVerificationError
|
def is_connected(self) -> bool:
return self.transport is not None and not self.transport.is_closing()
|
def is_connected(self) -> bool:
return self.transport is not None
|
https://github.com/aio-libs/aiohttp/issues/4587
|
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 502, in _request
resp = await req.send(conn)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client_reqrep.py", line 629, in send
await writer.write_headers(status_line, self.headers)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/http_writer.py", line 112, in write_headers
self._write(buf)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/http_writer.py", line 67, in _write
raise ConnectionResetError('Cannot write to closing transport')
ConnectionResetError: Cannot write to closing transport
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 75, in request
timeout=cls.request_timeout(timeout))
File "<string>", line 5, in wrapper
File "/home/ubuntu/.pyenv/versions/3.7.2/envs/gateway/lib/python3.7/site-packages/newrelic/hooks/framework_aiohttp.py", line 260, in _coro
response = yield from wrapped(*args, **kwargs)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 514, in _request
raise ClientOSError(*exc.args) from exc
aiohttp.client_exceptions.ClientOSError: Cannot write to closing transport
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/decorators/http.py", line 41, in f
result = await wait_for(shield(wrapped_func(self, *args, **kwargs)), api_timeout)
File "/home/ubuntu/.pyenv/versions/3.7.2/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/home/ubuntu/1mg/API-Gateway/gateway/utils.py", line 189, in wrapper
result = await func(*args, **kwargs)
File "/home/ubuntu/1mg/API-Gateway/gateway/managers/app_context_manager.py", line 98, in _build_shared_context
result = await func(*args, **kwargs)
File "/home/ubuntu/1mg/API-Gateway/gateway/handlers/mingler/service_handler.py", line 14, in services
response = await Services.get_services(params)
File "/home/ubuntu/1mg/API-Gateway/gateway/service_clients/mingler/services.py", line 13, in get_services
result = await cls.post(path, data=params, headers=headers)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 144, in post
response_headers_list=response_headers_list)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 93, in request
raise HTTPRequestException(error={'message': exception_message})
hydra.exceptions.HTTPRequestException
|
ConnectionResetError
|
def _cleanup(self) -> None:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
now = self._loop.time()
timeout = self._keepalive_timeout
if self._conns:
connections = {}
deadline = now - timeout
for key, conns in self._conns.items():
alive = []
for proto, use_time in conns:
if proto.is_connected():
if use_time - deadline < 0:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
alive.append((proto, use_time))
else:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
if alive:
connections[key] = alive
self._conns = connections
if self._conns:
self._cleanup_handle = helpers.weakref_handle(
self, "_cleanup", timeout, self._loop
)
|
def _cleanup(self) -> None:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
now = self._loop.time()
timeout = self._keepalive_timeout
if self._conns:
connections = {}
deadline = now - timeout
for key, conns in self._conns.items():
alive = []
for proto, use_time in conns:
if proto.is_connected():
if use_time - deadline < 0:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
alive.append((proto, use_time))
if alive:
connections[key] = alive
self._conns = connections
if self._conns:
self._cleanup_handle = helpers.weakref_handle(
self, "_cleanup", timeout, self._loop
)
|
https://github.com/aio-libs/aiohttp/issues/4587
|
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 502, in _request
resp = await req.send(conn)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client_reqrep.py", line 629, in send
await writer.write_headers(status_line, self.headers)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/http_writer.py", line 112, in write_headers
self._write(buf)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/http_writer.py", line 67, in _write
raise ConnectionResetError('Cannot write to closing transport')
ConnectionResetError: Cannot write to closing transport
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 75, in request
timeout=cls.request_timeout(timeout))
File "<string>", line 5, in wrapper
File "/home/ubuntu/.pyenv/versions/3.7.2/envs/gateway/lib/python3.7/site-packages/newrelic/hooks/framework_aiohttp.py", line 260, in _coro
response = yield from wrapped(*args, **kwargs)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 514, in _request
raise ClientOSError(*exc.args) from exc
aiohttp.client_exceptions.ClientOSError: Cannot write to closing transport
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/decorators/http.py", line 41, in f
result = await wait_for(shield(wrapped_func(self, *args, **kwargs)), api_timeout)
File "/home/ubuntu/.pyenv/versions/3.7.2/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/home/ubuntu/1mg/API-Gateway/gateway/utils.py", line 189, in wrapper
result = await func(*args, **kwargs)
File "/home/ubuntu/1mg/API-Gateway/gateway/managers/app_context_manager.py", line 98, in _build_shared_context
result = await func(*args, **kwargs)
File "/home/ubuntu/1mg/API-Gateway/gateway/handlers/mingler/service_handler.py", line 14, in services
response = await Services.get_services(params)
File "/home/ubuntu/1mg/API-Gateway/gateway/service_clients/mingler/services.py", line 13, in get_services
result = await cls.post(path, data=params, headers=headers)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 144, in post
response_headers_list=response_headers_list)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 93, in request
raise HTTPRequestException(error={'message': exception_message})
hydra.exceptions.HTTPRequestException
|
ConnectionResetError
|
def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
try:
conns = self._conns[key]
except KeyError:
return None
t1 = self._loop.time()
while conns:
proto, t0 = conns.pop()
if proto.is_connected():
if t1 - t0 > self._keepalive_timeout:
transport = proto.transport
proto.close()
# only for SSL transports
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
if not conns:
# The very last connection was reclaimed: drop the key
del self._conns[key]
return proto
else:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
# No more connections: drop the key
del self._conns[key]
return None
|
def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
try:
conns = self._conns[key]
except KeyError:
return None
t1 = self._loop.time()
while conns:
proto, t0 = conns.pop()
if proto.is_connected():
if t1 - t0 > self._keepalive_timeout:
transport = proto.transport
proto.close()
# only for SSL transports
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
if not conns:
# The very last connection was reclaimed: drop the key
del self._conns[key]
return proto
# No more connections: drop the key
del self._conns[key]
return None
|
https://github.com/aio-libs/aiohttp/issues/4587
|
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 502, in _request
resp = await req.send(conn)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client_reqrep.py", line 629, in send
await writer.write_headers(status_line, self.headers)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/http_writer.py", line 112, in write_headers
self._write(buf)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/http_writer.py", line 67, in _write
raise ConnectionResetError('Cannot write to closing transport')
ConnectionResetError: Cannot write to closing transport
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 75, in request
timeout=cls.request_timeout(timeout))
File "<string>", line 5, in wrapper
File "/home/ubuntu/.pyenv/versions/3.7.2/envs/gateway/lib/python3.7/site-packages/newrelic/hooks/framework_aiohttp.py", line 260, in _coro
response = yield from wrapped(*args, **kwargs)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/aiohttp/client.py", line 514, in _request
raise ClientOSError(*exc.args) from exc
aiohttp.client_exceptions.ClientOSError: Cannot write to closing transport
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/decorators/http.py", line 41, in f
result = await wait_for(shield(wrapped_func(self, *args, **kwargs)), api_timeout)
File "/home/ubuntu/.pyenv/versions/3.7.2/lib/python3.7/asyncio/tasks.py", line 416, in wait_for
return fut.result()
File "/home/ubuntu/1mg/API-Gateway/gateway/utils.py", line 189, in wrapper
result = await func(*args, **kwargs)
File "/home/ubuntu/1mg/API-Gateway/gateway/managers/app_context_manager.py", line 98, in _build_shared_context
result = await func(*args, **kwargs)
File "/home/ubuntu/1mg/API-Gateway/gateway/handlers/mingler/service_handler.py", line 14, in services
response = await Services.get_services(params)
File "/home/ubuntu/1mg/API-Gateway/gateway/service_clients/mingler/services.py", line 13, in get_services
result = await cls.post(path, data=params, headers=headers)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 144, in post
response_headers_list=response_headers_list)
File "/home/ubuntu/.pyenv/versions/gateway/lib/python3.7/site-packages/hydra/base_api_request.py", line 93, in request
raise HTTPRequestException(error={'message': exception_message})
hydra.exceptions.HTTPRequestException
|
ConnectionResetError
|
def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
chunk_len = len(chunk)
if required >= chunk_len:
self._length = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
if self._length == 0:
self.payload.feed_eof()
return True, b""
else:
self._length = 0
self.payload.feed_data(chunk[:required], required)
self.payload.feed_eof()
return True, chunk[required:]
# Chunked transfer encoding parser
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b""
while chunk:
# read next chunk size
if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
pos = chunk.find(SEP)
if pos >= 0:
i = chunk.find(CHUNK_EXT, 0, pos)
if i >= 0:
size_b = chunk[:i] # strip chunk-extensions
else:
size_b = chunk[:pos]
try:
size = int(bytes(size_b), 16)
except ValueError:
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
self.payload.set_exception(exc)
raise exc from None
chunk = chunk[pos + 2 :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b""
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
required = self._chunk_size
chunk_len = len(chunk)
if required > chunk_len:
self._chunk_size = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
return False, b""
else:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
chunk = chunk[required:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
if chunk[:2] == SEP:
chunk = chunk[2:]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b""
# if stream does not contain trailer, after 0\r\n
# we should get another \r\n otherwise
# trailers needs to be skiped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
head = chunk[:2]
if head == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[2:]
# Both CR and LF, or only LF may not be received yet. It is
# expected that CRLF or LF will be shown at the very first
# byte next time, otherwise trailers should come. The last
# CRLF which marks the end of response might not be
# contained in the same TCP segment which delivered the
# size indicator.
if not head:
return False, b""
if head == SEP[:1]:
self._chunk_tail = head
return False, b""
self._chunk = ChunkState.PARSE_TRAILERS
# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos + 2 :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
return False, b""
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
return False, b""
|
def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
chunk_len = len(chunk)
if required >= chunk_len:
self._length = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
if self._length == 0:
self.payload.feed_eof()
return True, b""
else:
self._length = 0
self.payload.feed_data(chunk[:required], required)
self.payload.feed_eof()
return True, chunk[required:]
# Chunked transfer encoding parser
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b""
while chunk:
# read next chunk size
if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
pos = chunk.find(SEP)
if pos >= 0:
i = chunk.find(CHUNK_EXT, 0, pos)
if i >= 0:
size_b = chunk[:i] # strip chunk-extensions
else:
size_b = chunk[:pos]
try:
size = int(bytes(size_b), 16)
except ValueError:
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
self.payload.set_exception(exc)
raise exc from None
chunk = chunk[pos + 2 :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b""
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
required = self._chunk_size
chunk_len = len(chunk)
if required > chunk_len:
self._chunk_size = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
return False, b""
else:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
chunk = chunk[required:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
if chunk[:2] == SEP:
chunk = chunk[2:]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b""
# if stream does not contain trailer, after 0\r\n
# we should get another \r\n otherwise
# trailers needs to be skiped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
if chunk[:2] == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[2:]
else:
self._chunk = ChunkState.PARSE_TRAILERS
# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos + 2 :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
return False, b""
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
return False, b""
|
https://github.com/aio-libs/aiohttp/issues/4630
|
(False, b'')
(False, b'')
Traceback (most recent call last):
File "aiohttp-test.py", line 32, in <module>
print(repr(parser.feed_eof()))
File ".../lib/python3.6/site-packages/aiohttp/http_parser.py", line 575, in feed_eof
"Not enough data for satisfy transfer length header.")
aiohttp.http_exceptions.TransferEncodingError: 400, message='Not enough data for satisfy transfer length header.'
|
aiohttp.http_exceptions.TransferEncodingError
|
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
try:
return bytes_body.decode(encoding)
except LookupError:
raise HTTPUnsupportedMediaType()
|
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
return bytes_body.decode(encoding)
|
https://github.com/aio-libs/aiohttp/issues/3562
|
Error handling request
Traceback (most recent call last):
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 418, in start
resp = await task
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_app.py", line 458, in _handle
resp = await handler(request)
File "/home/pentusha/projects/test/api/app2.py", line 5, in post_handler
body = await request.text()
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_request.py", line 579, in text
return bytes_body.decode(encoding)
LookupError: unknown encoding: test
|
LookupError
|
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
if self._method not in self.POST_METHODS:
self._post = MultiDictProxy(MultiDict())
return self._post
content_type = self.content_type
if content_type not in (
"",
"application/x-www-form-urlencoded",
"multipart/form-data",
):
self._post = MultiDictProxy(MultiDict())
return self._post
out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
if content_type == "multipart/form-data":
multipart = await self.multipart()
max_size = self._client_max_size
field = await multipart.next()
while field is not None:
size = 0
content_type = field.headers.get(hdrs.CONTENT_TYPE)
if field.filename:
# store file in temp file
tmp = tempfile.TemporaryFile()
chunk = await field.read_chunk(size=2**16)
while chunk:
chunk = field.decode(chunk)
tmp.write(chunk)
size += len(chunk)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2**16)
tmp.seek(0)
ff = FileField(
field.name,
field.filename,
cast(io.BufferedReader, tmp),
content_type,
field.headers,
)
out.add(field.name, ff)
else:
value = await field.read(decode=True)
if content_type is None or content_type.startswith("text/"):
charset = field.get_charset(default="utf-8")
value = value.decode(charset)
out.add(field.name, value)
size += len(value)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(max_size=max_size, actual_size=size)
field = await multipart.next()
else:
data = await self.read()
if data:
charset = self.charset or "utf-8"
bytes_query = data.rstrip()
try:
query = bytes_query.decode(charset)
except LookupError:
raise HTTPUnsupportedMediaType()
out.extend(parse_qsl(qs=query, keep_blank_values=True, encoding=charset))
self._post = MultiDictProxy(out)
return self._post
|
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
if self._method not in self.POST_METHODS:
self._post = MultiDictProxy(MultiDict())
return self._post
content_type = self.content_type
if content_type not in (
"",
"application/x-www-form-urlencoded",
"multipart/form-data",
):
self._post = MultiDictProxy(MultiDict())
return self._post
out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
if content_type == "multipart/form-data":
multipart = await self.multipart()
max_size = self._client_max_size
field = await multipart.next()
while field is not None:
size = 0
content_type = field.headers.get(hdrs.CONTENT_TYPE)
if field.filename:
# store file in temp file
tmp = tempfile.TemporaryFile()
chunk = await field.read_chunk(size=2**16)
while chunk:
chunk = field.decode(chunk)
tmp.write(chunk)
size += len(chunk)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2**16)
tmp.seek(0)
ff = FileField(
field.name,
field.filename,
cast(io.BufferedReader, tmp),
content_type,
field.headers,
)
out.add(field.name, ff)
else:
value = await field.read(decode=True)
if content_type is None or content_type.startswith("text/"):
charset = field.get_charset(default="utf-8")
value = value.decode(charset)
out.add(field.name, value)
size += len(value)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(max_size=max_size, actual_size=size)
field = await multipart.next()
else:
data = await self.read()
if data:
charset = self.charset or "utf-8"
out.extend(
parse_qsl(
data.rstrip().decode(charset),
keep_blank_values=True,
encoding=charset,
)
)
self._post = MultiDictProxy(out)
return self._post
|
https://github.com/aio-libs/aiohttp/issues/3562
|
Error handling request
Traceback (most recent call last):
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 418, in start
resp = await task
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_app.py", line 458, in _handle
resp = await handler(request)
File "/home/pentusha/projects/test/api/app2.py", line 5, in post_handler
body = await request.text()
File "/home/pentusha/.venvs/test/lib/python3.7/site-packages/aiohttp/web_request.py", line 579, in text
return bytes_body.decode(encoding)
LookupError: unknown encoding: test
|
LookupError
|
async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed always unless
keep_alive(True) specified.
"""
loop = self._loop
handler = self._task_handler
assert handler is not None
manager = self._manager
assert manager is not None
keepalive_timeout = self._keepalive_timeout
resp = None
assert self._request_factory is not None
assert self._request_handler is not None
while not self._force_close:
if not self._messages:
try:
# wait for next request
self._waiter = loop.create_future()
await self._waiter
except asyncio.CancelledError:
break
finally:
self._waiter = None
message, payload = self._messages.popleft()
if self.access_log:
now = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
request = self._request_factory(message, payload, self, writer, handler)
try:
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(self._request_handler(request))
resp = await task
except HTTPException as exc:
resp = Response(
status=exc.status,
reason=exc.reason,
text=exc.text,
headers=exc.headers,
)
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection")
break
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return statement on request handler")
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
try:
await prepare_meth(request)
await resp.write_eof()
except ConnectionResetError:
self.log_debug("Ignored premature client disconnection 2")
break
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# log access
if self.access_log:
self.log_access(request, resp, loop.time() - now)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
with CeilTimeout(end_t - now, loop=loop):
# read and ignore
await payload.readany()
now = loop.time()
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
if keepalive_timeout is not None:
now = self._loop.time()
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout, self._process_keepalive
)
else:
break
# remove handler, close transport if no handlers left
if not self._force_close:
self._task_handler = None
if self.transport is not None and self._error_handler is None:
self.transport.close()
|
async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed always unless
keep_alive(True) specified.
"""
loop = self._loop
handler = self._task_handler
assert handler is not None
manager = self._manager
assert manager is not None
keepalive_timeout = self._keepalive_timeout
resp = None
assert self._request_factory is not None
assert self._request_handler is not None
while not self._force_close:
if not self._messages:
try:
# wait for next request
self._waiter = loop.create_future()
await self._waiter
except asyncio.CancelledError:
break
finally:
self._waiter = None
message, payload = self._messages.popleft()
if self.access_log:
now = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
request = self._request_factory(message, payload, self, writer, handler)
try:
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(self._request_handler(request))
resp = await task
except HTTPException as exc:
resp = Response(
status=exc.status,
reason=exc.reason,
text=exc.text,
headers=exc.headers,
)
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection")
break
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return statement on request handler")
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
await prepare_meth(request)
await resp.write_eof()
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# log access
if self.access_log:
self.log_access(request, resp, loop.time() - now)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
with CeilTimeout(end_t - now, loop=loop):
# read and ignore
await payload.readany()
now = loop.time()
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
if keepalive_timeout is not None:
now = self._loop.time()
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout, self._process_keepalive
)
else:
break
# remove handler, close transport if no handlers left
if not self._force_close:
self._task_handler = None
if self.transport is not None and self._error_handler is None:
self.transport.close()
|
https://github.com/aio-libs/aiohttp/issues/3648
|
Unhandled exception
Traceback (most recent call last):
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/web_protocol.py", line 448, in start
await resp.write_eof()
File "/usr/lib/python3.6/asyncio/coroutines.py", line 110, in __next__
return self.gen.send(None)
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/web_response.py", line 444, in write_eof
await self._payload_writer.write_eof(data)
File "/usr/lib/python3.6/asyncio/coroutines.py", line 110, in __next__
return self.gen.send(None)
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/http_writer.py", line 138, in write_eof
self._write(chunk)
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/http_writer.py", line 67, in _write
raise ConnectionResetError('Cannot write to closing transport')
ConnectionResetError: Cannot write to closing transport
|
ConnectionResetError
|
async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed always unless
keep_alive(True) specified.
"""
loop = self._loop
handler = self._task_handler
assert handler is not None
manager = self._manager
assert manager is not None
keepalive_timeout = self._keepalive_timeout
resp = None
assert self._request_factory is not None
assert self._request_handler is not None
while not self._force_close:
if not self._messages:
try:
# wait for next request
self._waiter = loop.create_future()
await self._waiter
except asyncio.CancelledError:
break
finally:
self._waiter = None
message, payload = self._messages.popleft()
if self.access_log:
now = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
request = self._request_factory(message, payload, self, writer, handler)
try:
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(self._request_handler(request))
resp = await task
except HTTPException as exc:
resp = exc
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection")
break
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
else:
# Deprecation warning (See #2415)
if getattr(resp, "__http_exception__", False):
warnings.warn(
"returning HTTPException object is deprecated "
"(#2415) and will be removed, "
"please raise the exception instead",
DeprecationWarning,
)
if self.debug:
if not isinstance(resp, StreamResponse):
if resp is None:
raise RuntimeError(
"Missing return statement on request handler"
)
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return statement on request handler")
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
try:
await prepare_meth(request)
await resp.write_eof()
except ConnectionResetError:
self.log_debug("Ignored premature client disconnection 2")
break
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# log access
if self.access_log:
self.log_access(request, resp, loop.time() - now)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
with CeilTimeout(end_t - now, loop=loop):
# read and ignore
await payload.readany()
now = loop.time()
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
if keepalive_timeout is not None:
now = self._loop.time()
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout, self._process_keepalive
)
else:
break
# remove handler, close transport if no handlers left
if not self._force_close:
self._task_handler = None
if self.transport is not None and self._error_handler is None:
self.transport.close()
|
async def start(self) -> None:
"""Process incoming request.
It reads request line, request headers and request payload, then
calls handle_request() method. Subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. Connection is being closed always unless
keep_alive(True) specified.
"""
loop = self._loop
handler = self._task_handler
assert handler is not None
manager = self._manager
assert manager is not None
keepalive_timeout = self._keepalive_timeout
resp = None
assert self._request_factory is not None
assert self._request_handler is not None
while not self._force_close:
if not self._messages:
try:
# wait for next request
self._waiter = loop.create_future()
await self._waiter
except asyncio.CancelledError:
break
finally:
self._waiter = None
message, payload = self._messages.popleft()
if self.access_log:
now = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
request = self._request_factory(message, payload, self, writer, handler)
try:
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(self._request_handler(request))
resp = await task
except HTTPException as exc:
resp = exc
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection")
break
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
else:
# Deprecation warning (See #2415)
if getattr(resp, "__http_exception__", False):
warnings.warn(
"returning HTTPException object is deprecated "
"(#2415) and will be removed, "
"please raise the exception instead",
DeprecationWarning,
)
if self.debug:
if not isinstance(resp, StreamResponse):
if resp is None:
raise RuntimeError(
"Missing return statement on request handler"
)
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
await resp.prepare(request)
await resp.write_eof()
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# log access
if self.access_log:
self.log_access(request, resp, loop.time() - now)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
with CeilTimeout(end_t - now, loop=loop):
# read and ignore
await payload.readany()
now = loop.time()
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
if keepalive_timeout is not None:
now = self._loop.time()
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout, self._process_keepalive
)
else:
break
# remove handler, close transport if no handlers left
if not self._force_close:
self._task_handler = None
if self.transport is not None and self._error_handler is None:
self.transport.close()
|
https://github.com/aio-libs/aiohttp/issues/3648
|
Unhandled exception
Traceback (most recent call last):
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/web_protocol.py", line 448, in start
await resp.write_eof()
File "/usr/lib/python3.6/asyncio/coroutines.py", line 110, in __next__
return self.gen.send(None)
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/web_response.py", line 444, in write_eof
await self._payload_writer.write_eof(data)
File "/usr/lib/python3.6/asyncio/coroutines.py", line 110, in __next__
return self.gen.send(None)
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/http_writer.py", line 138, in write_eof
self._write(chunk)
File "/home/decaz/.virtualenvs/test-aiohttp/lib/python3.6/site-packages/aiohttp/http_writer.py", line 67, in _write
raise ConnectionResetError('Cannot write to closing transport')
ConnectionResetError: Cannot write to closing transport
|
ConnectionResetError
|
def data_received(self, data):
if self._force_close or self._close:
return
# parse http messages
if self._payload_parser is None and not self._upgrade:
try:
messages, upgraded, tail = self._request_parser.feed_data(data)
except HttpProcessingError as exc:
# something happened during parsing
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self.transport, self._loop),
400,
exc,
exc.message,
)
)
self.close()
except Exception as exc:
# 500: internal error
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self.transport, self._loop), 500, exc
)
)
self.close()
else:
if messages:
# sometimes the parser returns no messages
for msg, payload in messages:
self._request_count += 1
self._messages.append((msg, payload))
if self._waiter is not None:
self._waiter.set_result(None)
self._upgraded = upgraded
if upgraded and tail:
self._message_tail = tail
# no parser, just store
elif self._payload_parser is None and self._upgrade and data:
self._message_tail += data
# feed payload
elif data:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self.close()
|
def data_received(self, data):
if self._force_close or self._close:
return
# parse http messages
if self._payload_parser is None and not self._upgrade:
try:
messages, upgraded, tail = self._request_parser.feed_data(data)
except HttpProcessingError as exc:
# something happened during parsing
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self.transport, self._loop),
400,
exc,
exc.message,
)
)
self.close()
except Exception as exc:
# 500: internal error
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self.transport, self._loop), 500, exc
)
)
self.close()
else:
for msg, payload in messages:
self._request_count += 1
self._messages.append((msg, payload))
if self._waiter:
self._waiter.set_result(None)
self._upgraded = upgraded
if upgraded and tail:
self._message_tail = tail
# no parser, just store
elif self._payload_parser is None and self._upgrade and data:
self._message_tail += data
# feed payload
elif data:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self.close()
|
https://github.com/aio-libs/aiohttp/issues/2752
|
Task exception was never retrieved
future: <Task finished coro=<RequestHandler.start() done, defined at C:\Program Files\Python36\lib\site-packages\aiohttp\web_protocol.py:340> exception=IndexError('pop from an empty deque',)>
Traceback (most recent call last):
File "C:\Program Files\Python36\lib\site-packages\aiohttp\web_protocol.py", line 365, in start
message, payload = self._messages.popleft()
IndexError: pop from an empty deque
|
IndexError
|
def make_handler(self, app):
if hasattr(self.cfg, "debug"):
is_debug = self.cfg.debug
else:
is_debug = self.log.loglevel == logging.DEBUG
return app.make_handler(
logger=self.log,
debug=is_debug,
slow_request_timeout=self.cfg.timeout,
keepalive_timeout=self.cfg.keepalive,
access_log=self.log.access_log,
access_log_format=self._get_valid_log_format(self.cfg.access_log_format),
)
|
def make_handler(self, app):
return app.make_handler(
logger=self.log,
debug=self.cfg.debug,
timeout=self.cfg.timeout,
keep_alive=self.cfg.keepalive,
access_log=self.log.access_log,
access_log_format=self._get_valid_log_format(self.cfg.access_log_format),
)
|
https://github.com/aio-libs/aiohttp/issues/1148
|
Traceback (most recent call last):
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/gunicorn/arbiter.py", line 557, in spawn_worker
worker.init_process()
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/aiohttp/worker.py", line 196, in init_process
super().init_process()
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/aiohttp/worker.py", line 36, in init_process
super().init_process()
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/gunicorn/workers/base.py", line 132, in init_process
self.run()
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/aiohttp/worker.py", line 43, in run
self.loop.run_until_complete(self._runner)
File "uvloop/loop.pyx", line 1133, in uvloop.loop.Loop.run_until_complete (uvloop/loop.c:19943)
File "uvloop/future.pyx", line 123, in uvloop.loop.BaseFuture.result (uvloop/loop.c:94147)
File "uvloop/future.pyx", line 78, in uvloop.loop.BaseFuture._result_impl (uvloop/loop.c:93686)
File "uvloop/task.pyx", line 126, in uvloop.loop.BaseTask._fast_step (uvloop/loop.c:99430)
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/aiohttp/worker.py", line 91, in _run
handler = self.make_handler(self.wsgi)
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/aiohttp/worker.py", line 52, in make_handler
debug=self.cfg.debug,
File "/Users/bcmyers/Dropbox/Programming/python/website/venv/lib/python3.5/site-packages/gunicorn/config.py", line 58, in __getattr__
raise AttributeError("No configuration setting for: %s" % name)
AttributeError: No configuration setting for: debug
|
AttributeError
|
def add_route(self, method, path, handler, *, name=None):
assert path.startswith("/")
assert callable(handler), handler
if not asyncio.iscoroutinefunction(handler):
handler = asyncio.coroutine(handler)
method = method.upper()
assert method in self.METHODS, method
parts = []
factory = PlainRoute
format_parts = []
for part in path.split("/"):
if not part:
continue
match = self.DYN.match(part)
if match:
parts.append("(?P<" + match.group("var") + ">" + self.GOOD + ")")
factory = DynamicRoute
format_parts.append("{" + match.group("var") + "}")
continue
match = self.DYN_WITH_RE.match(part)
if match:
parts.append("(?P<" + match.group("var") + ">" + match.group("re") + ")")
factory = DynamicRoute
format_parts.append("{" + match.group("var") + "}")
continue
if self.PLAIN.match(part):
parts.append(re.escape(part))
format_parts.append(part)
continue
raise ValueError("Invalid path '{}'['{}']".format(path, part))
if factory is PlainRoute:
route = PlainRoute(method, handler, name, path)
else:
pattern = "/" + "/".join(parts)
if path.endswith("/") and pattern != "/":
pattern += "/"
try:
compiled = re.compile("^" + pattern + "$")
except re.error as exc:
raise ValueError("Bad pattern '{}': {}".format(pattern, exc)) from None
formatter = "/" + "/".join(format_parts)
route = DynamicRoute(method, handler, name, compiled, formatter)
self._register_endpoint(route)
return route
|
def add_route(self, method, path, handler, *, name=None):
assert path.startswith("/")
assert callable(handler), handler
if not asyncio.iscoroutinefunction(handler):
handler = asyncio.coroutine(handler)
method = method.upper()
assert method in self.METHODS, method
parts = []
factory = PlainRoute
for part in path.split("/"):
if not part:
continue
match = self.DYN.match(part)
if match:
parts.append("(?P<" + match.group("var") + ">" + self.GOOD + ")")
factory = DynamicRoute
continue
match = self.DYN_WITH_RE.match(part)
if match:
parts.append("(?P<" + match.group("var") + ">" + match.group("re") + ")")
factory = DynamicRoute
continue
if self.PLAIN.match(part):
parts.append(re.escape(part))
continue
raise ValueError("Invalid path '{}'['{}']".format(path, part))
if factory is PlainRoute:
route = PlainRoute(method, handler, name, path)
else:
pattern = "/" + "/".join(parts)
if path.endswith("/") and pattern != "/":
pattern += "/"
try:
compiled = re.compile("^" + pattern + "$")
except re.error as exc:
raise ValueError("Bad pattern '{}': {}".format(pattern, exc)) from None
route = DynamicRoute(method, handler, name, compiled, path)
self._register_endpoint(route)
return route
|
https://github.com/aio-libs/aiohttp/issues/264
|
from aiohttp.web import UrlDispatcher
disp = UrlDispatcher()
disp.add_route('GET', '/{num:^\d+}', lambda x: x, name='name')
<DynamicRoute 'name' [GET] /{num:^\d+} -> <function <lambda> at 0x7f0223131840>
disp['name'].url(parts={'num': '1'})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/imbolc/.pyenv/versions/3.4.1/lib/python3.4/site-packages/aiohttp/web.py", line 1226, in url
url = self._formatter.format_map(parts)
ValueError: Invalid format specifier
|
ValueError
|
def add_fields(self, *fields):
to_add = list(fields)
while to_add:
rec = to_add.pop(0)
if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec)
self._has_io = True
elif isinstance(rec, multidict.MultiDict):
to_add.extend(rec.items(getall=True))
elif len(rec) == 1:
k = guess_filename(rec[0], "unknown")
self.add_field(k, rec[0])
if isinstance(rec[0], io.IOBase):
self._has_io = True
elif len(rec) == 2:
k, fp = rec
fn = guess_filename(fp)
self.add_field(k, fp, filename=fn)
if isinstance(fp, io.IOBase):
self._has_io = True
else:
k, fp, ft = rec
fn = guess_filename(fp, k)
self.add_field(k, fp, contenttype=ft, filename=fn)
self._has_io = True
|
def add_fields(self, *fields):
for rec in fields:
if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec)
self._has_io = True
elif len(rec) == 1:
k = guess_filename(rec[0], "unknown")
self.add_field(k, rec[0])
if isinstance(rec[0], io.IOBase):
self._has_io = True
elif len(rec) == 2:
k, fp = rec
fn = guess_filename(fp)
self.add_field(k, fp, filename=fn)
if isinstance(fp, io.IOBase):
self._has_io = True
else:
k, fp, ft = rec
fn = guess_filename(fp, k)
self.add_field(k, fp, contenttype=ft, filename=fn)
self._has_io = True
|
https://github.com/aio-libs/aiohttp/issues/114
|
Traceback (most recent call last):
File "bug.py", line 7, in <module>
loop.run_until_complete(aiohttp.request('POST', "http://httpbin.org/post", data=d))
File "/usr/lib64/python3.4/asyncio/base_events.py", line 208, in run_until_complete
return future.result()
File "/usr/lib64/python3.4/asyncio/futures.py", line 243, in result
raise self._exception
File "/usr/lib64/python3.4/asyncio/tasks.py", line 317, in _step
result = coro.throw(exc)
File "/home/pepijn/code/eppserver/env/lib/python3.4/site-packages/aiohttp/client.py", line 111, in request
yield from resp.start(conn, read_until_eof)
File "/home/pepijn/code/eppserver/env/lib/python3.4/site-packages/aiohttp/client.py", line 604, in start
self.message = yield from httpstream.read()
File "/home/pepijn/code/eppserver/env/lib/python3.4/site-packages/aiohttp/streams.py", line 366, in read
return (yield from super().read())
File "/home/pepijn/code/eppserver/env/lib/python3.4/site-packages/aiohttp/streams.py", line 341, in read
yield from self._waiter
File "/usr/lib64/python3.4/asyncio/futures.py", line 348, in __iter__
yield self # This tells Task to wait for completion.
File "/usr/lib64/python3.4/asyncio/tasks.py", line 370, in _wakeup
value = future.result()
File "/usr/lib64/python3.4/asyncio/futures.py", line 243, in result
raise self._exception
File "/home/pepijn/code/eppserver/env/lib/python3.4/site-packages/aiohttp/client.py", line 496, in write_bytes
request.write(chunk)
File "/home/pepijn/code/eppserver/env/lib/python3.4/site-packages/aiohttp/protocol.py", line 666, in write
chunk is EOF_MARKER), chunk
AssertionError: a
|
AssertionError
|
def map_network(self, pool_size=255):
"""
Maps the network
:param pool_size: amount of parallel ping processes
:return: list of valid ip addresses
"""
ip_list = list()
# get my IP and compose a base like 192.168.1.xxx
ip_parts = self.get_my_ip().split(".")
base_ip = ip_parts[0] + "." + ip_parts[1] + "." + ip_parts[2] + "."
max_threads = 50
def check_adb_port(ip):
"""
Check if port is open
:param ip:
:param port:
:return:
"""
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.setdefaulttimeout(2.0)
result = sock.connect_ex((ip, 5555))
if result == 0:
ip_list.append(ip)
sock.close()
except Exception as e:
logging.warning("Unable to check %s: %s", ip, e)
for i in range(1, 255):
threading.Thread(
target=check_adb_port,
args=[f"{base_ip}{i}"],
).start()
# limit the number of threads.
while threading.active_count() > max_threads:
time.sleep(1)
return ip_list
|
def map_network(self, pool_size=255):
"""
Maps the network
:param pool_size: amount of parallel ping processes
:return: list of valid ip addresses
"""
if not self.ping:
print(
"Error: `ping` executable not found. Please enter the IP "
"address in the text box manually"
)
return
ip_list = list()
# get my IP and compose a base like 192.168.1.xxx
ip_parts = self.get_my_ip().split(".")
base_ip = ip_parts[0] + "." + ip_parts[1] + "." + ip_parts[2] + "."
# prepare the jobs queue
try:
jobs = multiprocessing.Queue()
results = multiprocessing.Queue()
pool = [
multiprocessing.Process(target=self.pinger, args=(jobs, results))
for _ in range(pool_size)
]
except PermissionError:
print(
"The current system of packaging of guiscrcpy does not "
"support semaphores. Therefore its not possible to "
"multiprocess ip port scanning. Alternatively use `nmap` "
"or `nutty` to scan your network."
)
return
for p in pool:
p.start()
# cue hte ping processes
for i in range(1, 255):
jobs.put(base_ip + "{0}".format(i))
for _ in pool:
jobs.put(None)
for p in pool:
p.join()
# collect he results
while not results.empty():
ip = results.get()
ip_list.append(ip)
return ip_list
|
https://github.com/srevinsaju/guiscrcpy/issues/164
|
Process Process-232:
Traceback (most recent call last):
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py", line 99, in run
File "/usr/local/lib/python3.7/site-packages/guiscrcpy/network/network.py", line 27, in pinger
OSError: [Errno 24] Too many open files: '/dev/null'
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/guiscrcpy/ux/network.py", line 117, in refresh
self.listView.addItems(self.nm.map_network())
File "/usr/local/lib/python3.7/site-packages/guiscrcpy/network/network.py", line 88, in map_network
p.start()
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py", line 112, in start
self._popen = self._Popen(self)
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/context.py", line 223, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/context.py", line 277, in _Popen
return Popen(process_obj)
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/popen_fork.py", line 20, in __init__
self._launch(process_obj)
File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/popen_fork.py", line 69, in _launch
parent_r, child_w = os.pipe()
OSError: [Errno 24] Too many open files
Abort trap: 6
|
OSError
|
def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()] for x in decode_process(proc)[1:]][:-1]
devices_found = []
for device in output:
description = {
"identifier": device[0],
"status": device[1],
"product": get(device, 2, ":").split(":")[-1],
"model": get(device, 3, ":").split(":")[-1],
"device": get(device, 4, ":").split(":")[-1],
"transport_id": get(device, 5, ":").split(":")[-1],
}
devices_found.append(description)
logging.debug("ADB: {}".format(devices_found))
return devices_found
|
def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()] for x in decode_process(proc)[1:]][:-1]
devices_found = []
for device in output:
description = {
"identifier": device[0],
"status": device[1],
"product": get(device, 2, ":").split(":")[1],
"model": get(device, 3, ":").split(":")[1],
"device": get(device, 4, ":").split(":")[1],
"transport_id": get(device, 5, ":").split(":")[1],
}
devices_found.append(description)
logging.debug("ADB: {}".format(devices_found))
return devices_found
|
https://github.com/srevinsaju/guiscrcpy/issues/117
|
me@host:~$ Downloads/guiscrcpy-x86_64.AppImage
guiscrcpy
by srevinsaju
3.7.post195.dev
Licensed under GNU GPL v3 (c) 2020
MSG: Please ensure you have enabled USB Debugging on your device. See README.md for more details
(python3.8:31901): dbind-WARNING **: 10:21:56.701: Couldn't register with accessibility bus: Did not receive a reply. Possible causes include: the remote application did not send a reply, the message bus security policy blocked the reply, the reply timeout expired, or the network connection was broken.
Traceback (most recent call last):
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/bin/guiscrcpy", line 9, in <module>
sys.exit(bootstrap())
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 1109, in bootstrap
bootstrap0()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 1079, in bootstrap0
guiscrcpy = InterfaceGuiscrcpy()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 337, in __init__
self.refresh_devices()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 344, in refresh_devices
self.scan_devices_update_list_view()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 625, in scan_devices_update_list_view
devices = adb.devices_detailed(adb.path)
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/lib/check.py", line 180, in devices_detailed
'product': get(device, 2, ':').split(':')[1],
IndexError: list index out of range
|
IndexError
|
def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()] for x in decode_process(proc)[1:]][:-1]
devices_found = []
for device in output:
# https://github.com/srevinsaju/guiscrcpy/issues/117
if "udev" in device and "permission" in device:
# This is an error with some linux and Windows OSes
# This happens because the udev is not configured
# and linux adb does not have access to reading the device
# the status hence should be 'no_permission'
status = "no_permission"
else:
status = device[1]
description = {
"identifier": device[0],
"status": status,
"product": get(device, 2, ":").split(":")[-1],
"model": get(device, 3, ":").split(":")[-1],
"device": get(device, 4, ":").split(":")[-1],
"transport_id": get(device, 5, ":").split(":")[-1],
}
devices_found.append(description)
logging.debug("ADB: {}".format(devices_found))
return devices_found
|
def devices_detailed(increment=""):
if increment is None:
raise FileNotFoundError(
"guiscrcpy couldn't find adb. "
"Please specify path to adb in configuration filename"
)
proc = Popen(_(increment + " devices -l"), stdout=PIPE)
output = [[y.strip() for y in x.split()] for x in decode_process(proc)[1:]][:-1]
devices_found = []
for device in output:
description = {
"identifier": device[0],
"status": device[1],
"product": get(device, 2, ":").split(":")[-1],
"model": get(device, 3, ":").split(":")[-1],
"device": get(device, 4, ":").split(":")[-1],
"transport_id": get(device, 5, ":").split(":")[-1],
}
devices_found.append(description)
logging.debug("ADB: {}".format(devices_found))
return devices_found
|
https://github.com/srevinsaju/guiscrcpy/issues/117
|
me@host:~$ Downloads/guiscrcpy-x86_64.AppImage
guiscrcpy
by srevinsaju
3.7.post195.dev
Licensed under GNU GPL v3 (c) 2020
MSG: Please ensure you have enabled USB Debugging on your device. See README.md for more details
(python3.8:31901): dbind-WARNING **: 10:21:56.701: Couldn't register with accessibility bus: Did not receive a reply. Possible causes include: the remote application did not send a reply, the message bus security policy blocked the reply, the reply timeout expired, or the network connection was broken.
Traceback (most recent call last):
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/bin/guiscrcpy", line 9, in <module>
sys.exit(bootstrap())
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 1109, in bootstrap
bootstrap0()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 1079, in bootstrap0
guiscrcpy = InterfaceGuiscrcpy()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 337, in __init__
self.refresh_devices()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 344, in refresh_devices
self.scan_devices_update_list_view()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 625, in scan_devices_update_list_view
devices = adb.devices_detailed(adb.path)
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/lib/check.py", line 180, in devices_detailed
'product': get(device, 2, ':').split(':')[1],
IndexError: list index out of range
|
IndexError
|
def scan_devices_update_list_view(self):
"""
Scan for new devices; and update the list view
:return:
"""
# self.devices_view.clear()
paired_devices = []
for index in range(self.devices_view.count()):
paired_devices.append(self.devices_view.item(index))
devices = adb.devices_detailed(adb.path)
log(devices)
for i in devices:
if i["identifier"] not in config["device"].keys():
device_paired_and_exists = False
config["device"][i["identifier"]] = {"rotation": 0}
else:
device_paired_and_exists = True
if config["device"].get("rotation", 0) in (-1, 0, 2):
icon = ":/icons/icons/portrait_mobile_white.svg"
else:
icon = ":/icons/icons/landscape_mobile_white.svg"
if i["status"] == "offline":
icon = ":/icons/icons/portrait_mobile_error.svg"
elif i["status"] == "unauthorized":
icon = ":/icons/icons/portrait_mobile_warning.svg"
if i["status"] == "no_permission":
# https://stackoverflow.com/questions/
# 53887322/adb-devices-no-permissions-user-in-
# plugdev-group-are-your-udev-rules-wrong
udev_error = (
"Error connecting to device. Your udev rules are"
" incorrect. See https://stackoverflow.com/questions"
"/53887322/adb-devices-no-permissions-user-in-plugdev-"
"group-are-your-udev-rules-wrong"
)
self.private_message_box_adb.setText(udev_error)
print(udev_error)
return []
# Check if device is unauthorized
elif i["status"] == "unauthorized":
log("unauthorized device detected: Click Allow on your device")
# The device is connected; and might/might't paired in the past
# And is connected to the same IP address
# It is possibly a bug with the connection;
# Temporarily create a new QListItem to display the
# device with the error
paired = False
device_paired_and_exists = False
self.private_message_box_adb.setText(
f"{i['identifier']} is unauthorized. Please click allow on your device."
)
# Remove other devices with the same id and offline and
# unauthorized
self.remove_device_device_view(
i["identifier"], statuses=["offline", "unauthorized"]
)
# Unauthorized device cannot be considered as a paired device
devices_view_list_item = QListWidgetItem()
else:
# check if device is paired
# if yes, just update the list item
if not device_paired_and_exists:
paired = False
devices_view_list_item = QListWidgetItem()
else:
for paired_device in paired_devices:
if paired_device.text().split()[0] == i["model"]:
paired = True
devices_view_list_item = paired_device
# as we have found a paired device
# we know by assumption; there cannot be two
# devices with the same local IP address;
# lets scan the devices_view once more in a loop
# to check for any device with the same
# identifier and remove them; based on this same
# assumption
self.remove_device_device_view(
i["identifier"], statuses=["offline", "unauthorized"]
)
break
elif paired_device.text().split()[1] == i["identifier"]:
devices_view_list_item = QListWidgetItem()
paired = False
break
else:
paired = False
devices_view_list_item = QListWidgetItem()
devices_view_list_item.setIcon(QIcon(icon))
devices_view_list_item.setText(
"{device}\n{mode}\n{status}".format(
device=i["model"], mode=i["identifier"], status=i["status"]
)
)
devices_view_list_item.setToolTip(
"Device: {d}\n"
"Model: {m}\n"
"Alias: {a}\n"
"Status: {s}\n"
"Transport ID: {t}\n"
"Paired: {p}".format(
d=i["identifier"],
m=i["model"],
a=i["product"],
s=i["status"],
t=i["transport_id"],
p=paired,
)
)
devices_view_list_item.setFont(QFont("Noto Sans", pointSize=8))
log(device_paired_and_exists)
if device_paired_and_exists:
continue
# If and only if the device doesn't exist; add it
self.devices_view.addItem(devices_view_list_item)
return devices
|
def scan_devices_update_list_view(self):
"""
Scan for new devices; and update the list view
:return:
"""
# self.devices_view.clear()
paired_devices = []
for index in range(self.devices_view.count()):
paired_devices.append(self.devices_view.item(index))
devices = adb.devices_detailed(adb.path)
log(devices)
for i in devices:
if i["identifier"] not in config["device"].keys():
device_paired_and_exists = False
config["device"][i["identifier"]] = {"rotation": 0}
else:
device_paired_and_exists = True
if config["device"].get("rotation", 0) in (-1, 0, 2):
icon = ":/icons/icons/portrait_mobile_white.svg"
else:
icon = ":/icons/icons/landscape_mobile_white.svg"
if i["status"] == "offline":
icon = ":/icons/icons/portrait_mobile_error.svg"
elif i["status"] == "unauthorized":
icon = ":/icons/icons/portrait_mobile_warning.svg"
# Check if device is unauthorized
if i["status"] == "unauthorized":
log("unauthorized device detected: Click Allow on your device")
# The device is connected; and might/might't paired in the past
# And is connected to the same IP address
# It is possibly a bug with the connection;
# Temporarily create a new QListItem to display the
# device with the error
paired = False
device_paired_and_exists = False
# Remove other devices with the same id and offline and
# unauthorized
self.remove_device_device_view(
i["identifier"], statuses=["offline", "unauthorized"]
)
# Unauthorized device cannot be considered as a paired device
devices_view_list_item = QListWidgetItem()
else:
# check if device is paired
# if yes, just update the list item
if not device_paired_and_exists:
paired = False
devices_view_list_item = QListWidgetItem()
else:
for paired_device in paired_devices:
if paired_device.text().split()[0] == i["model"]:
paired = True
devices_view_list_item = paired_device
# as we have found a paired device
# we know by assumption; there cannot be two
# devices with the same local IP address;
# lets scan the devices_view once more in a loop
# to check for any device with the same
# identifier and remove them; based on this same
# assumption
self.remove_device_device_view(
i["identifier"], statuses=["offline", "unauthorized"]
)
break
elif paired_device.text().split()[1] == i["identifier"]:
devices_view_list_item = QListWidgetItem()
paired = False
break
else:
paired = False
devices_view_list_item = QListWidgetItem()
devices_view_list_item.setIcon(QIcon(icon))
devices_view_list_item.setText(
"{device}\n{mode}\n{status}".format(
device=i["model"], mode=i["identifier"], status=i["status"]
)
)
devices_view_list_item.setToolTip(
"Device: {d}\n"
"Model: {m}\n"
"Alias: {a}\n"
"Status: {s}\n"
"Transport ID: {t}\n"
"Paired: {p}".format(
d=i["identifier"],
m=i["model"],
a=i["product"],
s=i["status"],
t=i["transport_id"],
p=paired,
)
)
devices_view_list_item.setFont(QFont("Noto Sans", pointSize=8))
log(device_paired_and_exists)
if device_paired_and_exists:
continue
# If and only if the device doesn't exist; add it
self.devices_view.addItem(devices_view_list_item)
return devices
|
https://github.com/srevinsaju/guiscrcpy/issues/117
|
me@host:~$ Downloads/guiscrcpy-x86_64.AppImage
guiscrcpy
by srevinsaju
3.7.post195.dev
Licensed under GNU GPL v3 (c) 2020
MSG: Please ensure you have enabled USB Debugging on your device. See README.md for more details
(python3.8:31901): dbind-WARNING **: 10:21:56.701: Couldn't register with accessibility bus: Did not receive a reply. Possible causes include: the remote application did not send a reply, the message bus security policy blocked the reply, the reply timeout expired, or the network connection was broken.
Traceback (most recent call last):
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/bin/guiscrcpy", line 9, in <module>
sys.exit(bootstrap())
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 1109, in bootstrap
bootstrap0()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 1079, in bootstrap0
guiscrcpy = InterfaceGuiscrcpy()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 337, in __init__
self.refresh_devices()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 344, in refresh_devices
self.scan_devices_update_list_view()
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/launcher.py", line 625, in scan_devices_update_list_view
devices = adb.devices_detailed(adb.path)
File "/tmp/.mount_guiscrzbudZI/opt/python3.8/lib/python3.8/site-packages/guiscrcpy/lib/check.py", line 180, in devices_detailed
'product': get(device, 2, ':').split(':')[1],
IndexError: list index out of range
|
IndexError
|
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
chunk_size must be of type int or None. A value of None will
function differently depending on the value of `stream`.
stream=True will read data as it arrives in whatever size the
chunks are received. If stream=False, data is returned as
a single chunk.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
# Special case for urllib3.
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
# Dirsearch only
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
elif chunk_size is not None and not isinstance(chunk_size, int):
raise TypeError(
"chunk_size must be an int, it is instead a %s." % type(chunk_size)
)
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
|
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
chunk_size must be of type int or None. A value of None will
function differently depending on the value of `stream`.
stream=True will read data as it arrives in whatever size the
chunks are received. If stream=False, data is returned as
a single chunk.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
# Special case for urllib3.
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
elif chunk_size is not None and not isinstance(chunk_size, int):
raise TypeError(
"chunk_size must be an int, it is instead a %s." % type(chunk_size)
)
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
|
https://github.com/maurosoria/dirsearch/issues/106
|
Traceback (most recent call last):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 192, in _decode
data = self._decoder.decompress(data)
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 58, in decompress
return self._obj.decompress(data)
zlib.error: Error -3 while decompressing data: incorrect header check
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/RECON/dirsearch-master/thirdparty/requests/models.py", line 657, in generate
for chunk in self.raw.stream(chunk_size, decode_content=True):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 303, in stream
for line in self.read_chunked(amt, decode_content=decode_content):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 452, in read_chunked
flush_decoder=True)
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 197, in _decode
"failed to decode it." % content_encoding, e)
thirdparty.requests.packages.urllib3.exceptions.DecodeError: ('Received response with content-encoding: gzip, but failed to decode it.', error('Error -3 while decompressing data: incorrect header check',))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/RECON/dirsearch-master/dirsearch.py", line 40, in <module>
main = Program()
File "/root/RECON/dirsearch-master/dirsearch.py", line 36, in __init__
self.controller = Controller(self.script_path, self.arguments, self.output)
File "/root/RECON/dirsearch-master/lib/controller/Controller.py", line 162, in __init__
self.wait()
File "/root/RECON/dirsearch-master/lib/controller/Controller.py", line 400, in wait
self.fuzzer.start()
File "/root/RECON/dirsearch-master/lib/core/Fuzzer.py", line 86, in start
self.setupScanners()
File "/root/RECON/dirsearch-master/lib/core/Fuzzer.py", line 62, in setupScanners
self.scanners[extension] = Scanner(self.requester, self.testFailPath, "." + extension)
File "/root/RECON/dirsearch-master/lib/core/Scanner.py", line 45, in __init__
self.setup()
File "/root/RECON/dirsearch-master/lib/core/Scanner.py", line 49, in setup
firstResponse = self.requester.request(firstPath)
File "/root/RECON/dirsearch-master/lib/connection/Requester.py", line 156, in request
timeout=self.timeout
File "/root/RECON/dirsearch-master/thirdparty/requests/sessions.py", line 482, in get
return self.request('GET', url, **kwargs)
File "/root/RECON/dirsearch-master/thirdparty/requests/sessions.py", line 470, in request
resp = self.send(prep, **send_kwargs)
File "/root/RECON/dirsearch-master/thirdparty/requests/sessions.py", line 610, in send
r.content
File "/root/RECON/dirsearch-master/thirdparty/requests/models.py", line 734, in content
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
File "/root/RECON/dirsearch-master/thirdparty/requests/models.py", line 662, in generate
raise ContentDecodingError(e)
thirdparty.requests.exceptions.ContentDecodingError: ('Received response with content-encoding: gzip, but failed to decode it.', error('Error -3 while decompressing data: incorrect header check',))
|
thirdparty.requests.packages.urllib3.exceptions.DecodeError
|
def generate():
# Special case for urllib3.
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
# Dirsearch only
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
|
def generate():
# Special case for urllib3.
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
|
https://github.com/maurosoria/dirsearch/issues/106
|
Traceback (most recent call last):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 192, in _decode
data = self._decoder.decompress(data)
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 58, in decompress
return self._obj.decompress(data)
zlib.error: Error -3 while decompressing data: incorrect header check
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/RECON/dirsearch-master/thirdparty/requests/models.py", line 657, in generate
for chunk in self.raw.stream(chunk_size, decode_content=True):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 303, in stream
for line in self.read_chunked(amt, decode_content=decode_content):
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 452, in read_chunked
flush_decoder=True)
File "/root/RECON/dirsearch-master/thirdparty/requests/packages/urllib3/response.py", line 197, in _decode
"failed to decode it." % content_encoding, e)
thirdparty.requests.packages.urllib3.exceptions.DecodeError: ('Received response with content-encoding: gzip, but failed to decode it.', error('Error -3 while decompressing data: incorrect header check',))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/RECON/dirsearch-master/dirsearch.py", line 40, in <module>
main = Program()
File "/root/RECON/dirsearch-master/dirsearch.py", line 36, in __init__
self.controller = Controller(self.script_path, self.arguments, self.output)
File "/root/RECON/dirsearch-master/lib/controller/Controller.py", line 162, in __init__
self.wait()
File "/root/RECON/dirsearch-master/lib/controller/Controller.py", line 400, in wait
self.fuzzer.start()
File "/root/RECON/dirsearch-master/lib/core/Fuzzer.py", line 86, in start
self.setupScanners()
File "/root/RECON/dirsearch-master/lib/core/Fuzzer.py", line 62, in setupScanners
self.scanners[extension] = Scanner(self.requester, self.testFailPath, "." + extension)
File "/root/RECON/dirsearch-master/lib/core/Scanner.py", line 45, in __init__
self.setup()
File "/root/RECON/dirsearch-master/lib/core/Scanner.py", line 49, in setup
firstResponse = self.requester.request(firstPath)
File "/root/RECON/dirsearch-master/lib/connection/Requester.py", line 156, in request
timeout=self.timeout
File "/root/RECON/dirsearch-master/thirdparty/requests/sessions.py", line 482, in get
return self.request('GET', url, **kwargs)
File "/root/RECON/dirsearch-master/thirdparty/requests/sessions.py", line 470, in request
resp = self.send(prep, **send_kwargs)
File "/root/RECON/dirsearch-master/thirdparty/requests/sessions.py", line 610, in send
r.content
File "/root/RECON/dirsearch-master/thirdparty/requests/models.py", line 734, in content
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
File "/root/RECON/dirsearch-master/thirdparty/requests/models.py", line 662, in generate
raise ContentDecodingError(e)
thirdparty.requests.exceptions.ContentDecodingError: ('Received response with content-encoding: gzip, but failed to decode it.', error('Error -3 while decompressing data: incorrect header check',))
|
thirdparty.requests.packages.urllib3.exceptions.DecodeError
|
def _evaluate_cityscapes(self, results, logger, imgfile_prefix):
"""Evaluation in Cityscapes protocol.
Args:
results (list): Testing results of the dataset.
logger (logging.Logger | str | None): Logger used for printing
related information during evaluation. Default: None.
imgfile_prefix (str | None): The prefix of output image file
Returns:
dict[str: float]: Cityscapes evaluation results.
"""
try:
import cityscapesscripts.evaluation.evalPixelLevelSemanticLabeling as CSEval # noqa
except ImportError:
raise ImportError(
'Please run "pip install cityscapesscripts" to '
"install cityscapesscripts first."
)
msg = "Evaluating in Cityscapes style"
if logger is None:
msg = "\n" + msg
print_log(msg, logger=logger)
result_files, tmp_dir = self.format_results(results, imgfile_prefix)
if tmp_dir is None:
result_dir = imgfile_prefix
else:
result_dir = tmp_dir.name
eval_results = dict()
print_log(f"Evaluating results under {result_dir} ...", logger=logger)
CSEval.args.evalInstLevelScore = True
CSEval.args.predictionPath = osp.abspath(result_dir)
CSEval.args.evalPixelAccuracy = True
CSEval.args.JSONOutput = False
seg_map_list = []
pred_list = []
# when evaluating with official cityscapesscripts,
# **_gtFine_labelIds.png is used
for seg_map in mmcv.scandir(self.ann_dir, "gtFine_labelIds.png", recursive=True):
seg_map_list.append(osp.join(self.ann_dir, seg_map))
pred_list.append(CSEval.getPrediction(CSEval.args, seg_map))
eval_results.update(CSEval.evaluateImgLists(pred_list, seg_map_list, CSEval.args))
if tmp_dir is not None:
tmp_dir.cleanup()
return eval_results
|
def _evaluate_cityscapes(self, results, logger, imgfile_prefix):
"""Evaluation in Cityscapes protocol.
Args:
results (list): Testing results of the dataset.
logger (logging.Logger | str | None): Logger used for printing
related information during evaluation. Default: None.
imgfile_prefix (str | None): The prefix of output image file
Returns:
dict[str: float]: Cityscapes evaluation results.
"""
try:
import cityscapesscripts.evaluation.evalPixelLevelSemanticLabeling as CSEval # noqa
except ImportError:
raise ImportError(
'Please run "pip install citscapesscripts" to '
"install cityscapesscripts first."
)
msg = "Evaluating in Cityscapes style"
if logger is None:
msg = "\n" + msg
print_log(msg, logger=logger)
result_files, tmp_dir = self.format_results(results, imgfile_prefix)
if tmp_dir is None:
result_dir = imgfile_prefix
else:
result_dir = tmp_dir.name
eval_results = dict()
print_log(f"Evaluating results under {result_dir} ...", logger=logger)
CSEval.args.evalInstLevelScore = True
CSEval.args.predictionPath = osp.abspath(result_dir)
CSEval.args.evalPixelAccuracy = True
CSEval.args.JSONOutput = False
seg_map_list = []
pred_list = []
# when evaluating with official cityscapesscripts,
# **_gtFine_labelIds.png is used
for seg_map in mmcv.scandir(self.ann_dir, "gtFine_labelIds.png", recursive=True):
seg_map_list.append(osp.join(self.ann_dir, seg_map))
pred_list.append(CSEval.getPrediction(CSEval.args, seg_map))
eval_results.update(CSEval.evaluateImgLists(pred_list, seg_map_list, CSEval.args))
if tmp_dir is not None:
tmp_dir.cleanup()
return eval_results
|
https://github.com/open-mmlab/mmsegmentation/issues/89
|
Traceback (most recent call last):
File "tools/test.py", line 142, in <module>
main()
File "tools/test.py", line 120, in main
outputs = single_gpu_test(model, data_loader, args.show, args.show_dir)
File "/root/code/mmsegmentation/mmseg/apis/test.py", line 62, in single_gpu_test
out_file=out_file)
File "/root/code/mmsegmentation/mmseg/models/segmentors/base.py", line 253, in show_result
img = img * 0.5 + color_seg * 0.5
ValueError: operands could not be broadcast together with shapes (1600,1600,3) (512,512,3)
|
ValueError
|
def slide_inference(self, img, img_meta, rescale):
"""Inference by sliding-window with overlap."""
h_stride, w_stride = self.test_cfg.stride
h_crop, w_crop = self.test_cfg.crop_size
batch_size, _, h_img, w_img = img.size()
assert h_crop <= h_img and w_crop <= w_img, (
"crop size should not greater than image size"
)
num_classes = self.num_classes
h_grids = max(h_img - h_crop + h_stride - 1, 0) // h_stride + 1
w_grids = max(w_img - w_crop + w_stride - 1, 0) // w_stride + 1
preds = img.new_zeros((batch_size, num_classes, h_img, w_img))
count_mat = img.new_zeros((batch_size, 1, h_img, w_img))
for h_idx in range(h_grids):
for w_idx in range(w_grids):
y1 = h_idx * h_stride
x1 = w_idx * w_stride
y2 = min(y1 + h_crop, h_img)
x2 = min(x1 + w_crop, w_img)
y1 = max(y2 - h_crop, 0)
x1 = max(x2 - w_crop, 0)
crop_img = img[:, :, y1:y2, x1:x2]
crop_seg_logit = self.encode_decode(crop_img, img_meta)
preds += F.pad(
crop_seg_logit,
(int(x1), int(preds.shape[3] - x2), int(y1), int(preds.shape[2] - y2)),
)
count_mat[:, :, y1:y2, x1:x2] += 1
assert (count_mat == 0).sum() == 0
if torch.onnx.is_in_onnx_export():
# cast count_mat to constant while exporting to ONNX
count_mat = torch.from_numpy(count_mat.cpu().detach().numpy()).to(
device=img.device
)
preds = preds / count_mat
if rescale:
preds = resize(
preds,
size=img_meta[0]["ori_shape"][:2],
mode="bilinear",
align_corners=self.align_corners,
warning=False,
)
return preds
|
def slide_inference(self, img, img_meta, rescale):
"""Inference by sliding-window with overlap."""
h_stride, w_stride = self.test_cfg.stride
h_crop, w_crop = self.test_cfg.crop_size
batch_size, _, h_img, w_img = img.size()
assert h_crop <= h_img and w_crop <= w_img, (
"crop size should not greater than image size"
)
num_classes = self.num_classes
h_grids = max(h_img - h_crop + h_stride - 1, 0) // h_stride + 1
w_grids = max(w_img - w_crop + w_stride - 1, 0) // w_stride + 1
preds = img.new_zeros((batch_size, num_classes, h_img, w_img))
count_mat = img.new_zeros((batch_size, 1, h_img, w_img))
for h_idx in range(h_grids):
for w_idx in range(w_grids):
y1 = h_idx * h_stride
x1 = w_idx * w_stride
y2 = min(y1 + h_crop, h_img)
x2 = min(x1 + w_crop, w_img)
y1 = max(y2 - h_crop, 0)
x1 = max(x2 - w_crop, 0)
crop_img = img[:, :, y1:y2, x1:x2]
crop_seg_logit = self.encode_decode(crop_img, img_meta)
preds += F.pad(
crop_seg_logit,
(int(x1), int(preds.shape[3] - x2), int(y1), int(preds.shape[2] - y2)),
)
count_mat[:, :, y1:y2, x1:x2] += 1
assert (count_mat == 0).sum() == 0
# We want to regard count_mat as a constant while exporting to ONNX
count_mat = torch.from_numpy(count_mat.detach().numpy())
preds = preds / count_mat
if rescale:
preds = resize(
preds,
size=img_meta[0]["ori_shape"][:2],
mode="bilinear",
align_corners=self.align_corners,
warning=False,
)
return preds
|
https://github.com/open-mmlab/mmsegmentation/issues/89
|
Traceback (most recent call last):
File "tools/test.py", line 142, in <module>
main()
File "tools/test.py", line 120, in main
outputs = single_gpu_test(model, data_loader, args.show, args.show_dir)
File "/root/code/mmsegmentation/mmseg/apis/test.py", line 62, in single_gpu_test
out_file=out_file)
File "/root/code/mmsegmentation/mmseg/models/segmentors/base.py", line 253, in show_result
img = img * 0.5 + color_seg * 0.5
ValueError: operands could not be broadcast together with shapes (1600,1600,3) (512,512,3)
|
ValueError
|
def parse_args():
parser = argparse.ArgumentParser(description="Train a segmentor")
parser.add_argument("config", help="train config file path")
parser.add_argument("--work-dir", help="the dir to save logs and models")
parser.add_argument("--load-from", help="the checkpoint file to load weights from")
parser.add_argument("--resume-from", help="the checkpoint file to resume from")
parser.add_argument(
"--no-validate",
action="store_true",
help="whether not to evaluate the checkpoint during training",
)
group_gpus = parser.add_mutually_exclusive_group()
group_gpus.add_argument(
"--gpus",
type=int,
help="number of gpus to use (only applicable to non-distributed training)",
)
group_gpus.add_argument(
"--gpu-ids",
type=int,
nargs="+",
help="ids of gpus to use (only applicable to non-distributed training)",
)
parser.add_argument("--seed", type=int, default=None, help="random seed")
parser.add_argument(
"--deterministic",
action="store_true",
help="whether to set deterministic options for CUDNN backend.",
)
parser.add_argument(
"--options", nargs="+", action=DictAction, help="custom options"
)
parser.add_argument(
"--launcher",
choices=["none", "pytorch", "slurm", "mpi"],
default="none",
help="job launcher",
)
parser.add_argument("--local_rank", type=int, default=0)
args = parser.parse_args()
if "LOCAL_RANK" not in os.environ:
os.environ["LOCAL_RANK"] = str(args.local_rank)
return args
|
def parse_args():
parser = argparse.ArgumentParser(description="Train a segmentor")
parser.add_argument("config", help="train config file path")
parser.add_argument("--work_dir", help="the dir to save logs and models")
parser.add_argument("--load-from", help="the checkpoint file to load weights from")
parser.add_argument("--resume-from", help="the checkpoint file to resume from")
parser.add_argument(
"--no-validate",
action="store_true",
help="whether not to evaluate the checkpoint during training",
)
group_gpus = parser.add_mutually_exclusive_group()
group_gpus.add_argument(
"--gpus",
type=int,
help="number of gpus to use (only applicable to non-distributed training)",
)
group_gpus.add_argument(
"--gpu-ids",
type=int,
nargs="+",
help="ids of gpus to use (only applicable to non-distributed training)",
)
parser.add_argument("--seed", type=int, default=None, help="random seed")
parser.add_argument(
"--deterministic",
action="store_true",
help="whether to set deterministic options for CUDNN backend.",
)
parser.add_argument(
"--options", nargs="+", action=DictAction, help="custom options"
)
parser.add_argument(
"--launcher",
choices=["none", "pytorch", "slurm", "mpi"],
default="none",
help="job launcher",
)
parser.add_argument("--local_rank", type=int, default=0)
args = parser.parse_args()
if "LOCAL_RANK" not in os.environ:
os.environ["LOCAL_RANK"] = str(args.local_rank)
return args
|
https://github.com/open-mmlab/mmsegmentation/issues/89
|
Traceback (most recent call last):
File "tools/test.py", line 142, in <module>
main()
File "tools/test.py", line 120, in main
outputs = single_gpu_test(model, data_loader, args.show, args.show_dir)
File "/root/code/mmsegmentation/mmseg/apis/test.py", line 62, in single_gpu_test
out_file=out_file)
File "/root/code/mmsegmentation/mmseg/models/segmentors/base.py", line 253, in show_result
img = img * 0.5 + color_seg * 0.5
ValueError: operands could not be broadcast together with shapes (1600,1600,3) (512,512,3)
|
ValueError
|
def _load_config(self) -> None:
"""Load config, monitors, alerters and loggers."""
config = EnvironmentAwareConfigParser()
if not self._config_file.exists():
raise RuntimeError(
"Configuration file {} does not exist".format(self._config_file)
)
config.read(self._config_file)
self.interval = config.getint("monitor", "interval")
self.pidfile = config.get("monitor", "pidfile", fallback=None)
hup_file = config.get("monitor", "hup_file", fallback=None)
if hup_file is not None:
self._hup_file = Path(hup_file)
module_logger.info(
"Watching modification time of %s; increase it to trigger a config reload",
hup_file,
)
self._check_hup_file()
if not self._no_network and config.get("monitor", "remote", fallback="0") == "1":
self._network = True
self._remote_port = int(config.get("monitor", "remote_port"))
self._network_key = config.get("monitor", "key", fallback=None)
self._network_bind_host = config.get("monitor", "bind_host", fallback="")
self._ipv4_only = cast(bool, config.get("monitor", "ipv4_only", fallback=False))
else:
self._network = False
monitors_file = Path(config.get("monitor", "monitors", fallback="monitors.ini"))
self._load_monitors(monitors_file)
count = self.count_monitors()
if count == 0:
module_logger.critical("No monitors loaded :(")
self._load_loggers(config)
self._load_alerters(config)
if not self._verify_dependencies():
raise RuntimeError("Broken dependency configuration")
if not self.verify_alerting():
module_logger.critical("No alerters defined and no remote logger found")
|
def _load_config(self) -> None:
"""Load config, monitors, alerters and loggers."""
config = EnvironmentAwareConfigParser()
if not self._config_file.exists():
raise RuntimeError(
"Configuration file {} does not exist".format(self._config_file)
)
config.read(self._config_file)
self.interval = config.getint("monitor", "interval")
self.pidfile = config.get("monitor", "pidfile", fallback=None)
hup_file = config.get("monitor", "hup_file", fallback=None)
if hup_file is not None:
self._hup_file = Path(hup_file)
module_logger.info(
"Watching modification time of %s; increase it to trigger a config reload",
hup_file,
)
self._check_hup_file()
if not self._no_network and config.get("monitor", "remote", fallback="0") == "1":
self._network = True
self._remote_port = int(config.get("monitor", "remote_port"))
self._network_key = config.get("monitor", "key", fallback=None)
self._network_bind_host = config.get("monitor", "bind_host", fallback="")
self._ipv4_only = cast(bool, config.get("monitor", "ipv4_only", fallback=False))
else:
self._network = False
monitors_file = Path(config.get("monitor", "monitors", fallback="monitors.ini"))
self._load_monitors(monitors_file)
count = self.count_monitors()
if count == 0:
module_logger.critical("No monitors loaded :(")
self._load_loggers(config)
self._load_alerters(config)
if not self._verify_dependencies():
raise RuntimeError("Broken dependency configuration")
if not self.verify_alerting():
module_logger.critical("No alerters defined and no remote logger found")
if self._network:
self._start_network_thread()
|
https://github.com/jamesoff/simplemonitor/issues/617
|
% simplemonitor -t
[...]
2020-09-10 10:12:46 INFO (simplemonitor) Starting remote listener thread (allowing pickle data)
Traceback (most recent call last):
File "/root/.local/share/virtualenvs/sm-pip-9yYbf1A2/bin/simplemonitor", line 11, in <module>
load_entry_point('simplemonitor', 'console_scripts', 'simplemonitor')()
File "/home/james/src/simplemonitor/simplemonitor/monitor.py", line 203, in main
one_shot=options.one_shot,
File "/home/james/src/simplemonitor/simplemonitor/simplemonitor.py", line 73, in __init__
self._load_config()
File "/home/james/src/simplemonitor/simplemonitor/simplemonitor.py", line 120, in _load_config
self._start_network_thread()
File "/home/james/src/simplemonitor/simplemonitor/simplemonitor.py", line 139, in _start_network_thread
bind_host=self._network_bind_host,
File "/home/james/src/simplemonitor/simplemonitor/Loggers/network.py", line 133, in __init__
self.sock.bind((bind_host, port))
OSError: [Errno 98] Address already in use
|
OSError
|
def run(self) -> None:
self._create_pid_file()
self._start_network_thread()
module_logger.info(
"=== Starting... (loop runs every %ds) Hit ^C to stop", self.interval
)
loop = True
loops = self._max_loops
heartbeat = True
while loop:
try:
if loops > 0:
loops -= 1
if loops == 0:
module_logger.warning(
"Ran out of loop counter, will stop after this one"
)
loop = False
if self._need_hup or self._check_hup_file():
try:
module_logger.warning("Reloading configuration")
self._load_config()
self._start_network_thread()
self.hup_loggers()
self._need_hup = False
except Exception:
module_logger.exception("Error while reloading configuration")
sys.exit(1)
self.run_loop()
if module_logger.level in ["error", "critical", "warn"] and self.heartbeat:
heartbeat = not heartbeat
if heartbeat:
sys.stdout.write(".")
sys.stdout.flush()
except KeyboardInterrupt:
module_logger.info("Received ^C")
loop = False
except Exception:
module_logger.exception("Caught unhandled exception during main loop")
if loop and self._network:
if (
self._remote_listening_thread
and not self._remote_listening_thread.is_alive()
):
module_logger.error("Listener thread died :(")
self._start_network_thread()
if self.one_shot:
break
try:
if loop:
time.sleep(self.interval)
except Exception:
module_logger.info("Quitting")
loop = False
self._remove_pid_file()
|
def run(self) -> None:
self._create_pid_file()
module_logger.info(
"=== Starting... (loop runs every %ds) Hit ^C to stop", self.interval
)
loop = True
loops = self._max_loops
heartbeat = True
while loop:
try:
if loops > 0:
loops -= 1
if loops == 0:
module_logger.warning(
"Ran out of loop counter, will stop after this one"
)
loop = False
if self._need_hup or self._check_hup_file():
try:
module_logger.warning("Reloading configuration")
self._load_config()
self.hup_loggers()
self._need_hup = False
except Exception:
module_logger.exception("Error while reloading configuration")
sys.exit(1)
self.run_loop()
if module_logger.level in ["error", "critical", "warn"] and self.heartbeat:
heartbeat = not heartbeat
if heartbeat:
sys.stdout.write(".")
sys.stdout.flush()
except KeyboardInterrupt:
module_logger.info("Received ^C")
loop = False
except Exception:
module_logger.exception("Caught unhandled exception during main loop")
if loop and self._network:
if (
self._remote_listening_thread
and not self._remote_listening_thread.is_alive()
):
module_logger.error("Listener thread died :(")
self._start_network_thread()
if self.one_shot:
break
try:
if loop:
time.sleep(self.interval)
except Exception:
module_logger.info("Quitting")
loop = False
self._remove_pid_file()
|
https://github.com/jamesoff/simplemonitor/issues/617
|
% simplemonitor -t
[...]
2020-09-10 10:12:46 INFO (simplemonitor) Starting remote listener thread (allowing pickle data)
Traceback (most recent call last):
File "/root/.local/share/virtualenvs/sm-pip-9yYbf1A2/bin/simplemonitor", line 11, in <module>
load_entry_point('simplemonitor', 'console_scripts', 'simplemonitor')()
File "/home/james/src/simplemonitor/simplemonitor/monitor.py", line 203, in main
one_shot=options.one_shot,
File "/home/james/src/simplemonitor/simplemonitor/simplemonitor.py", line 73, in __init__
self._load_config()
File "/home/james/src/simplemonitor/simplemonitor/simplemonitor.py", line 120, in _load_config
self._start_network_thread()
File "/home/james/src/simplemonitor/simplemonitor/simplemonitor.py", line 139, in _start_network_thread
bind_host=self._network_bind_host,
File "/home/james/src/simplemonitor/simplemonitor/Loggers/network.py", line 133, in __init__
self.sock.bind((bind_host, port))
OSError: [Errno 98] Address already in use
|
OSError
|
def log_result(self, logger: Logger) -> None:
"""Use the given logger object to log our state."""
logger.check_dependencies(self.failed + self.still_failing + self.skipped)
with logger:
for key, monitor in self.monitors.items():
if check_group_match(monitor.group, logger.groups):
logger.save_result2(key, monitor)
else:
module_logger.debug(
"not logging for %s due to group mismatch (monitor in group %s, "
"logger has groups %s",
key,
monitor.group,
logger.groups,
)
try:
# need to work on a copy here to prevent the dicts changing under us
# during the loop, as remote instances can connect and update our data
# unpredictably
for host_monitors in self.remote_monitors.copy().values():
for name, monitor in host_monitors.copy().items():
if check_group_match(monitor.group, logger.groups):
logger.save_result2(name, monitor)
else:
module_logger.debug(
"not logging for %s due to group mismatch (monitor in group %s, "
"logger has groups %s",
name,
monitor.group,
logger.groups,
)
except Exception: # pragma: no cover
module_logger.exception("exception while logging remote monitors")
|
def log_result(self, logger: Logger) -> None:
"""Use the given logger object to log our state."""
logger.check_dependencies(self.failed + self.still_failing + self.skipped)
with logger:
for key, monitor in self.monitors.items():
if check_group_match(monitor.group, logger.groups):
logger.save_result2(key, monitor)
else:
module_logger.debug(
"not logging for %s due to group mismatch (monitor in group %s, "
"logger has groups %s",
key,
monitor.group,
logger.groups,
)
try:
for host_monitors in self.remote_monitors.values():
for name, monitor in host_monitors.items():
if check_group_match(monitor.group, logger.groups):
logger.save_result2(name, monitor)
else:
module_logger.debug(
"not logging for %s due to group mismatch (monitor in group %s, "
"logger has groups %s",
name,
monitor.group,
logger.groups,
)
except Exception: # pragma: no cover
module_logger.exception("exception while logging remote monitors")
|
https://github.com/jamesoff/simplemonitor/issues/623
|
2020-09-15 23:30:11 ERROR (simplemonitor) Caught unhandled exception during main loop
Traceback (most recent call last):
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 766, in run
self.run_loop()
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 735, in run_loop
self.do_alerts()
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 663, in do_alerts
self.do_alert(alerter)
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 583, in do_alert
for (name, monitor) in host_monitors.items():
RuntimeError: dictionary changed size during iteration
|
RuntimeError
|
def do_alert(self, alerter: Alerter) -> None:
"""Use the given alerter object to send an alert, if needed."""
alerter.check_dependencies(self.failed + self.still_failing + self.skipped)
for name, this_monitor in list(self.monitors.items()):
# Don't generate alerts for monitors which want it done remotely
if this_monitor.remote_alerting:
module_logger.debug(
"skipping alert for monitor %s as it wants remote alerting", name
)
continue
try:
if this_monitor.notify:
alerter.send_alert(name, this_monitor)
else:
module_logger.warning("monitor %s has notifications disabled", name)
except Exception: # pragma: no cover
module_logger.exception("exception caught while alerting for %s", name)
for host_monitors in self.remote_monitors.copy().values():
for name, monitor in host_monitors.copy().items():
try:
if monitor.remote_alerting:
alerter.send_alert(name, monitor)
else:
module_logger.debug(
"not alerting for monitor %s as it doesn't want remote alerts",
name,
)
except Exception: # pragma: no cover
module_logger.exception(
"exception caught while alerting for remote monitor %s", name
)
|
def do_alert(self, alerter: Alerter) -> None:
"""Use the given alerter object to send an alert, if needed."""
alerter.check_dependencies(self.failed + self.still_failing + self.skipped)
for name, this_monitor in list(self.monitors.items()):
# Don't generate alerts for monitors which want it done remotely
if this_monitor.remote_alerting:
module_logger.debug(
"skipping alert for monitor %s as it wants remote alerting", name
)
continue
try:
if this_monitor.notify:
alerter.send_alert(name, this_monitor)
else:
module_logger.warning("monitor %s has notifications disabled", name)
except Exception: # pragma: no cover
module_logger.exception("exception caught while alerting for %s", name)
for host_monitors in self.remote_monitors.values():
for name, monitor in host_monitors.items():
try:
if monitor.remote_alerting:
alerter.send_alert(name, monitor)
else:
module_logger.debug(
"not alerting for monitor %s as it doesn't want remote alerts",
name,
)
except Exception: # pragma: no cover
module_logger.exception(
"exception caught while alerting for remote monitor %s", name
)
|
https://github.com/jamesoff/simplemonitor/issues/623
|
2020-09-15 23:30:11 ERROR (simplemonitor) Caught unhandled exception during main loop
Traceback (most recent call last):
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 766, in run
self.run_loop()
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 735, in run_loop
self.do_alerts()
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 663, in do_alerts
self.do_alert(alerter)
File "/root/monitor/simplemonitor/simplemonitor/simplemonitor.py", line 583, in do_alert
for (name, monitor) in host_monitors.items():
RuntimeError: dictionary changed size during iteration
|
RuntimeError
|
def __init__(self, allow_pickle=True):
"""Main class turn on."""
self.allow_pickle = allow_pickle
self.monitors = {}
self.failed = []
self.still_failing = []
self.skipped = []
self.warning = []
self.remote_monitors = {}
self.loggers = {}
self.alerters = {}
try:
signal.signal(signal.SIGHUP, self.hup_loggers)
except ValueError: # pragma: no cover
module_logger.warning(
"Unable to trap SIGHUP... maybe it doesn't exist on this platform.\n"
)
except AttributeError: # pragma: no cover
module_logger.warning(
"Unable to trap SIGHUP... maybe it doesn't exist on this platform.\n"
)
|
def __init__(self, allow_pickle=True):
"""Main class turn on."""
self.allow_pickle = allow_pickle
self.monitors = {}
self.failed = []
self.still_failing = []
self.skipped = []
self.warning = []
self.remote_monitors = {}
self.loggers = {}
self.alerters = {}
try:
signal.signal(signal.SIGHUP, self.hup_loggers)
except ValueError: # pragma: no cover
module_logger.warning(
"Unable to trap SIGHUP... maybe it doesn't exist on this platform.\n"
)
|
https://github.com/jamesoff/simplemonitor/issues/216
|
more monitors.ini
[monitor]
interval=60
[localhost]
type=host
host=localhost
tolerance=2
python monitor.py -v
Warning: --verbose is deprecated; use --log-level=info
2019-01-18 13:23:28 INFO (simplemonitor) === SimpleMonitor v1.7
2019-01-18 13:23:28 INFO (simplemonitor) Loading main config from monitors.ini
2019-01-18 13:23:28 INFO (simplemonitor) Loading monitor config from monitors.ini
Traceback (most recent call last):
File "monitor.py", line 403, in <module>
main()
File "monitor.py", line 263, in main
m = SimpleMonitor(allow_pickle=allow_pickle)
File "D:\Seafile\dev\dev-perso\simplemonitor\simplemonitor.py", line 36, in __init__
signal.signal(signal.SIGHUP, self.hup_loggers)
AttributeError: module 'signal' has no attribute 'SIGHUP'
|
AttributeError
|
def makeYbus(baseMVA, bus, branch):
"""Builds the bus admittance matrix and branch admittance matrices.
Returns the full bus admittance matrix (i.e. for all buses) and the
matrices C{Yf} and C{Yt} which, when multiplied by a complex voltage
vector, yield the vector currents injected into each line from the
"from" and "to" buses respectively of each line. Does appropriate
conversions to p.u.
@see: L{makeSbus}
@author: Ray Zimmerman (PSERC Cornell)
@author: Richard Lincoln
modified by Florian Schaefer (to use numba) (florian.schaefer@uni-kassel.de)
"""
## constants
nb = bus.shape[0] ## number of buses
nl = branch.shape[0] ## number of lines
## for each branch, compute the elements of the branch admittance matrix where
##
## | If | | Yff Yft | | Vf |
## | | = | | * | |
## | It | | Ytf Ytt | | Vt |
##
Ytt, Yff, Yft, Ytf = branch_vectors(branch, nl)
## compute shunt admittance
## if Psh is the real power consumed by the shunt at V = 1.0 p.u.
## and Qsh is the reactive power injected by the shunt at V = 1.0 p.u.
## then Psh - j Qsh = V * conj(Ysh * V) = conj(Ysh) = Gs - j Bs,
## i.e. Ysh = Psh + j Qsh, so ...
## vector of shunt admittances
Ysh = (bus[:, GS] + 1j * bus[:, BS]) / baseMVA
## build connection matrices
f = np.real(branch[:, F_BUS]).astype(int) ## list of "from" buses
t = np.real(branch[:, T_BUS]).astype(int) ## list of "to" buses
## build Yf and Yt such that Yf * V is the vector of complex branch currents injected
## at each branch's "from" bus, and Yt is the same for the "to" bus end
i = np.hstack([np.arange(nl), np.arange(nl)]) ## double set of row indices
Yf_x = np.hstack([Yff, Yft])
Yt_x = np.hstack([Ytf, Ytt])
col_Y = np.hstack([f, t])
Yf = coo_matrix((Yf_x, (i, col_Y)), (nl, nb)).tocsr()
Yt = coo_matrix((Yt_x, (i, col_Y)), (nl, nb)).tocsr()
Yx, Yj, Yp, nnz = gen_Ybus(
Yf_x,
Yt_x,
Ysh,
col_Y,
f,
t,
np.argsort(f),
np.argsort(t),
nb,
nl,
np.arange(nl, dtype=np.int64),
)
Ybus = csr_matrix((np.resize(Yx, nnz), np.resize(Yj, nnz), Yp), (nb, nb))
return Ybus, Yf, Yt
|
def makeYbus(baseMVA, bus, branch):
"""Builds the bus admittance matrix and branch admittance matrices.
Returns the full bus admittance matrix (i.e. for all buses) and the
matrices C{Yf} and C{Yt} which, when multiplied by a complex voltage
vector, yield the vector currents injected into each line from the
"from" and "to" buses respectively of each line. Does appropriate
conversions to p.u.
@see: L{makeSbus}
@author: Ray Zimmerman (PSERC Cornell)
@author: Richard Lincoln
modified by Florian Schaefer (to use numba) (florian.schaefer@uni-kassel.de)
"""
## constants
nb = bus.shape[0] ## number of buses
nl = branch.shape[0] ## number of lines
## for each branch, compute the elements of the branch admittance matrix where
##
## | If | | Yff Yft | | Vf |
## | | = | | * | |
## | It | | Ytf Ytt | | Vt |
##
Ytt, Yff, Yft, Ytf = branch_vectors(branch, nl)
## compute shunt admittance
## if Psh is the real power consumed by the shunt at V = 1.0 p.u.
## and Qsh is the reactive power injected by the shunt at V = 1.0 p.u.
## then Psh - j Qsh = V * conj(Ysh * V) = conj(Ysh) = Gs - j Bs,
## i.e. Ysh = Psh + j Qsh, so ...
## vector of shunt admittances
Ysh = (bus[:, GS] + 1j * bus[:, BS]) / baseMVA
## build connection matrices
f = np.real(branch[:, F_BUS]).astype(int) ## list of "from" buses
t = np.real(branch[:, T_BUS]).astype(int) ## list of "to" buses
## build Yf and Yt such that Yf * V is the vector of complex branch currents injected
## at each branch's "from" bus, and Yt is the same for the "to" bus end
i = np.hstack([np.arange(nl), np.arange(nl)]) ## double set of row indices
Yf_x = np.hstack([Yff, Yft])
Yt_x = np.hstack([Ytf, Ytt])
col_Y = np.hstack([f, t])
Yf = coo_matrix((Yf_x, (i, col_Y)), (nl, nb)).tocsr()
Yt = coo_matrix((Yt_x, (i, col_Y)), (nl, nb)).tocsr()
Yx, Yj, Yp, nnz = gen_Ybus(
Yf_x,
Yt_x,
Ysh,
col_Y,
f,
t,
np.argsort(f),
np.argsort(t),
nb,
nl,
np.arange(nl, dtype=np.int64),
)
Ybus = csr_matrix((np.resize(Yx, nnz), np.resize(Yj, nnz), Yp))
return Ybus, Yf, Yt
|
https://github.com/e2nIEE/pandapower/issues/780
|
import pandapower as pp
import pandapower.networks as ppnw
net = ppnw.case9()
b = pp.create_bus(net, vn_kv=100)
pp.create_ext_grid(net, b)
1
pp.create_load(net, b, p_mw=10)
3
pp.runpp(net)
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/run.py", line 225, in runpp
_powerflow(net, **kwargs)
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/powerflow.py", line 74, in _powerflow
result = _run_pf_algorithm(ppci, net["_options"], **kwargs)
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/powerflow.py", line 151, in _run_pf_algorithm
result = _run_newton_raphson_pf(ppci, options)
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/pf/run_newton_raphson_pf.py", line 51, in _run_newton_raphson_pf
ppci, success, iterations = _run_ac_pf_without_qlims_enforced(ppci, options)
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/pf/run_newton_raphson_pf.py", line 131, in _run_ac_pf_without_qlims_enforced
V, success, iterations, J, Vm_it, Va_it = newton(Ybus, Sbus, V0, pv, pq, ppci, options)
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/pypower/newtonpf.py", line 87, in newtonpf
F = _evaluate_Fx(Ybus, V, Sbus, pv, pq)
File "/Users/michael/Dropbox/mac_git/pandapower/pandapower/pypower/newtonpf.py", line 132, in _evaluate_Fx
mis = V * conj(Ybus * V) - Sbus
File "/Users/michael/miniconda3/envs/pandapower/lib/python3.7/site-packages/scipy/sparse/base.py", line 499, in __mul__
raise ValueError('dimension mismatch')
ValueError: dimension mismatch
|
ValueError
|
def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
# Add several keys to the 'resources' subschema so they don't get stripped
# from the resource dicts by validation.
schema["resources"].update(
{
"created": [ckan.lib.navl.validators.ignore_missing],
"position": [not_empty],
"last_modified": [ckan.lib.navl.validators.ignore_missing],
"cache_last_updated": [ckan.lib.navl.validators.ignore_missing],
"webstore_last_updated": [ckan.lib.navl.validators.ignore_missing],
"revision_timestamp": [],
"resource_group_id": [],
"cache_last_updated": [],
"webstore_last_updated": [],
"size": [],
"state": [],
"last_modified": [],
"mimetype": [],
"cache_url": [],
"name": [],
"webstore_url": [],
"mimetype_inner": [],
"resource_type": [],
}
)
schema.update(
{
"state": [ckan.lib.navl.validators.ignore_missing],
"isopen": [ignore_missing],
"license_url": [ignore_missing],
}
)
schema["groups"].update(
{
"description": [ignore_missing],
}
)
# Remove validators for several keys from the schema so validation doesn't
# strip the keys from the package dicts if the values are 'missing' (i.e.
# None).
schema["author"] = []
schema["author_email"] = []
schema["maintainer"] = []
schema["maintainer_email"] = []
schema["license_id"] = []
schema["notes"] = []
schema["url"] = []
schema["version"] = []
# Add several keys that are missing from default_package_schema(), so
# validation doesn't strip the keys from the package dicts.
# schema['license_title'] = []
schema["metadata_created"] = []
schema["metadata_modified"] = []
schema["num_resources"] = []
schema["num_tags"] = []
schema["organization"] = []
schema["owner_org"] = []
schema["private"] = []
schema["revision_id"] = []
schema["revision_timestamp"] = []
schema["tracking_summary"] = []
schema["license_title"] = []
return schema
|
def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
schema["resources"].update(
{
"created": [ckan.lib.navl.validators.ignore_missing],
"position": [not_empty],
"last_modified": [ckan.lib.navl.validators.ignore_missing],
"cache_last_updated": [ckan.lib.navl.validators.ignore_missing],
"webstore_last_updated": [ckan.lib.navl.validators.ignore_missing],
"revision_timestamp": [],
"resource_group_id": [],
"cache_last_updated": [],
"webstore_last_updated": [],
"size": [],
"state": [],
"last_modified": [],
"mimetype": [],
"cache_url": [],
"name": [],
"webstore_url": [],
"mimetype_inner": [],
"resource_type": [],
}
)
schema.update(
{
"state": [ckan.lib.navl.validators.ignore_missing],
"isopen": [ignore_missing],
"license_url": [ignore_missing],
}
)
schema["groups"].update(
{
"description": [ignore_missing],
}
)
# Remove validators for several keys from the schema so validation doesn't
# strip the keys from the package dicts if the values are 'missing' (i.e.
# None).
schema["author"] = []
schema["author_email"] = []
schema["maintainer"] = []
schema["maintainer_email"] = []
schema["license_id"] = []
schema["notes"] = []
schema["url"] = []
schema["version"] = []
# Add several keys that are missing from default_package_schema(), so
# validation doesn't strip the keys from the package dicts.
# schema['license_title'] = []
schema["metadata_created"] = []
schema["metadata_modified"] = []
schema["num_resources"] = []
schema["num_tags"] = []
schema["organization"] = []
schema["owner_org"] = []
schema["private"] = []
schema["revision_id"] = []
schema["revision_timestamp"] = []
schema["tracking_summary"] = []
schema["license_title"] = []
return schema
|
https://github.com/ckan/ckan/issues/396
|
nosetests --ckan --with-pylons=test-core.ini ckan ξ² ξ idatasetform-fixes
.........................................................................................................................................................S.S.S.S.S.S..............................................................F.F........................................E.E.EEE.................EEE...E.E.....E...........................................................................................................................................SSS.........S.................................S.............................................................S...............................................................E.......S...................................SSS....................................................EEE...EEE......E.....E................................................................................................................................................................................................................................................S...........S..............................................E..............SE.................S.S.....................................................................................................................................................................................
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_delete_resources
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1253, in test_01_delete_resources
if len(package_dict['resources']) > 0:
KeyError: "'resources'\n-------------------- >> begin captured logging << --------------------\nckan.model: INFO: Database table data deleted\nckan.model: INFO: Database initialised\nckan.model: INFO: Database rebuilt\nckan.model: INFO: Database initialised\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.026 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_extras
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1366, in test_01_update_extras
if len(package_dict['extras']) > 0:
KeyError: "'extras'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.039 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_package
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1383, in test_01_update_package
self._update_package(package_dict, user=self.normal_user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1055, in _update_package
for group_dict in package['groups']:
KeyError: "'groups'\n-------------------- >> begin captured stdout << ---------------------\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n\n--------------------- >> end captured stdout << ----------------------\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.003 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.024 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.025 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.022 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.019 seconds\nckan.lib.base: INFO: /api/action/package_update render time 0.655 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.027 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.045 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.020 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_resource
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1393, in test_01_update_resource
for resource in package_dict['resources']:
KeyError: "'resources'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.031 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_resource_not_logged_in
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1405, in test_01_update_resource_not_logged_in
for resource in package_dict['resources']:
KeyError: "'resources'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.003 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.024 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_add_extras
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1981, in test_add_extras
self._add_extra(package_dict, user=self.normal_user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 667, in _add_extra
[group['name'] for group in package_dict['groups']],
KeyError: "'groups'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.005 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.015 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_add_resources
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1428, in test_add_resources
self._add_resource(package_dict, user=self.normal_user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 391, in _add_resource
[group['name'] for group in package['groups']], apikey=apikey)
KeyError: "'groups'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.003 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.021 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_add_tag
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1605, in test_add_tag
for group_dict in pkg_dict['groups']:
KeyError: "'groups'\n-------------------- >> begin captured stdout << ---------------------\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n\n--------------------- >> end captured stdout << ----------------------\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_show render time 0.024 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.035 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.040 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.021 seconds\nckan.lib.base: INFO: /api/action/package_update render time 0.773 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.030 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.023 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.022 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_delete_extras
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1994, in test_delete_extras
if len(package_dict['extras']) > 0:
KeyError: "'extras'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.032 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_delete_package
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1440, in test_delete_package
self._delete_package(package_dict)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1208, in _delete_package
for group_dict in package['groups']:
KeyError: "'groups'\n-------------------- >> begin captured stdout << ---------------------\n@@@@@@@@ {'Authorization': 'ac42312b-5672-47df-bcb4-518c0ec2750d'}\n@@@@@@@@ {'Authorization': 'ac42312b-5672-47df-bcb4-518c0ec2750d'}\n\n--------------------- >> end captured stdout << ----------------------\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.029 seconds\nckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.023 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/0af83051-c69b-4857-b63f-7654ad235c91/activity render time 0.021 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.021 seconds\nckan.lib.base: INFO: /api/action/package_delete render time 0.204 seconds\nckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.030 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/0af83051-c69b-4857-b63f-7654ad235c91/activity render time 0.024 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.023 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_update_user
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1500, in test_update_user
self._update_user(user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 899, in _update_user
extra_environ={'Authorization': str(user['apikey'])})
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 262, in post
expect_errors=expect_errors)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 243, in _gen_request
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 406, in do_request
self._check_status(status, res)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 439, in _check_status
res.body))
AppError: Bad response: 400 Bad Request (not 200 OK or 3xx redirect for /api/action/user_update)
"Bad request - JSON Error: Error decoding JSON data. Error: ValueError('Unterminated string starting at: line 1 column 952 (char 952)',) JSON data extracted from the request: '{\"openid\": null, \"about\": \"edited\", \"apikey\": \"1eaa43d6-de22-4723-8d51-ebfcd6390d54\", \"display_name\": \"annafan\", \"name\": \"annafan\", \"created\": \"2013-02-14T15:05:36.323931\", \"reset_key\": null, \"id\": \"28212825-2dc2-4878-9a03-b27d63e9e1e3\", \"sysadmin\": false, \"activity_streams_email_notifications\": false, \"email_hash\": \"d41d8cd98f00b204e9800998ecf8427e\", \"datasets\": [{\"name\": \"annakarenina\", \"isopen\": true, \"title\": \"A Novel By Tolstoy\", \"url\": \"http://www.annakarenina.com\", \"notes\": \"Some test notes\\\\n\\\\n### A 3rd level heading\\\\n\\\\n**Some bolded text.**\\\\n\\\\n*Some italicized text.*\\\\n\\\\nForeign characters:\\\\nu with umlaut \\\\u00fc\\\\n66-style quote \\\\u201c\\\\nforeign word: th\\\\u00fcmb\\\\n\\\\nNeeds escaping:\\\\nleft arrow <\\\\n\\\\n<http://ckan.net/>\\\\n\\\\n\", \"license_title\": \"Other (Open)\", \"state\": \"active\", \"version\": \"0.7a\", \"resources\": [{\"hash\": \"abc123\", \"description\": \"Full text. Needs escaping: \\\\\" Umlaut: \\\\u00fc\", \"created\": \"2013-02-14T15:05:36.303361\", \"url\": \"http://www.annakarenina.com/download/x'"
-------------------- >> begin captured logging << --------------------
ckan.lib.base: INFO: /api/action/user_show render time 0.444 seconds
ckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.028 seconds
ckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.026 seconds
ckan.logic: CRITICAL: activity_create was pass extra keywords {'ignore_auth': True}
ckan.lib.base: INFO: /api/action/user_update render time 0.087 seconds
ckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.027 seconds
ckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.026 seconds
ckan.lib.base: INFO: /api/action/user_show render time 0.421 seconds
ckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.028 seconds
ckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.025 seconds
ckan.controllers.api: ERROR: Bad request data: Error decoding JSON data. Error: ValueError('Unterminated string starting at: line 1 column 952 (char 952)',) JSON data extracted from the request: '{"openid": null, "about": "edited", "apikey": "1eaa43d6-de22-4723-8d51-ebfcd6390d54", "display_name": "annafan", "name": "annafan", "created": "2013-02-14T15:05:36.323931", "reset_key": null, "id": "28212825-2dc2-4878-9a03-b27d63e9e1e3", "sysadmin": false, "activity_streams_email_notifications": false, "email_hash": "d41d8cd98f00b204e9800998ecf8427e", "datasets": [{"name": "annakarenina", "isopen": true, "title": "A Novel By Tolstoy", "url": "http://www.annakarenina.com", "notes": "Some test notes\\n\\n### A 3rd level heading\\n\\n**Some bolded text.**\\n\\n*Some italicized text.*\\n\\nForeign characters:\\nu with umlaut \\u00fc\\n66-style quote \\u201c\\nforeign word: th\\u00fcmb\\n\\nNeeds escaping:\\nleft arrow <\\n\\n<http://ckan.net/>\\n\\n", "license_title": "Other (Open)", "state": "active", "version": "0.7a", "resources": [{"hash": "abc123", "description": "Full text. Needs escaping: \\" Umlaut: \\u00fc", "created": "2013-02-14T15:05:36.303361", "url": "http://www.annakarenina.com/download/x'
ckan.lib.base: INFO: /api/action/user_update render time 0.024 seconds
--------------------- >> end captured logging << ---------------------
======================================================================
ERROR: ckan.tests.functional.test_activity.TestActivity.test_user_activity
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_activity.py", line 65, in test_user_activity
result = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/user.py", line 133, in read
return render('user/read.html')
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 197, in render
loader_class=loader_class)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/templating.py", line 249, in cached_template
return render_func()
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 149, in render_template
strip_whitespace=True))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 183, in render
return encode(generator, method=method, encoding=encoding, out=out)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 57, in encode
return _encode(''.join(list(iterator)))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 339, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 670, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 771, in __call__
for kind, data, pos in chain(stream, [(None, None, None)]):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 586, in __call__
for ev in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 288, in _ensure
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 618, in _include
for event in tmpl.generate(ctxt, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 618, in _include
for event in tmpl.generate(ctxt, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 618, in _include
for event in tmpl.generate(ctxt, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 605, in _include
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/markup.py", line 378, in _match
ctxt, start=idx + 1, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/markup.py", line 327, in _match
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 565, in _flatten
result = _eval_expr(data, ctxt, vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 277, in _eval_expr
retval = expr.evaluate(ctxt)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/eval.py", line 178, in evaluate
return eval(self.code, _globals, {'__data__': data})
File "/home/seanh/Projects/ckan/ckan/ckan/templates_legacy/_util.html", line 63, in <Expression u"h.snippet('snippets/package_list.html', packages=packages)">
${h.snippet('snippets/package_list.html', packages=packages)}
File "/home/seanh/Projects/ckan/ckan/ckan/lib/helpers.py", line 989, in snippet
return base.render_snippet(template_name, **kw)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 65, in render_snippet
renderer='snippet')
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 197, in render
loader_class=loader_class)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/templating.py", line 249, in cached_template
return render_func()
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 149, in render_template
strip_whitespace=True))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 183, in render
return encode(generator, method=method, encoding=encoding, out=out)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 57, in encode
return _encode(''.join(list(iterator)))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 339, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 670, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 771, in __call__
for kind, data, pos in chain(stream, [(None, None, None)]):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 586, in __call__
for ev in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 288, in _ensure
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 605, in _include
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/markup.py", line 327, in _match
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 585, in _flatten
stream = _apply_directives(data[1], data[0], ctxt, vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 262, in _apply_directives
stream = directives[0](iter(stream), directives[1:], ctxt, **vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/directives.py", line 400, in __call__
value = _eval_expr(self.expr, ctxt, vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 277, in _eval_expr
retval = expr.evaluate(ctxt)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/eval.py", line 178, in evaluate
return eval(self.code, _globals, {'__data__': data})
File "/home/seanh/Projects/ckan/ckan/ckan/templates_legacy/snippets/package_list.html", line 18, in <Expression u'package.resources'>
<py:if test="package.resources">
File "/home/seanh/Projects/ckan/ckan/ckan/config/environment.py", line 283, in genshi_lookup_attr
val = cls.undefined(key, owner=obj)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/eval.py", line 410, in undefined
raise UndefinedError(key, owner=owner)
UndefinedError: "{u'name': u'baseball_stats', 'isopen': False, u'title': u"Billy's Stats about Baseball Players", u'state': u'active', u'type': u'dataset', u'id': u'33ffedda-e0d6-46ff-bcc6-08d088ed698a'} has no member named "resources"
-------------------- >> begin captured logging << --------------------
ckan.model: INFO: Database table data deleted
ckan.model: INFO: Database initialised
ckan.model: INFO: Database rebuilt
ckan.model: INFO: Database initialised
ckan.logic: CRITICAL: activity_create was pass extra keywords {'ignore_auth': True}
ckan.lib.base: INFO: /user/277d988b-2c21-40e4-a28d-b2a3c702a53b render time 0.390 seconds
--------------------- >> end captured logging << ---------------------" not defined
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_date1
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 481, in test_read_date1
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_date2
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 493, in test_read_date2
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_date3
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 506, in test_read_date3
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_revision1
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 531, in test_read_revision1
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_revision2
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 549, in test_read_revision2
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __ca
|
KeyError
|
def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
# Add several keys to the 'resources' subschema so they don't get stripped
# from the resource dicts by validation.
schema["resources"].update(
{
"created": [ckan.lib.navl.validators.ignore_missing],
"position": [not_empty],
"last_modified": [ckan.lib.navl.validators.ignore_missing],
"cache_last_updated": [ckan.lib.navl.validators.ignore_missing],
"webstore_last_updated": [ckan.lib.navl.validators.ignore_missing],
"revision_timestamp": [],
"resource_group_id": [],
"cache_last_updated": [],
"webstore_last_updated": [],
"size": [],
"state": [],
"last_modified": [],
"mimetype": [],
"cache_url": [],
"name": [],
"webstore_url": [],
"mimetype_inner": [],
"resource_type": [],
}
)
schema.update(
{
"state": [ckan.lib.navl.validators.ignore_missing],
"isopen": [ignore_missing],
"license_url": [ignore_missing],
}
)
schema["groups"].update(
{
"description": [ignore_missing],
}
)
# Remove validators for several keys from the schema so validation doesn't
# strip the keys from the package dicts if the values are 'missing' (i.e.
# None).
schema["author"] = []
schema["author_email"] = []
schema["maintainer"] = []
schema["maintainer_email"] = []
schema["license_id"] = []
schema["notes"] = []
schema["url"] = []
schema["version"] = []
# Add several keys that are missing from default_package_schema(), so
# validation doesn't strip the keys from the package dicts.
schema["metadata_created"] = []
schema["metadata_modified"] = []
schema["num_resources"] = []
schema["num_tags"] = []
schema["organization"] = []
schema["owner_org"] = []
schema["private"] = []
schema["revision_id"] = []
schema["revision_timestamp"] = []
schema["tracking_summary"] = []
schema["license_title"] = []
return schema
|
def db_to_form_package_schema():
schema = default_package_schema()
schema.update(
{
"tags": {
"__extras": [
ckan.lib.navl.validators.keep_extras,
ckan.logic.converters.free_tags_only,
]
},
}
)
# Add several keys to the 'resources' subschema so they don't get stripped
# from the resource dicts by validation.
schema["resources"].update(
{
"created": [ckan.lib.navl.validators.ignore_missing],
"position": [not_empty],
"last_modified": [ckan.lib.navl.validators.ignore_missing],
"cache_last_updated": [ckan.lib.navl.validators.ignore_missing],
"webstore_last_updated": [ckan.lib.navl.validators.ignore_missing],
"revision_timestamp": [],
"resource_group_id": [],
"cache_last_updated": [],
"webstore_last_updated": [],
"size": [],
"state": [],
"last_modified": [],
"mimetype": [],
"cache_url": [],
"name": [],
"webstore_url": [],
"mimetype_inner": [],
"resource_type": [],
}
)
schema.update(
{
"state": [ckan.lib.navl.validators.ignore_missing],
"isopen": [ignore_missing],
"license_url": [ignore_missing],
}
)
schema["groups"].update(
{
"description": [ignore_missing],
}
)
# Remove validators for several keys from the schema so validation doesn't
# strip the keys from the package dicts if the values are 'missing' (i.e.
# None).
schema["author"] = []
schema["author_email"] = []
schema["maintainer"] = []
schema["maintainer_email"] = []
schema["license_id"] = []
schema["notes"] = []
schema["url"] = []
schema["version"] = []
# Add several keys that are missing from default_package_schema(), so
# validation doesn't strip the keys from the package dicts.
# schema['license_title'] = []
schema["metadata_created"] = []
schema["metadata_modified"] = []
schema["num_resources"] = []
schema["num_tags"] = []
schema["organization"] = []
schema["owner_org"] = []
schema["private"] = []
schema["revision_id"] = []
schema["revision_timestamp"] = []
schema["tracking_summary"] = []
schema["license_title"] = []
return schema
|
https://github.com/ckan/ckan/issues/396
|
nosetests --ckan --with-pylons=test-core.ini ckan ξ² ξ idatasetform-fixes
.........................................................................................................................................................S.S.S.S.S.S..............................................................F.F........................................E.E.EEE.................EEE...E.E.....E...........................................................................................................................................SSS.........S.................................S.............................................................S...............................................................E.......S...................................SSS....................................................EEE...EEE......E.....E................................................................................................................................................................................................................................................S...........S..............................................E..............SE.................S.S.....................................................................................................................................................................................
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_delete_resources
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1253, in test_01_delete_resources
if len(package_dict['resources']) > 0:
KeyError: "'resources'\n-------------------- >> begin captured logging << --------------------\nckan.model: INFO: Database table data deleted\nckan.model: INFO: Database initialised\nckan.model: INFO: Database rebuilt\nckan.model: INFO: Database initialised\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.026 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_extras
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1366, in test_01_update_extras
if len(package_dict['extras']) > 0:
KeyError: "'extras'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.039 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_package
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1383, in test_01_update_package
self._update_package(package_dict, user=self.normal_user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1055, in _update_package
for group_dict in package['groups']:
KeyError: "'groups'\n-------------------- >> begin captured stdout << ---------------------\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n\n--------------------- >> end captured stdout << ----------------------\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.003 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.024 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.025 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.022 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.019 seconds\nckan.lib.base: INFO: /api/action/package_update render time 0.655 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.027 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.045 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.020 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_resource
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1393, in test_01_update_resource
for resource in package_dict['resources']:
KeyError: "'resources'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.031 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_01_update_resource_not_logged_in
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1405, in test_01_update_resource_not_logged_in
for resource in package_dict['resources']:
KeyError: "'resources'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.003 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.024 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_add_extras
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1981, in test_add_extras
self._add_extra(package_dict, user=self.normal_user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 667, in _add_extra
[group['name'] for group in package_dict['groups']],
KeyError: "'groups'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.005 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.015 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_add_resources
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1428, in test_add_resources
self._add_resource(package_dict, user=self.normal_user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 391, in _add_resource
[group['name'] for group in package['groups']], apikey=apikey)
KeyError: "'groups'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.003 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.021 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_add_tag
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1605, in test_add_tag
for group_dict in pkg_dict['groups']:
KeyError: "'groups'\n-------------------- >> begin captured stdout << ---------------------\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n@@@@@@@@ {'Authorization': '1eaa43d6-de22-4723-8d51-ebfcd6390d54'}\n\n--------------------- >> end captured stdout << ----------------------\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_show render time 0.024 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.035 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.040 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.021 seconds\nckan.lib.base: INFO: /api/action/package_update render time 0.773 seconds\nckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.030 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/74fa22ab-01c1-4f1d-8a0d-a20bfa9ea589/activity render time 0.023 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.022 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_delete_extras
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1994, in test_delete_extras
if len(package_dict['extras']) > 0:
KeyError: "'extras'\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.032 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_delete_package
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1440, in test_delete_package
self._delete_package(package_dict)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1208, in _delete_package
for group_dict in package['groups']:
KeyError: "'groups'\n-------------------- >> begin captured stdout << ---------------------\n@@@@@@@@ {'Authorization': 'ac42312b-5672-47df-bcb4-518c0ec2750d'}\n@@@@@@@@ {'Authorization': 'ac42312b-5672-47df-bcb4-518c0ec2750d'}\n\n--------------------- >> end captured stdout << ----------------------\n-------------------- >> begin captured logging << --------------------\nckan.lib.base: INFO: /api/action/package_list render time 0.004 seconds\nckan.lib.base: INFO: /api/action/package_show render time 0.029 seconds\nckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.023 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/0af83051-c69b-4857-b63f-7654ad235c91/activity render time 0.021 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.021 seconds\nckan.lib.base: INFO: /api/action/package_delete render time 0.204 seconds\nckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.030 seconds\nckan.lib.base: INFO: /api/2/rest/dataset/0af83051-c69b-4857-b63f-7654ad235c91/activity render time 0.024 seconds\nckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.023 seconds\n--------------------- >> end captured logging << ---------------------"
======================================================================
ERROR: ckan.tests.functional.api.test_activity.TestActivity.test_update_user
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 1500, in test_update_user
self._update_user(user)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/api/test_activity.py", line 899, in _update_user
extra_environ={'Authorization': str(user['apikey'])})
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 262, in post
expect_errors=expect_errors)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 243, in _gen_request
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 406, in do_request
self._check_status(status, res)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 439, in _check_status
res.body))
AppError: Bad response: 400 Bad Request (not 200 OK or 3xx redirect for /api/action/user_update)
"Bad request - JSON Error: Error decoding JSON data. Error: ValueError('Unterminated string starting at: line 1 column 952 (char 952)',) JSON data extracted from the request: '{\"openid\": null, \"about\": \"edited\", \"apikey\": \"1eaa43d6-de22-4723-8d51-ebfcd6390d54\", \"display_name\": \"annafan\", \"name\": \"annafan\", \"created\": \"2013-02-14T15:05:36.323931\", \"reset_key\": null, \"id\": \"28212825-2dc2-4878-9a03-b27d63e9e1e3\", \"sysadmin\": false, \"activity_streams_email_notifications\": false, \"email_hash\": \"d41d8cd98f00b204e9800998ecf8427e\", \"datasets\": [{\"name\": \"annakarenina\", \"isopen\": true, \"title\": \"A Novel By Tolstoy\", \"url\": \"http://www.annakarenina.com\", \"notes\": \"Some test notes\\\\n\\\\n### A 3rd level heading\\\\n\\\\n**Some bolded text.**\\\\n\\\\n*Some italicized text.*\\\\n\\\\nForeign characters:\\\\nu with umlaut \\\\u00fc\\\\n66-style quote \\\\u201c\\\\nforeign word: th\\\\u00fcmb\\\\n\\\\nNeeds escaping:\\\\nleft arrow <\\\\n\\\\n<http://ckan.net/>\\\\n\\\\n\", \"license_title\": \"Other (Open)\", \"state\": \"active\", \"version\": \"0.7a\", \"resources\": [{\"hash\": \"abc123\", \"description\": \"Full text. Needs escaping: \\\\\" Umlaut: \\\\u00fc\", \"created\": \"2013-02-14T15:05:36.303361\", \"url\": \"http://www.annakarenina.com/download/x'"
-------------------- >> begin captured logging << --------------------
ckan.lib.base: INFO: /api/action/user_show render time 0.444 seconds
ckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.028 seconds
ckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.026 seconds
ckan.logic: CRITICAL: activity_create was pass extra keywords {'ignore_auth': True}
ckan.lib.base: INFO: /api/action/user_update render time 0.087 seconds
ckan.lib.base: INFO: /api/2/rest/user/6fcde856-fd4a-45c6-a034-8cda32c4952d/activity render time 0.027 seconds
ckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.026 seconds
ckan.lib.base: INFO: /api/action/user_show render time 0.421 seconds
ckan.lib.base: INFO: /api/2/rest/user/28212825-2dc2-4878-9a03-b27d63e9e1e3/activity render time 0.028 seconds
ckan.lib.base: INFO: /api/action/recently_changed_packages_activity_list render time 0.025 seconds
ckan.controllers.api: ERROR: Bad request data: Error decoding JSON data. Error: ValueError('Unterminated string starting at: line 1 column 952 (char 952)',) JSON data extracted from the request: '{"openid": null, "about": "edited", "apikey": "1eaa43d6-de22-4723-8d51-ebfcd6390d54", "display_name": "annafan", "name": "annafan", "created": "2013-02-14T15:05:36.323931", "reset_key": null, "id": "28212825-2dc2-4878-9a03-b27d63e9e1e3", "sysadmin": false, "activity_streams_email_notifications": false, "email_hash": "d41d8cd98f00b204e9800998ecf8427e", "datasets": [{"name": "annakarenina", "isopen": true, "title": "A Novel By Tolstoy", "url": "http://www.annakarenina.com", "notes": "Some test notes\\n\\n### A 3rd level heading\\n\\n**Some bolded text.**\\n\\n*Some italicized text.*\\n\\nForeign characters:\\nu with umlaut \\u00fc\\n66-style quote \\u201c\\nforeign word: th\\u00fcmb\\n\\nNeeds escaping:\\nleft arrow <\\n\\n<http://ckan.net/>\\n\\n", "license_title": "Other (Open)", "state": "active", "version": "0.7a", "resources": [{"hash": "abc123", "description": "Full text. Needs escaping: \\" Umlaut: \\u00fc", "created": "2013-02-14T15:05:36.303361", "url": "http://www.annakarenina.com/download/x'
ckan.lib.base: INFO: /api/action/user_update render time 0.024 seconds
--------------------- >> end captured logging << ---------------------
======================================================================
ERROR: ckan.tests.functional.test_activity.TestActivity.test_user_activity
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_activity.py", line 65, in test_user_activity
result = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/user.py", line 133, in read
return render('user/read.html')
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 197, in render
loader_class=loader_class)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/templating.py", line 249, in cached_template
return render_func()
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 149, in render_template
strip_whitespace=True))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 183, in render
return encode(generator, method=method, encoding=encoding, out=out)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 57, in encode
return _encode(''.join(list(iterator)))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 339, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 670, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 771, in __call__
for kind, data, pos in chain(stream, [(None, None, None)]):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 586, in __call__
for ev in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 288, in _ensure
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 618, in _include
for event in tmpl.generate(ctxt, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 618, in _include
for event in tmpl.generate(ctxt, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 618, in _include
for event in tmpl.generate(ctxt, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 605, in _include
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/markup.py", line 378, in _match
ctxt, start=idx + 1, **vars):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/markup.py", line 327, in _match
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 565, in _flatten
result = _eval_expr(data, ctxt, vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 277, in _eval_expr
retval = expr.evaluate(ctxt)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/eval.py", line 178, in evaluate
return eval(self.code, _globals, {'__data__': data})
File "/home/seanh/Projects/ckan/ckan/ckan/templates_legacy/_util.html", line 63, in <Expression u"h.snippet('snippets/package_list.html', packages=packages)">
${h.snippet('snippets/package_list.html', packages=packages)}
File "/home/seanh/Projects/ckan/ckan/ckan/lib/helpers.py", line 989, in snippet
return base.render_snippet(template_name, **kw)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 65, in render_snippet
renderer='snippet')
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 197, in render
loader_class=loader_class)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/templating.py", line 249, in cached_template
return render_func()
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 149, in render_template
strip_whitespace=True))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 183, in render
return encode(generator, method=method, encoding=encoding, out=out)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 57, in encode
return _encode(''.join(list(iterator)))
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 339, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 670, in __call__
for kind, data, pos in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 771, in __call__
for kind, data, pos in chain(stream, [(None, None, None)]):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/output.py", line 586, in __call__
for ev in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/core.py", line 288, in _ensure
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 605, in _include
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/markup.py", line 327, in _match
for event in stream:
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 585, in _flatten
stream = _apply_directives(data[1], data[0], ctxt, vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 262, in _apply_directives
stream = directives[0](iter(stream), directives[1:], ctxt, **vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/directives.py", line 400, in __call__
value = _eval_expr(self.expr, ctxt, vars)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/base.py", line 277, in _eval_expr
retval = expr.evaluate(ctxt)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/eval.py", line 178, in evaluate
return eval(self.code, _globals, {'__data__': data})
File "/home/seanh/Projects/ckan/ckan/ckan/templates_legacy/snippets/package_list.html", line 18, in <Expression u'package.resources'>
<py:if test="package.resources">
File "/home/seanh/Projects/ckan/ckan/ckan/config/environment.py", line 283, in genshi_lookup_attr
val = cls.undefined(key, owner=obj)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/genshi/template/eval.py", line 410, in undefined
raise UndefinedError(key, owner=owner)
UndefinedError: "{u'name': u'baseball_stats', 'isopen': False, u'title': u"Billy's Stats about Baseball Players", u'state': u'active', u'type': u'dataset', u'id': u'33ffedda-e0d6-46ff-bcc6-08d088ed698a'} has no member named "resources"
-------------------- >> begin captured logging << --------------------
ckan.model: INFO: Database table data deleted
ckan.model: INFO: Database initialised
ckan.model: INFO: Database rebuilt
ckan.model: INFO: Database initialised
ckan.logic: CRITICAL: activity_create was pass extra keywords {'ignore_auth': True}
ckan.lib.base: INFO: /user/277d988b-2c21-40e4-a28d-b2a3c702a53b render time 0.390 seconds
--------------------- >> end captured logging << ---------------------" not defined
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_date1
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 481, in test_read_date1
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_date2
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 493, in test_read_date2
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_date3
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 506, in test_read_date3
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_revision1
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 531, in test_read_revision1
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 155, in __call__
return self.wrap_app(environ, session_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
response = self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 125, in __call__
response = self.dispatch(controller, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/wsgiapp.py", line 324, in dispatch
return controller(environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/base.py", line 293, in __call__
res = WSGIController.__call__(self, environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 221, in __call__
response = self._dispatch_call()
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 172, in _dispatch_call
response = self._inspect_call(func)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 107, in _inspect_call
result = self._perform_call(func, args)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/controllers/core.py", line 60, in _perform_call
return func(**args)
File "/home/seanh/Projects/ckan/ckan/ckan/controllers/package.py", line 352, in read
PackageSaver().render_package(c.pkg_dict, context)
File "/home/seanh/Projects/ckan/ckan/ckan/lib/package_saver.py", line 56, in render_package
c.pkg_revision_id = c.pkg_dict[u'revision_id']
KeyError: u'revision_id'
======================================================================
ERROR: ckan.tests.functional.test_package.TestReadAtRevision.test_read_revision2
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/home/seanh/Projects/ckan/ckan/ckan/tests/functional/test_package.py", line 549, in test_read_revision2
res = self.app.get(offset, status=200)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 208, in get
return self.do_request(req, status=status)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/fixture.py", line 389, in do_request
**req.environ)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/wsgilib.py", line 343, in raw_interactive
app_iter = application(basic_environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/lint.py", line 170, in lint_app
iterator = application(environ, start_response_wrapper)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 348, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/cascade.py", line 130, in __call__
return self.apps[-1](environ, start_response)
File "/home/seanh/Projects/ckan/ckan/ckan/config/middleware.py", line 213, in __call__
return self.app(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/paste/registry.py", line 379, in __call__
app_iter = self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/repoze/who/middleware.py", line 107, in __call__
app_iter = app(environ, wrapper.wrap_start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/middleware.py", line 201, in __call__
self.app, environ, catch_exc_info=True)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/pylons/util.py", line 94, in call_wsgi_application
app_iter = application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 235, in __call__
return self.respond(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/weberror/evalexception.py", line 418, in respond
return self.application(environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/publisher.py", line 234, in __call__
return request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 147, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/dec.py", line 208, in call_func
return self.func(req, *args, **kwargs)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/fanstatic/injector.py", line 54, in __call__
response = request.get_response(self.app)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1053, in get_response
application, catch_exc_info=False)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/webob/request.py", line 1022, in call_application
app_iter = application(self.environ, start_response)
File "/home/seanh/.virtualenvs/ckan/local/lib/python2.7/site-packages/beaker/middleware.py", line 73, in __ca
|
KeyError
|
def post(self, sketch_id):
"""Handles POST request to the resource.
Handler for /api/v1/sketches/:sketch_id/explore/
Args:
sketch_id: Integer primary key for a sketch database model
Returns:
JSON with list of matched events
"""
sketch = Sketch.query.get_with_acl(sketch_id)
if not sketch:
abort(HTTP_STATUS_CODE_NOT_FOUND, "No sketch found with this ID.")
if not sketch.has_permission(current_user, "read"):
abort(
HTTP_STATUS_CODE_FORBIDDEN,
"User does not have read access controls on sketch.",
)
if sketch.get_status.status == "archived":
abort(HTTP_STATUS_CODE_BAD_REQUEST, "Unable to query on an archived sketch.")
form = forms.ExploreForm.build(request)
if not form.validate_on_submit():
abort(
HTTP_STATUS_CODE_BAD_REQUEST,
"Unable to explore data, unable to validate form data",
)
# TODO: Remove form and use json instead.
query_dsl = form.dsl.data
enable_scroll = form.enable_scroll.data
scroll_id = form.scroll_id.data
file_name = form.file_name.data
count = bool(form.count.data)
query_filter = request.json.get("filter", {})
return_field_string = form.fields.data
if return_field_string:
return_fields = [x.strip() for x in return_field_string.split(",")]
else:
return_fields = query_filter.get("fields", [])
return_fields = [field["field"] for field in return_fields]
return_fields.extend(DEFAULT_SOURCE_FIELDS)
if not query_filter:
query_filter = {}
all_indices = list({t.searchindex.index_name for t in sketch.timelines})
indices = query_filter.get("indices", all_indices)
# If _all in indices then execute the query on all indices
if "_all" in indices:
indices = all_indices
# Make sure that the indices in the filter are part of the sketch.
# This will also remove any deleted timeline from the search result.
indices, timeline_ids = get_validated_indices(indices, sketch)
# Remove indices that don't exist from search.
indices = utils.validate_indices(indices, self.datastore)
if not indices:
abort(
HTTP_STATUS_CODE_BAD_REQUEST,
"No valid search indices were found to perform the search on.",
)
# Make sure we have a query string or star filter
if not (
form.query.data,
query_filter.get("star"),
query_filter.get("events"),
query_dsl,
):
abort(
HTTP_STATUS_CODE_BAD_REQUEST,
"The request needs a query string/DSL and or a star filter.",
)
# Aggregate hit count per index.
index_stats_agg = {
"indices": {"terms": {"field": "_index", "min_doc_count": 0}},
"timelines": {"terms": {"field": "__ts_timeline_id", "min_doc_count": 0}},
}
if count:
# Count operations do not support size parameters.
if "size" in query_filter:
_ = query_filter.pop("size")
if "terminate_after" in query_filter:
_ = query_filter.pop("terminate_after")
try:
result = self.datastore.search(
sketch_id=sketch_id,
query_string=form.query.data,
query_filter=query_filter,
query_dsl=query_dsl,
indices=indices,
timeline_ids=timeline_ids,
count=True,
)
except ValueError as e:
abort(HTTP_STATUS_CODE_BAD_REQUEST, e)
# Get number of matching documents per index.
schema = {"meta": {"total_count": result}, "objects": []}
return jsonify(schema)
if file_name:
file_object = io.BytesIO()
form_data = {
"created_at": datetime.datetime.utcnow().isoformat(),
"created_by": current_user.username,
"sketch": sketch_id,
"query": form.query.data,
"query_dsl": query_dsl,
"query_filter": query_filter,
"return_fields": return_fields,
}
with zipfile.ZipFile(file_object, mode="w") as zip_file:
zip_file.writestr("METADATA", data=json.dumps(form_data))
fh = export.query_to_filehandle(
query_string=form.query.data,
query_dsl=query_dsl,
query_filter=query_filter,
indices=indices,
sketch=sketch,
datastore=self.datastore,
)
fh.seek(0)
zip_file.writestr("query_results.csv", fh.read())
file_object.seek(0)
return send_file(file_object, mimetype="zip", attachment_filename=file_name)
if scroll_id:
# pylint: disable=unexpected-keyword-arg
result = self.datastore.client.scroll(scroll_id=scroll_id, scroll="1m")
else:
try:
result = self.datastore.search(
sketch_id=sketch_id,
query_string=form.query.data,
query_filter=query_filter,
query_dsl=query_dsl,
indices=indices,
aggregations=index_stats_agg,
return_fields=return_fields,
enable_scroll=enable_scroll,
timeline_ids=timeline_ids,
)
except ValueError as e:
abort(HTTP_STATUS_CODE_BAD_REQUEST, e)
# Get number of matching documents per index.
count_per_index = {}
try:
for bucket in result["aggregations"]["indices"]["buckets"]:
key = bucket.get("key")
if key:
count_per_index[key] = bucket.get("doc_count")
except KeyError:
pass
# Get number of matching documents per timeline.
count_per_timeline = {}
try:
for bucket in result["aggregations"]["timelines"]["buckets"]:
key = bucket.get("key")
if key:
count_per_timeline[key] = bucket.get("doc_count")
except KeyError:
pass
comments = {}
if "comment" in return_fields:
events = Event.query.filter_by(sketch=sketch).all()
for event in events:
for comment in event.comments:
comments.setdefault(event.document_id, [])
comments[event.document_id].append(comment.comment)
# Get labels for each event that matches the sketch.
# Remove all other labels.
for event in result["hits"]["hits"]:
event["selected"] = False
event["_source"]["label"] = []
try:
for label in event["_source"]["timesketch_label"]:
if sketch.id != label["sketch_id"]:
continue
event["_source"]["label"].append(label["name"])
del event["_source"]["timesketch_label"]
except KeyError:
pass
if "comment" in return_fields:
event["_source"]["comment"] = comments.get(event["_id"], [])
# Update or create user state view. This is used in the UI to let
# the user get back to the last state in the explore view.
# TODO: Add a call to utils.update_sketch_last_activity once new
# mechanism has been added, instead of relying on user views.
view = View.get_or_create(user=current_user, sketch=sketch, name="")
view.update_modification_time()
view.query_string = form.query.data
view.query_filter = json.dumps(query_filter, ensure_ascii=False)
view.query_dsl = json.dumps(query_dsl, ensure_ascii=False)
db_session.add(view)
db_session.commit()
# Add metadata for the query result. This is used by the UI to
# render the event correctly and to display timing and hit count
# information.
tl_colors = {}
tl_names = {}
for timeline in sketch.timelines:
tl_colors[timeline.searchindex.index_name] = timeline.color
tl_names[timeline.searchindex.index_name] = timeline.name
meta = {
"es_time": result["took"],
"es_total_count": result["hits"]["total"],
"timeline_colors": tl_colors,
"timeline_names": tl_names,
"count_per_index": count_per_index,
"count_per_timeline": count_per_timeline,
"scroll_id": result.get("_scroll_id", ""),
}
# Elasticsearch version 7.x returns total hits as a dictionary.
# TODO: Refactor when version 6.x has been deprecated.
if isinstance(meta["es_total_count"], dict):
meta["es_total_count"] = meta["es_total_count"].get("value", 0)
schema = {"meta": meta, "objects": result["hits"]["hits"]}
return jsonify(schema)
|
def post(self, sketch_id):
"""Handles POST request to the resource.
Handler for /api/v1/sketches/:sketch_id/explore/
Args:
sketch_id: Integer primary key for a sketch database model
Returns:
JSON with list of matched events
"""
sketch = Sketch.query.get_with_acl(sketch_id)
if not sketch:
abort(HTTP_STATUS_CODE_NOT_FOUND, "No sketch found with this ID.")
if not sketch.has_permission(current_user, "read"):
abort(
HTTP_STATUS_CODE_FORBIDDEN,
"User does not have read access controls on sketch.",
)
if sketch.get_status.status == "archived":
abort(HTTP_STATUS_CODE_BAD_REQUEST, "Unable to query on an archived sketch.")
form = forms.ExploreForm.build(request)
if not form.validate_on_submit():
abort(
HTTP_STATUS_CODE_BAD_REQUEST,
"Unable to explore data, unable to validate form data",
)
# TODO: Remove form and use json instead.
query_dsl = form.dsl.data
enable_scroll = form.enable_scroll.data
scroll_id = form.scroll_id.data
file_name = form.file_name.data
count = bool(form.count.data)
query_filter = request.json.get("filter", {})
return_field_string = form.fields.data
if return_field_string:
return_fields = [x.strip() for x in return_field_string.split(",")]
else:
return_fields = query_filter.get("fields", [])
return_fields = [field["field"] for field in return_fields]
return_fields.extend(DEFAULT_SOURCE_FIELDS)
if not query_filter:
query_filter = {}
all_indices = list({t.searchindex.index_name for t in sketch.timelines})
indices = query_filter.get("indices", all_indices)
# If _all in indices then execute the query on all indices
if "_all" in indices:
indices = all_indices
# Remove indices that don't exist from search.
indices = utils.validate_indices(indices, self.datastore)
# Make sure that the indices in the filter are part of the sketch.
# This will also remove any deleted timeline from the search result.
indices, timeline_ids = get_validated_indices(indices, sketch)
if not indices:
abort(
HTTP_STATUS_CODE_BAD_REQUEST,
"No valid search indices were found to perform the search on.",
)
# Make sure we have a query string or star filter
if not (
form.query.data,
query_filter.get("star"),
query_filter.get("events"),
query_dsl,
):
abort(
HTTP_STATUS_CODE_BAD_REQUEST,
"The request needs a query string/DSL and or a star filter.",
)
# Aggregate hit count per index.
index_stats_agg = {
"indices": {"terms": {"field": "_index", "min_doc_count": 0}},
"timelines": {"terms": {"field": "__ts_timeline_id", "min_doc_count": 0}},
}
if count:
# Count operations do not support size parameters.
if "size" in query_filter:
_ = query_filter.pop("size")
if "terminate_after" in query_filter:
_ = query_filter.pop("terminate_after")
try:
result = self.datastore.search(
sketch_id=sketch_id,
query_string=form.query.data,
query_filter=query_filter,
query_dsl=query_dsl,
indices=indices,
timeline_ids=timeline_ids,
count=True,
)
except ValueError as e:
abort(HTTP_STATUS_CODE_BAD_REQUEST, e)
# Get number of matching documents per index.
schema = {"meta": {"total_count": result}, "objects": []}
return jsonify(schema)
if file_name:
file_object = io.BytesIO()
form_data = {
"created_at": datetime.datetime.utcnow().isoformat(),
"created_by": current_user.username,
"sketch": sketch_id,
"query": form.query.data,
"query_dsl": query_dsl,
"query_filter": query_filter,
"return_fields": return_fields,
}
with zipfile.ZipFile(file_object, mode="w") as zip_file:
zip_file.writestr("METADATA", data=json.dumps(form_data))
fh = export.query_to_filehandle(
query_string=form.query.data,
query_dsl=query_dsl,
query_filter=query_filter,
indices=indices,
sketch=sketch,
datastore=self.datastore,
)
fh.seek(0)
zip_file.writestr("query_results.csv", fh.read())
file_object.seek(0)
return send_file(file_object, mimetype="zip", attachment_filename=file_name)
if scroll_id:
# pylint: disable=unexpected-keyword-arg
result = self.datastore.client.scroll(scroll_id=scroll_id, scroll="1m")
else:
try:
result = self.datastore.search(
sketch_id=sketch_id,
query_string=form.query.data,
query_filter=query_filter,
query_dsl=query_dsl,
indices=indices,
aggregations=index_stats_agg,
return_fields=return_fields,
enable_scroll=enable_scroll,
timeline_ids=timeline_ids,
)
except ValueError as e:
abort(HTTP_STATUS_CODE_BAD_REQUEST, e)
# Get number of matching documents per index.
count_per_index = {}
try:
for bucket in result["aggregations"]["indices"]["buckets"]:
key = bucket.get("key")
if key:
count_per_index[key] = bucket.get("doc_count")
except KeyError:
pass
# Get number of matching documents per timeline.
count_per_timeline = {}
try:
for bucket in result["aggregations"]["timelines"]["buckets"]:
key = bucket.get("key")
if key:
count_per_timeline[key] = bucket.get("doc_count")
except KeyError:
pass
comments = {}
if "comment" in return_fields:
events = Event.query.filter_by(sketch=sketch).all()
for event in events:
for comment in event.comments:
comments.setdefault(event.document_id, [])
comments[event.document_id].append(comment.comment)
# Get labels for each event that matches the sketch.
# Remove all other labels.
for event in result["hits"]["hits"]:
event["selected"] = False
event["_source"]["label"] = []
try:
for label in event["_source"]["timesketch_label"]:
if sketch.id != label["sketch_id"]:
continue
event["_source"]["label"].append(label["name"])
del event["_source"]["timesketch_label"]
except KeyError:
pass
if "comment" in return_fields:
event["_source"]["comment"] = comments.get(event["_id"], [])
# Update or create user state view. This is used in the UI to let
# the user get back to the last state in the explore view.
# TODO: Add a call to utils.update_sketch_last_activity once new
# mechanism has been added, instead of relying on user views.
view = View.get_or_create(user=current_user, sketch=sketch, name="")
view.update_modification_time()
view.query_string = form.query.data
view.query_filter = json.dumps(query_filter, ensure_ascii=False)
view.query_dsl = json.dumps(query_dsl, ensure_ascii=False)
db_session.add(view)
db_session.commit()
# Add metadata for the query result. This is used by the UI to
# render the event correctly and to display timing and hit count
# information.
tl_colors = {}
tl_names = {}
for timeline in sketch.timelines:
tl_colors[timeline.searchindex.index_name] = timeline.color
tl_names[timeline.searchindex.index_name] = timeline.name
meta = {
"es_time": result["took"],
"es_total_count": result["hits"]["total"],
"timeline_colors": tl_colors,
"timeline_names": tl_names,
"count_per_index": count_per_index,
"count_per_timeline": count_per_timeline,
"scroll_id": result.get("_scroll_id", ""),
}
# Elasticsearch version 7.x returns total hits as a dictionary.
# TODO: Refactor when version 6.x has been deprecated.
if isinstance(meta["es_total_count"], dict):
meta["es_total_count"] = meta["es_total_count"].get("value", 0)
schema = {"meta": meta, "objects": result["hits"]["hits"]}
return jsonify(schema)
|
https://github.com/google/timesketch/issues/1564
|
[2021-01-21 09:57:16,813] celery.worker.strategy/INFO Received task: timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c]
[2021-01-21 09:57:16,819] timesketch.tasks/INFO Index timeline [vol timeline] to index [34b8f46651b94a8d8ad9e29bb06a147c] (source: jsonl)
[2021-01-21 09:58:14,763] timesketch.elasticsearch/ERROR Unable to add events
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/usr/lib/python3.8/http/client.py", line 1347, in getresponse
response.begin()
File "/usr/lib/python3.8/http/client.py", line 307, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.8/http/client.py", line 268, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/usr/lib/python3.8/socket.py", line 669, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/connection/http_urllib3.py", line 241, in perform_request
response = self.pool.urlopen(
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 719, in urlopen
retries = retries.increment(
File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 376, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/usr/local/lib/python3.8/dist-packages/six.py", line 693, in reraise
raise value
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 665, in urlopen
httplib_response = self._make_request(
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 330, in _raise_timeout
raise ReadTimeoutError(
urllib3.exceptions.ReadTimeoutError: HTTPConnectionPool(host='elasticsearch', port=9200): Read timed out. (read timeout=10)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/datastores/elastic.py", line 830, in flush_queued_events
results = self.client.bulk(body=self.import_events)
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/client/utils.py", line 84, in _wrapped
return func(*args, params=params, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/client/__init__.py", line 448, in bulk
return self.transport.perform_request(
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/transport.py", line 351, in perform_request
status, headers_response, data = connection.perform_request(
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/connection/http_urllib3.py", line 253, in perform_request
raise ConnectionTimeout("TIMEOUT", str(e), e)
elasticsearch.exceptions.ConnectionTimeout: ConnectionTimeout caused by - ReadTimeoutError(HTTPConnectionPool(host='elasticsearch', port=9200): Read timed out. (read timeout=10))
[2021-01-21 09:58:15,185] celery.app.trace/ERROR Task timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c] raised unexpected: UnboundLocalError("local variable 'results' referenced before assignment")
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/celery/app/trace.py", line 385, in trace_task
R = retval = fun(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/timesketch/app.py", line 198, in __call__
return TaskBase.__call__(self, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/celery/app/trace.py", line 650, in __protected_call__
return self.run(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/tasks.py", line 563, in run_csv_jsonl
results = es.flush_queued_events()
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/datastores/elastic.py", line 835, in flush_queued_events
errors_in_upload = results.get('errors', False)
UnboundLocalError: local variable 'results' referenced before assignment
|
urllib3.exceptions.ReadTimeoutError
|
def __init__(self, host="127.0.0.1", port=9200):
"""Create a Elasticsearch client."""
super().__init__()
self._error_container = {}
self.user = current_app.config.get("ELASTIC_USER", "user")
self.password = current_app.config.get("ELASTIC_PASSWORD", "pass")
self.ssl = current_app.config.get("ELASTIC_SSL", False)
self.verify = current_app.config.get("ELASTIC_VERIFY_CERTS", True)
if self.ssl:
self.client = Elasticsearch(
[{"host": host, "port": port}],
http_auth=(self.user, self.password),
use_ssl=self.ssl,
verify_certs=self.verify,
)
else:
self.client = Elasticsearch([{"host": host, "port": port}])
self.import_counter = Counter()
self.import_events = []
self._request_timeout = current_app.config.get(
"TIMEOUT_FOR_EVENT_IMPORT", self.DEFAULT_EVENT_IMPORT_TIMEOUT
)
|
def __init__(self, host="127.0.0.1", port=9200):
"""Create a Elasticsearch client."""
super().__init__()
self._error_container = {}
self.user = current_app.config.get("ELASTIC_USER", "user")
self.password = current_app.config.get("ELASTIC_PASSWORD", "pass")
self.ssl = current_app.config.get("ELASTIC_SSL", False)
self.verify = current_app.config.get("ELASTIC_VERIFY_CERTS", True)
if self.ssl:
self.client = Elasticsearch(
[{"host": host, "port": port}],
http_auth=(self.user, self.password),
use_ssl=self.ssl,
verify_certs=self.verify,
)
else:
self.client = Elasticsearch([{"host": host, "port": port}])
self.import_counter = Counter()
self.import_events = []
|
https://github.com/google/timesketch/issues/1564
|
[2021-01-21 09:57:16,813] celery.worker.strategy/INFO Received task: timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c]
[2021-01-21 09:57:16,819] timesketch.tasks/INFO Index timeline [vol timeline] to index [34b8f46651b94a8d8ad9e29bb06a147c] (source: jsonl)
[2021-01-21 09:58:14,763] timesketch.elasticsearch/ERROR Unable to add events
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/usr/lib/python3.8/http/client.py", line 1347, in getresponse
response.begin()
File "/usr/lib/python3.8/http/client.py", line 307, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.8/http/client.py", line 268, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/usr/lib/python3.8/socket.py", line 669, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/connection/http_urllib3.py", line 241, in perform_request
response = self.pool.urlopen(
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 719, in urlopen
retries = retries.increment(
File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 376, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/usr/local/lib/python3.8/dist-packages/six.py", line 693, in reraise
raise value
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 665, in urlopen
httplib_response = self._make_request(
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 330, in _raise_timeout
raise ReadTimeoutError(
urllib3.exceptions.ReadTimeoutError: HTTPConnectionPool(host='elasticsearch', port=9200): Read timed out. (read timeout=10)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/datastores/elastic.py", line 830, in flush_queued_events
results = self.client.bulk(body=self.import_events)
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/client/utils.py", line 84, in _wrapped
return func(*args, params=params, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/client/__init__.py", line 448, in bulk
return self.transport.perform_request(
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/transport.py", line 351, in perform_request
status, headers_response, data = connection.perform_request(
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/connection/http_urllib3.py", line 253, in perform_request
raise ConnectionTimeout("TIMEOUT", str(e), e)
elasticsearch.exceptions.ConnectionTimeout: ConnectionTimeout caused by - ReadTimeoutError(HTTPConnectionPool(host='elasticsearch', port=9200): Read timed out. (read timeout=10))
[2021-01-21 09:58:15,185] celery.app.trace/ERROR Task timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c] raised unexpected: UnboundLocalError("local variable 'results' referenced before assignment")
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/celery/app/trace.py", line 385, in trace_task
R = retval = fun(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/timesketch/app.py", line 198, in __call__
return TaskBase.__call__(self, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/celery/app/trace.py", line 650, in __protected_call__
return self.run(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/tasks.py", line 563, in run_csv_jsonl
results = es.flush_queued_events()
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/datastores/elastic.py", line 835, in flush_queued_events
errors_in_upload = results.get('errors', False)
UnboundLocalError: local variable 'results' referenced before assignment
|
urllib3.exceptions.ReadTimeoutError
|
def flush_queued_events(self, retry_count=0):
"""Flush all queued events.
Returns:
dict: A dict object that contains the number of events
that were sent to Elastic as well as information
on whether there were any errors, and what the
details of these errors if any.
retry_count: optional int indicating whether this is a retry.
"""
if not self.import_events:
return {}
return_dict = {
"number_of_events": len(self.import_events) / 2,
"total_events": self.import_counter["events"],
}
try:
# pylint: disable=unexpected-keyword-arg
results = self.client.bulk(
body=self.import_events, timeout=self._request_timeout
)
except (ConnectionTimeout, socket.timeout):
if retry_count >= self.DEFAULT_FLUSH_RETRY_LIMIT:
es_logger.error("Unable to add events, reached recount max.", exc_info=True)
return {}
es_logger.error(
"Unable to add events (retry {0:d}/{1:d})".format(
retry_count, self.DEFAULT_FLUSH_RETRY_LIMIT
)
)
return self.flush_queued_events(retry_count + 1)
errors_in_upload = results.get("errors", False)
return_dict["errors_in_upload"] = errors_in_upload
if errors_in_upload:
items = results.get("items", [])
return_dict["errors"] = []
es_logger.error("Errors while attempting to upload events.")
for item in items:
index = item.get("index", {})
index_name = index.get("_index", "N/A")
_ = self._error_container.setdefault(
index_name, {"errors": [], "types": Counter(), "details": Counter()}
)
error_counter = self._error_container[index_name]["types"]
error_detail_counter = self._error_container[index_name]["details"]
error_list = self._error_container[index_name]["errors"]
error = index.get("error", {})
status_code = index.get("status", 0)
doc_id = index.get("_id", "(unable to get doc id)")
caused_by = error.get("caused_by", {})
caused_reason = caused_by.get("reason", "Unkown Detailed Reason")
error_counter[error.get("type")] += 1
detail_msg = "{0:s}/{1:s}".format(
caused_by.get("type", "Unknown Detailed Type"),
" ".join(caused_reason.split()[:5]),
)
error_detail_counter[detail_msg] += 1
error_msg = "<{0:s}> {1:s} [{2:s}/{3:s}]".format(
error.get("type", "Unknown Type"),
error.get("reason", "No reason given"),
caused_by.get("type", "Unknown Type"),
caused_reason,
)
error_list.append(error_msg)
try:
es_logger.error(
"Unable to upload document: {0:s} to index {1:s} - "
"[{2:d}] {3:s}".format(doc_id, index_name, status_code, error_msg)
)
# We need to catch all exceptions here, since this is a crucial
# call that we do not want to break operation.
except Exception: # pylint: disable=broad-except
es_logger.error(
"Unable to upload document, and unable to log the error itself.",
exc_info=True,
)
return_dict["error_container"] = self._error_container
self.import_events = []
return return_dict
|
def flush_queued_events(self, retry_count=0):
"""Flush all queued events.
Returns:
dict: A dict object that contains the number of events
that were sent to Elastic as well as information
on whether there were any errors, and what the
details of these errors if any.
retry_count: optional int indicating whether this is a retry.
"""
if not self.import_events:
return {}
return_dict = {
"number_of_events": len(self.import_events) / 2,
"total_events": self.import_counter["events"],
}
try:
results = self.client.bulk(body=self.import_events)
except (ConnectionTimeout, socket.timeout):
if retry_count >= self.DEFAULT_FLUSH_RETRY_LIMIT:
es_logger.error("Unable to add events, reached recount max.", exc_info=True)
return {}
es_logger.error(
"Unable to add events (retry {0:d}/{1:d})".format(
retry_count, self.DEFAULT_FLUSH_RETRY_LIMIT
)
)
return self.flush_queued_events(retry_count + 1)
errors_in_upload = results.get("errors", False)
return_dict["errors_in_upload"] = errors_in_upload
if errors_in_upload:
items = results.get("items", [])
return_dict["errors"] = []
es_logger.error("Errors while attempting to upload events.")
for item in items:
index = item.get("index", {})
index_name = index.get("_index", "N/A")
_ = self._error_container.setdefault(
index_name, {"errors": [], "types": Counter(), "details": Counter()}
)
error_counter = self._error_container[index_name]["types"]
error_detail_counter = self._error_container[index_name]["details"]
error_list = self._error_container[index_name]["errors"]
error = index.get("error", {})
status_code = index.get("status", 0)
doc_id = index.get("_id", "(unable to get doc id)")
caused_by = error.get("caused_by", {})
caused_reason = caused_by.get("reason", "Unkown Detailed Reason")
error_counter[error.get("type")] += 1
detail_msg = "{0:s}/{1:s}".format(
caused_by.get("type", "Unknown Detailed Type"),
" ".join(caused_reason.split()[:5]),
)
error_detail_counter[detail_msg] += 1
error_msg = "<{0:s}> {1:s} [{2:s}/{3:s}]".format(
error.get("type", "Unknown Type"),
error.get("reason", "No reason given"),
caused_by.get("type", "Unknown Type"),
caused_reason,
)
error_list.append(error_msg)
try:
es_logger.error(
"Unable to upload document: {0:s} to index {1:s} - "
"[{2:d}] {3:s}".format(doc_id, index_name, status_code, error_msg)
)
# We need to catch all exceptions here, since this is a crucial
# call that we do not want to break operation.
except Exception: # pylint: disable=broad-except
es_logger.error(
"Unable to upload document, and unable to log the error itself.",
exc_info=True,
)
return_dict["error_container"] = self._error_container
self.import_events = []
return return_dict
|
https://github.com/google/timesketch/issues/1564
|
[2021-01-21 09:57:16,813] celery.worker.strategy/INFO Received task: timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c]
[2021-01-21 09:57:16,819] timesketch.tasks/INFO Index timeline [vol timeline] to index [34b8f46651b94a8d8ad9e29bb06a147c] (source: jsonl)
[2021-01-21 09:58:14,763] timesketch.elasticsearch/ERROR Unable to add events
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/usr/lib/python3.8/http/client.py", line 1347, in getresponse
response.begin()
File "/usr/lib/python3.8/http/client.py", line 307, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.8/http/client.py", line 268, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/usr/lib/python3.8/socket.py", line 669, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/connection/http_urllib3.py", line 241, in perform_request
response = self.pool.urlopen(
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 719, in urlopen
retries = retries.increment(
File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 376, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/usr/local/lib/python3.8/dist-packages/six.py", line 693, in reraise
raise value
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 665, in urlopen
httplib_response = self._make_request(
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/usr/lib/python3/dist-packages/urllib3/connectionpool.py", line 330, in _raise_timeout
raise ReadTimeoutError(
urllib3.exceptions.ReadTimeoutError: HTTPConnectionPool(host='elasticsearch', port=9200): Read timed out. (read timeout=10)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/datastores/elastic.py", line 830, in flush_queued_events
results = self.client.bulk(body=self.import_events)
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/client/utils.py", line 84, in _wrapped
return func(*args, params=params, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/client/__init__.py", line 448, in bulk
return self.transport.perform_request(
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/transport.py", line 351, in perform_request
status, headers_response, data = connection.perform_request(
File "/usr/local/lib/python3.8/dist-packages/elasticsearch/connection/http_urllib3.py", line 253, in perform_request
raise ConnectionTimeout("TIMEOUT", str(e), e)
elasticsearch.exceptions.ConnectionTimeout: ConnectionTimeout caused by - ReadTimeoutError(HTTPConnectionPool(host='elasticsearch', port=9200): Read timed out. (read timeout=10))
[2021-01-21 09:58:15,185] celery.app.trace/ERROR Task timesketch.lib.tasks.run_csv_jsonl[34b8f46651b94a8d8ad9e29bb06a147c] raised unexpected: UnboundLocalError("local variable 'results' referenced before assignment")
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/celery/app/trace.py", line 385, in trace_task
R = retval = fun(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/timesketch/app.py", line 198, in __call__
return TaskBase.__call__(self, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/celery/app/trace.py", line 650, in __protected_call__
return self.run(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/tasks.py", line 563, in run_csv_jsonl
results = es.flush_queued_events()
File "/usr/local/lib/python3.8/dist-packages/timesketch/lib/datastores/elastic.py", line 835, in flush_queued_events
errors_in_upload = results.get('errors', False)
UnboundLocalError: local variable 'results' referenced before assignment
|
urllib3.exceptions.ReadTimeoutError
|
def run(self, name, index, username):
"""Create the SearchIndex."""
es = ElasticsearchDataStore(
host=current_app.config["ELASTIC_HOST"], port=current_app.config["ELASTIC_PORT"]
)
user = User.query.filter_by(username=username).first()
if not user:
sys.stderr.write("User does not exist\n")
sys.exit(1)
if not es.client.indices.exists(index=index):
sys.stderr.write("Index does not exist in the datastore\n")
sys.exit(1)
if SearchIndex.query.filter_by(name=name, index_name=index).first():
sys.stderr.write("Index with this name already exist in Timesketch\n")
sys.exit(1)
searchindex = SearchIndex(name=name, description=name, user=user, index_name=index)
db_session.add(searchindex)
db_session.commit()
searchindex.grant_permission("read")
sys.stdout.write("Search index {0:s} created\n".format(name))
|
def run(self, name, index, username):
"""Create the SearchIndex."""
es = ElasticsearchDataStore(
host=current_app.config["ELASTIC_HOST"], port=current_app.config["ELASTIC_PORT"]
)
user = User.query.filter_by(username=username).first()
if not user:
sys.stderr.write("User does not exist\n")
sys.exit(1)
if not es.client.indices.exists(index=index):
sys.stderr.write("Index does not exist in the datastore\n")
sys.exit(1)
if SearchIndex.query.filter_by(name=name, index_name=index).first():
sys.stderr.write("Index with this name already exist in Timesketch\n")
sys.exit(1)
searchindex = SearchIndex(name=name, description=name, user=user, index_name=index)
searchindex.grant_permission("read")
db_session.add(searchindex)
db_session.commit()
sys.stdout.write("Search index {0:s} created\n".format(name))
|
https://github.com/google/timesketch/issues/1093
|
tsctl import -f <timeline>.jsonl -u <username>
Traceback (most recent call last):
File "/usr/lib/python3.6/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
TypeError: a bytes-like object is required, not 'NoneType'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/bin/tsctl", line 11, in <module>
sys.exit(main())
File "/usr/local/lib/python3.6/dist-packages/timesketch/tsctl.py", line 474, in main
shell_manager.run()
File "/usr/local/lib/python3.6/dist-packages/flask_script/__init__.py", line 417, in run
result = self.handle(argv[0], argv[1:])
File "/usr/local/lib/python3.6/dist-packages/flask_script/__init__.py", line 386, in handle
res = handle(*args, **config)
File "/usr/local/lib/python3.6/dist-packages/flask_script/commands.py", line 216, in __call__
return self.run(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/timesketch/tsctl.py", line 389, in run
timeline_name = codecs.decode(timeline_name, 'utf-8')
TypeError: <timesketch.tsctl.ImportTimeline object at 0x7f2648558080>: decoding with 'utf-8' codec failed (TypeError: a bytes-like object is required, not 'NoneType')
|
TypeError
|
def run(self, file_path, sketch_id, username, timeline_name):
"""This is the run method."""
file_path = os.path.realpath(file_path)
file_path_no_extension, extension = os.path.splitext(file_path)
extension = extension.lstrip(".")
filename = os.path.basename(file_path_no_extension)
supported_extensions = ("plaso", "csv", "jsonl")
if not os.path.isfile(file_path):
sys.exit("No such file: {0:s}".format(file_path))
if extension not in supported_extensions:
sys.exit(
"Extension {0:s} is not supported. "
"(supported extensions are: {1:s})".format(
extension, ", ".join(supported_extensions)
)
)
user = None
if not username:
username = pwd.getpwuid(os.stat(file_path).st_uid).pw_name
if not username == "root":
if not isinstance(username, six.text_type):
username = codecs.decode(username, "utf-8")
user = User.query.filter_by(username=username).first()
if not user:
sys.exit("Cannot determine user for file: {0:s}".format(file_path))
sketch = None
# If filename starts with <number> then use that as sketch_id.
# E.g: 42_file_name.plaso means sketch_id is 42.
sketch_id_from_filename = filename.split("_")[0]
if not sketch_id and sketch_id_from_filename.isdigit():
sketch_id = sketch_id_from_filename
if sketch_id:
try:
sketch = Sketch.query.get_with_acl(sketch_id, user=user)
except Forbidden:
pass
if not timeline_name:
if timeline_name is None:
timeline_name = "{0:s}_timeline".format(filename)
if not isinstance(timeline_name, six.text_type):
timeline_name = codecs.decode(timeline_name, "utf-8")
timeline_name = timeline_name.replace("_", " ")
# Remove sketch ID if present in the filename.
timeline_parts = timeline_name.split()
if timeline_parts[0].isdigit():
timeline_name = " ".join(timeline_name.split()[1:])
if not sketch:
# Create a new sketch.
sketch_name = "Sketch for: {0:s}".format(timeline_name)
sketch = Sketch(name=sketch_name, description=sketch_name, user=user)
# Need to commit here to be able to set permissions later.
db_session.add(sketch)
db_session.commit()
sketch.grant_permission(permission="read", user=user)
sketch.grant_permission(permission="write", user=user)
sketch.grant_permission(permission="delete", user=user)
sketch.status.append(sketch.Status(user=None, status="new"))
db_session.add(sketch)
db_session.commit()
index_name = uuid.uuid4().hex
if not isinstance(index_name, six.text_type):
index_name = codecs.decode(index_name, "utf-8")
searchindex = SearchIndex.get_or_create(
name=timeline_name, description=timeline_name, user=user, index_name=index_name
)
searchindex.grant_permission(permission="read", user=user)
searchindex.grant_permission(permission="write", user=user)
searchindex.grant_permission(permission="delete", user=user)
searchindex.set_status("processing")
db_session.add(searchindex)
db_session.commit()
if sketch and sketch.has_permission(user, "write"):
timeline = Timeline(
name=searchindex.name,
description=searchindex.description,
sketch=sketch,
user=user,
searchindex=searchindex,
)
timeline.set_status("processing")
sketch.timelines.append(timeline)
db_session.add(timeline)
db_session.commit()
# Start Celery pipeline for indexing and analysis.
# Import here to avoid circular imports.
from timesketch.lib import tasks # pylint: disable=import-outside-toplevel
pipeline = tasks.build_index_pipeline(
file_path, timeline_name, index_name, extension, sketch.id
)
pipeline.apply_async(task_id=index_name)
print(
"Imported {0:s} to sketch: {1:d} ({2:s})".format(
file_path, sketch.id, sketch.name
)
)
|
def run(self, file_path, sketch_id, username, timeline_name):
"""This is the run method."""
file_path = os.path.realpath(file_path)
file_path_no_extension, extension = os.path.splitext(file_path)
extension = extension.lstrip(".")
filename = os.path.basename(file_path_no_extension)
supported_extensions = ("plaso", "csv", "jsonl")
if not os.path.isfile(file_path):
sys.exit("No such file: {0:s}".format(file_path))
if extension not in supported_extensions:
sys.exit(
"Extension {0:s} is not supported. "
"(supported extensions are: {1:s})".format(
extension, ", ".join(supported_extensions)
)
)
user = None
if not username:
username = pwd.getpwuid(os.stat(file_path).st_uid).pw_name
if not username == "root":
if not isinstance(username, six.text_type):
username = codecs.decode(username, "utf-8")
user = User.query.filter_by(username=username).first()
if not user:
sys.exit("Cannot determine user for file: {0:s}".format(file_path))
sketch = None
# If filename starts with <number> then use that as sketch_id.
# E.g: 42_file_name.plaso means sketch_id is 42.
sketch_id_from_filename = filename.split("_")[0]
if not sketch_id and sketch_id_from_filename.isdigit():
sketch_id = sketch_id_from_filename
if sketch_id:
try:
sketch = Sketch.query.get_with_acl(sketch_id, user=user)
except Forbidden:
pass
if not timeline_name:
if not isinstance(timeline_name, six.text_type):
timeline_name = codecs.decode(timeline_name, "utf-8")
timeline_name = timeline_name.replace("_", " ")
# Remove sketch ID if present in the filename.
timeline_parts = timeline_name.split()
if timeline_parts[0].isdigit():
timeline_name = " ".join(timeline_name.split()[1:])
if not sketch:
# Create a new sketch.
sketch_name = "Sketch for: {0:s}".format(timeline_name)
sketch = Sketch(name=sketch_name, description=sketch_name, user=user)
# Need to commit here to be able to set permissions later.
db_session.add(sketch)
db_session.commit()
sketch.grant_permission(permission="read", user=user)
sketch.grant_permission(permission="write", user=user)
sketch.grant_permission(permission="delete", user=user)
sketch.status.append(sketch.Status(user=None, status="new"))
db_session.add(sketch)
db_session.commit()
index_name = uuid.uuid4().hex
if not isinstance(index_name, six.text_type):
index_name = codecs.decode(index_name, "utf-8")
searchindex = SearchIndex.get_or_create(
name=timeline_name, description=timeline_name, user=user, index_name=index_name
)
searchindex.grant_permission(permission="read", user=user)
searchindex.grant_permission(permission="write", user=user)
searchindex.grant_permission(permission="delete", user=user)
searchindex.set_status("processing")
db_session.add(searchindex)
db_session.commit()
if sketch and sketch.has_permission(user, "write"):
timeline = Timeline(
name=searchindex.name,
description=searchindex.description,
sketch=sketch,
user=user,
searchindex=searchindex,
)
timeline.set_status("processing")
sketch.timelines.append(timeline)
db_session.add(timeline)
db_session.commit()
# Start Celery pipeline for indexing and analysis.
# Import here to avoid circular imports.
from timesketch.lib import tasks
pipeline = tasks.build_index_pipeline(
file_path, timeline_name, index_name, extension, sketch.id
)
pipeline.apply_async(task_id=index_name)
print(
"Imported {0:s} to sketch: {1:d} ({2:s})".format(
file_path, sketch.id, sketch.name
)
)
|
https://github.com/google/timesketch/issues/1093
|
tsctl import -f <timeline>.jsonl -u <username>
Traceback (most recent call last):
File "/usr/lib/python3.6/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
TypeError: a bytes-like object is required, not 'NoneType'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/bin/tsctl", line 11, in <module>
sys.exit(main())
File "/usr/local/lib/python3.6/dist-packages/timesketch/tsctl.py", line 474, in main
shell_manager.run()
File "/usr/local/lib/python3.6/dist-packages/flask_script/__init__.py", line 417, in run
result = self.handle(argv[0], argv[1:])
File "/usr/local/lib/python3.6/dist-packages/flask_script/__init__.py", line 386, in handle
res = handle(*args, **config)
File "/usr/local/lib/python3.6/dist-packages/flask_script/commands.py", line 216, in __call__
return self.run(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/timesketch/tsctl.py", line 389, in run
timeline_name = codecs.decode(timeline_name, 'utf-8')
TypeError: <timesketch.tsctl.ImportTimeline object at 0x7f2648558080>: decoding with 'utf-8' codec failed (TypeError: a bytes-like object is required, not 'NoneType')
|
TypeError
|
def add_file(self, filepath, delimiter=","):
"""Add a CSV, JSONL or a PLASO file to the buffer.
Args:
filepath: the path to the file to add.
delimiter: if this is a CSV file then a delimiter can be defined.
Raises:
TypeError: if the entry does not fulfill requirements.
"""
self._ready()
if not os.path.isfile(filepath):
raise TypeError("Entry object needs to be a file that exists.")
file_ending = filepath.lower().split(".")[-1]
if file_ending == "csv":
data_frame = pandas.read_csv(filepath, delimiter=delimiter)
self.add_data_frame(data_frame)
elif file_ending == "plaso":
self._sketch.upload(self._timeline_name, filepath)
elif file_ending == "jsonl":
data_frame = None
with open(filepath, "r") as fh:
lines = [json.loads(x) for x in fh]
data_frame = pandas.DataFrame(lines)
if data_frame is None:
raise TypeError("Unable to parse the JSON file.")
if data_frame.empty:
raise TypeError("Is the JSON file empty?")
self.add_data_frame(data_frame)
else:
raise TypeError(
"File needs to have a file extension of: .csv, .jsonl or .plaso"
)
|
def add_file(self, filepath, delimiter=","):
"""Add a CSV, JSONL or a PLASO file to the buffer.
Args:
filepath: the path to the file to add.
delimiter: if this is a CSV file then a delimiter can be defined.
Raises:
TypeError: if the entry does not fulfill requirements.
"""
self._ready()
if not os.path.isfile(filepath):
raise TypeError("Entry object needs to be a file that exists.")
file_ending = filepath.lower().split(".")[-1]
if file_ending == "csv":
data_frame = pandas.read_csv(filepath, delimiter=delimiter)
self.add_data_frame(data_frame)
elif file_ending == "plaso":
self._sketch.upload(self._timeline_name, filepath)
elif file_ending == "jsonl":
data_frame = None
with open(filepath, "r") as fh:
lines = [json.loads(x) for x in fh]
data_frame = pandas.DataFrame(lines)
if data_frame is None:
raise TypeError("Unable to parse the JSON file.")
if data_frame.empty:
raise TypeError("Is the JSON file empty?")
self.add_data_frame(data_frame)
raise TypeError("File needs to have a file extension of: .csv, .jsonl or .plaso")
|
https://github.com/google/timesketch/issues/1017
|
Traceback (most recent call last):
File "...lib/python2.7/site-packages/timesketch/lib/tasks.py", line 467, in run_csv_jsonl
for event in read_and_validate(source_file_path):
File ".../lib/python2.7/site-packages/timesketch/lib/utils.py", line 81, in read_and_validate_csv
for row in reader:
File "/usr/lib/python2.7/csv.py", line 108, in next
row = self.reader.next()
UnicodeEncodeError: 'ascii' codec can't encode character u'\u2019' in position 92: ordinal not in range(128)
|
UnicodeEncodeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.