after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def e1(self):
left = self.e2()
if self.accept("plusassign"):
value = self.e1()
if not isinstance(left, IdNode):
raise ParseException(
"Plusassignment target must be an id.",
self.getline(),
left.lineno,
left.colno,
)
return PlusAssignmentNode(
left.subdir, left.lineno, left.colno, left.value, value
)
elif self.accept("assign"):
value = self.e1()
if not isinstance(left, IdNode):
raise ParseException(
"Assignment target must be an id.",
self.getline(),
left.lineno,
left.colno,
)
return AssignmentNode(left.subdir, left.lineno, left.colno, left.value, value)
elif self.accept("questionmark"):
if self.in_ternary:
raise ParseException(
"Nested ternary operators are not allowed.",
self.getline(),
left.lineno,
left.colno,
)
self.in_ternary = True
trueblock = self.e1()
self.expect("colon")
falseblock = self.e1()
self.in_ternary = False
return TernaryNode(
left.subdir, left.lineno, left.colno, left, trueblock, falseblock
)
return left
|
def e1(self):
left = self.e2()
if self.accept("plusassign"):
value = self.e1()
if not isinstance(left, IdNode):
raise ParseException(
"Plusassignment target must be an id.",
self.getline(),
left.lineno,
left.colno,
)
return PlusAssignmentNode(
left.subdir, left.lineno, left.colno, left.value, value
)
elif self.accept("assign"):
value = self.e1()
if not isinstance(left, IdNode):
raise ParseException(
"Assignment target must be an id.",
self.getline(),
left.lineno,
left.colno,
)
return AssignmentNode(left.subdir, left.lineno, left.colno, left.value, value)
elif self.accept("questionmark"):
if self.in_ternary:
raise ParseException(
"Nested ternary operators are not allowed.",
self.getline(),
left.lineno,
left.colno,
)
self.in_ternary = True
trueblock = self.e1()
self.expect("colon")
falseblock = self.e1()
self.in_ternary = False
return TernaryNode(left.lineno, left.colno, left, trueblock, falseblock)
return left
|
https://github.com/mesonbuild/meson/issues/2404
|
Traceback (most recent call last):
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line 353, in run
app.generate()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line 148, in generate
self._generate(env)
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line 188, in _generate
intr = interpreter.Interpreter(b, g)
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1327, in __init__
self.load_root_meson_file()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 124, in load_root_meson_file
self.ast = mparser.Parser(code, self.subdir).parse()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 443, in parse
block = self.codeblock()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 679, in codeblock
curline = self.line()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 673, in line
return self.statement()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 448, in statement
return self.e1()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 451, in e1
left = self.e2()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 476, in e2
left = self.e3()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 485, in e3
left = self.e4()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 494, in e4
left = self.e5()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 501, in e5
return self.e5add()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 504, in e5add
left = self.e5sub()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 510, in e5sub
left = self.e5mod()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 516, in e5mod
left = self.e5mul()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 522, in e5mul
left = self.e5div()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 528, in e5div
left = self.e6()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 538, in e6
return self.e7()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 544, in e7
args = self.args()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 589, in args
s = self.statement()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 448, in statement
return self.e1()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 451, in e1
left = self.e2()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 476, in e2
left = self.e3()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 485, in e3
left = self.e4()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 494, in e4
left = self.e5()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 501, in e5
return self.e5add()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 504, in e5add
left = self.e5sub()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 510, in e5sub
left = self.e5mod()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 516, in e5mod
left = self.e5mul()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 522, in e5mul
left = self.e5div()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 528, in e5div
left = self.e6()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 538, in e6
return self.e7()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 555, in e7
left = self.method_call(left)
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 618, in method_call
args = self.args()
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 590, in args
a = ArgumentNode(s)
File "/home/adrian/.local/lib/python3.5/site-packages/mesonbuild/mparser.py", line 351, in __init__
self.subdir = token.subdir
AttributeError: 'TernaryNode' object has no attribute 'subdir'
|
AttributeError
|
def _func_custom_target_impl(self, node, args, kwargs):
"Implementation-only, without FeatureNew checks, for internal use"
name = args[0]
kwargs["install_mode"] = self._get_kwarg_install_mode(kwargs)
if "input" in kwargs:
try:
kwargs["input"] = self.source_strings_to_files(
extract_as_list(kwargs, "input")
)
except mesonlib.MesonException:
mlog.warning(
"""Custom target input \'%s\' can\'t be converted to File object(s).
This will become a hard error in the future."""
% kwargs["input"]
)
tg = CustomTargetHolder(
build.CustomTarget(name, self.subdir, self.subproject, kwargs), self
)
self.add_target(name, tg.held_object)
return tg
|
def _func_custom_target_impl(self, node, args, kwargs):
"Implementation-only, without FeatureNew checks, for internal use"
name = args[0]
kwargs["install_mode"] = self._get_kwarg_install_mode(kwargs)
tg = CustomTargetHolder(
build.CustomTarget(name, self.subdir, self.subproject, kwargs), self
)
self.add_target(name, tg.held_object)
return tg
|
https://github.com/mesonbuild/meson/issues/2783
|
$ meson -v
0.44.0
$ meson introspect --target-files egd_tables.h@cus
Traceback (most recent call last):
File "/usr/bin/meson", line 37, in <module>
sys.exit(main())
File "/usr/bin/meson", line 34, in main
return mesonmain.run(sys.argv[1:], launcher)
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 297, in run
return mintro.run(remaining_args)
File "/usr/lib/python3.6/site-packages/mesonbuild/mintro.py", line 236, in run
list_target_files(options.target_files, coredata, builddata)
File "/usr/lib/python3.6/site-packages/mesonbuild/mintro.py", line 118, in list_target_files
sources = [os.path.join(i.subdir, i.fname) for i in sources]
File "/usr/lib/python3.6/site-packages/mesonbuild/mintro.py", line 118, in <listcomp>
sources = [os.path.join(i.subdir, i.fname) for i in sources]
AttributeError: 'str' object has no attribute 'subdir'
|
AttributeError
|
def source_strings_to_files(self, sources):
results = []
mesonlib.check_direntry_issues(sources)
if not isinstance(sources, list):
sources = [sources]
for s in sources:
if isinstance(
s,
(mesonlib.File, GeneratedListHolder, TargetHolder, CustomTargetIndexHolder),
):
pass
elif isinstance(s, str):
self.validate_within_subproject(self.subdir, s)
s = mesonlib.File.from_source_file(
self.environment.source_dir, self.subdir, s
)
else:
raise InterpreterException(
"Source item is {!r} instead of string or File-type object".format(s)
)
results.append(s)
return results
|
def source_strings_to_files(self, sources):
results = []
mesonlib.check_direntry_issues(sources)
if not isinstance(sources, list):
sources = [sources]
for s in sources:
if isinstance(
s,
(
mesonlib.File,
GeneratedListHolder,
CustomTargetHolder,
CustomTargetIndexHolder,
),
):
pass
elif isinstance(s, str):
self.validate_within_subproject(self.subdir, s)
s = mesonlib.File.from_source_file(
self.environment.source_dir, self.subdir, s
)
else:
raise InterpreterException(
"Source item is {!r} instead of string or File-type object".format(s)
)
results.append(s)
return results
|
https://github.com/mesonbuild/meson/issues/2783
|
$ meson -v
0.44.0
$ meson introspect --target-files egd_tables.h@cus
Traceback (most recent call last):
File "/usr/bin/meson", line 37, in <module>
sys.exit(main())
File "/usr/bin/meson", line 34, in main
return mesonmain.run(sys.argv[1:], launcher)
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 297, in run
return mintro.run(remaining_args)
File "/usr/lib/python3.6/site-packages/mesonbuild/mintro.py", line 236, in run
list_target_files(options.target_files, coredata, builddata)
File "/usr/lib/python3.6/site-packages/mesonbuild/mintro.py", line 118, in list_target_files
sources = [os.path.join(i.subdir, i.fname) for i in sources]
File "/usr/lib/python3.6/site-packages/mesonbuild/mintro.py", line 118, in <listcomp>
sources = [os.path.join(i.subdir, i.fname) for i in sources]
AttributeError: 'str' object has no attribute 'subdir'
|
AttributeError
|
def __init__(self, build):
self.build = build
self.environment = build.environment
self.processed_targets = {}
self.build_to_src = mesonlib.relpath(
self.environment.get_source_dir(), self.environment.get_build_dir()
)
|
def __init__(self, build):
self.build = build
self.environment = build.environment
self.processed_targets = {}
self.build_to_src = os.path.relpath(
self.environment.get_source_dir(), self.environment.get_build_dir()
)
|
https://github.com/mesonbuild/meson/issues/3239
|
Stdout:
Traceback (most recent call last):
File "D:\dev\meson\mesonbuild\mesonmain.py", line 368, in run
app.generate()
File "D:\dev\meson\mesonbuild\mesonmain.py", line 150, in generate
self._generate(env)
File "D:\dev\meson\mesonbuild\mesonmain.py", line 168, in _generate
g = ninjabackend.NinjaBackend(b)
File "D:\dev\meson\mesonbuild\backend\ninjabackend.py", line 143, in __init__
super().__init__(build)
File "D:\dev\meson\mesonbuild\backend\backends.py", line 111, in __init__
self.environment.get_build_dir())
File "C:\Python36\lib\ntpath.py", line 585, in relpath
path_drive, start_drive))
ValueError: path is on mount 'D:', start on mount 'C:'
|
ValueError
|
def method_call(self, method_name, args, kwargs):
try:
fn = getattr(self.held_object, method_name)
except AttributeError:
raise InvalidArguments(
"Module %s does not have method %s." % (self.modname, method_name)
)
if method_name.startswith("_"):
raise InvalidArguments(
"Function {!r} in module {!r} is private.".format(method_name, self.modname)
)
if not getattr(fn, "no-args-flattening", False):
args = flatten(args)
# This is not 100% reliable but we can't use hash()
# because the Build object contains dicts and lists.
num_targets = len(self.interpreter.build.targets)
state = ModuleState(
build_to_src=mesonlib.relpath(
self.interpreter.environment.get_source_dir(),
self.interpreter.environment.get_build_dir(),
),
subproject=self.interpreter.subproject,
subdir=self.interpreter.subdir,
current_lineno=self.interpreter.current_lineno,
environment=self.interpreter.environment,
project_name=self.interpreter.build.project_name,
project_version=self.interpreter.build.dep_manifest[
self.interpreter.active_projectname
],
# The backend object is under-used right now, but we will need it:
# https://github.com/mesonbuild/meson/issues/1419
backend=self.interpreter.backend,
compilers=self.interpreter.build.compilers,
targets=self.interpreter.build.targets,
data=self.interpreter.build.data,
headers=self.interpreter.build.get_headers(),
man=self.interpreter.build.get_man(),
global_args=self.interpreter.build.global_args,
project_args=self.interpreter.build.projects_args.get(
self.interpreter.subproject, {}
),
build_machine=self.interpreter.builtin["build_machine"].held_object,
host_machine=self.interpreter.builtin["host_machine"].held_object,
target_machine=self.interpreter.builtin["target_machine"].held_object,
)
if self.held_object.is_snippet(method_name):
value = fn(self.interpreter, state, args, kwargs)
return self.interpreter.holderify(value)
else:
value = fn(state, args, kwargs)
if num_targets != len(self.interpreter.build.targets):
raise InterpreterException(
"Extension module altered internal state illegally."
)
return self.interpreter.module_method_callback(value)
|
def method_call(self, method_name, args, kwargs):
try:
fn = getattr(self.held_object, method_name)
except AttributeError:
raise InvalidArguments(
"Module %s does not have method %s." % (self.modname, method_name)
)
if method_name.startswith("_"):
raise InvalidArguments(
"Function {!r} in module {!r} is private.".format(method_name, self.modname)
)
if not getattr(fn, "no-args-flattening", False):
args = flatten(args)
# This is not 100% reliable but we can't use hash()
# because the Build object contains dicts and lists.
num_targets = len(self.interpreter.build.targets)
state = ModuleState(
build_to_src=os.path.relpath(
self.interpreter.environment.get_source_dir(),
self.interpreter.environment.get_build_dir(),
),
subproject=self.interpreter.subproject,
subdir=self.interpreter.subdir,
current_lineno=self.interpreter.current_lineno,
environment=self.interpreter.environment,
project_name=self.interpreter.build.project_name,
project_version=self.interpreter.build.dep_manifest[
self.interpreter.active_projectname
],
# The backend object is under-used right now, but we will need it:
# https://github.com/mesonbuild/meson/issues/1419
backend=self.interpreter.backend,
compilers=self.interpreter.build.compilers,
targets=self.interpreter.build.targets,
data=self.interpreter.build.data,
headers=self.interpreter.build.get_headers(),
man=self.interpreter.build.get_man(),
global_args=self.interpreter.build.global_args,
project_args=self.interpreter.build.projects_args.get(
self.interpreter.subproject, {}
),
build_machine=self.interpreter.builtin["build_machine"].held_object,
host_machine=self.interpreter.builtin["host_machine"].held_object,
target_machine=self.interpreter.builtin["target_machine"].held_object,
)
if self.held_object.is_snippet(method_name):
value = fn(self.interpreter, state, args, kwargs)
return self.interpreter.holderify(value)
else:
value = fn(state, args, kwargs)
if num_targets != len(self.interpreter.build.targets):
raise InterpreterException(
"Extension module altered internal state illegally."
)
return self.interpreter.module_method_callback(value)
|
https://github.com/mesonbuild/meson/issues/3239
|
Stdout:
Traceback (most recent call last):
File "D:\dev\meson\mesonbuild\mesonmain.py", line 368, in run
app.generate()
File "D:\dev\meson\mesonbuild\mesonmain.py", line 150, in generate
self._generate(env)
File "D:\dev\meson\mesonbuild\mesonmain.py", line 168, in _generate
g = ninjabackend.NinjaBackend(b)
File "D:\dev\meson\mesonbuild\backend\ninjabackend.py", line 143, in __init__
super().__init__(build)
File "D:\dev\meson\mesonbuild\backend\backends.py", line 111, in __init__
self.environment.get_build_dir())
File "C:\Python36\lib\ntpath.py", line 585, in relpath
path_drive, start_drive))
ValueError: path is on mount 'D:', start on mount 'C:'
|
ValueError
|
def run_command_impl(self, node, args, kwargs, in_builddir=False):
if len(args) < 1:
raise InterpreterException("Not enough arguments")
cmd = args[0]
cargs = args[1:]
capture = kwargs.get("capture", True)
srcdir = self.environment.get_source_dir()
builddir = self.environment.get_build_dir()
check = kwargs.get("check", False)
if not isinstance(check, bool):
raise InterpreterException("Check must be boolean.")
m = (
"must be a string, or the output of find_program(), files() "
"or configure_file(), or a compiler object; not {!r}"
)
if isinstance(cmd, ExternalProgramHolder):
cmd = cmd.held_object
if isinstance(cmd, build.Executable):
progname = node.args.arguments[0].value
msg = (
"Program {!r} was overridden with the compiled executable {!r}"
" and therefore cannot be used during configuration"
)
raise InterpreterException(msg.format(progname, cmd.description()))
elif isinstance(cmd, CompilerHolder):
cmd = cmd.compiler.get_exelist()[0]
prog = ExternalProgram(cmd, silent=True)
if not prog.found():
raise InterpreterException(
"Program {!r} not found or not executable".format(cmd)
)
cmd = prog
else:
if isinstance(cmd, mesonlib.File):
cmd = cmd.absolute_path(srcdir, builddir)
elif not isinstance(cmd, str):
raise InterpreterException("First argument " + m.format(cmd))
# Prefer scripts in the current source directory
search_dir = os.path.join(srcdir, self.subdir)
prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
if not prog.found():
raise InterpreterException(
"Program or command {!r} not found or not executable".format(cmd)
)
cmd = prog
cmd_path = mesonlib.relpath(cmd.get_path(), start=srcdir)
if not cmd_path.startswith("..") and cmd_path not in self.build_def_files:
self.build_def_files.append(cmd_path)
expanded_args = []
for a in listify(cargs):
if isinstance(a, str):
expanded_args.append(a)
elif isinstance(a, mesonlib.File):
expanded_args.append(a.absolute_path(srcdir, builddir))
elif isinstance(a, ExternalProgramHolder):
expanded_args.append(a.held_object.get_path())
else:
raise InterpreterException("Arguments " + m.format(a))
for a in expanded_args:
if not os.path.isabs(a):
a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
if os.path.isfile(a):
a = mesonlib.relpath(a, start=srcdir)
if not a.startswith(".."):
if a not in self.build_def_files:
self.build_def_files.append(a)
return RunProcess(
cmd,
expanded_args,
srcdir,
builddir,
self.subdir,
self.environment.get_build_command() + ["introspect"],
in_builddir=in_builddir,
check=check,
capture=capture,
)
|
def run_command_impl(self, node, args, kwargs, in_builddir=False):
if len(args) < 1:
raise InterpreterException("Not enough arguments")
cmd = args[0]
cargs = args[1:]
capture = kwargs.get("capture", True)
srcdir = self.environment.get_source_dir()
builddir = self.environment.get_build_dir()
check = kwargs.get("check", False)
if not isinstance(check, bool):
raise InterpreterException("Check must be boolean.")
m = (
"must be a string, or the output of find_program(), files() "
"or configure_file(), or a compiler object; not {!r}"
)
if isinstance(cmd, ExternalProgramHolder):
cmd = cmd.held_object
if isinstance(cmd, build.Executable):
progname = node.args.arguments[0].value
msg = (
"Program {!r} was overridden with the compiled executable {!r}"
" and therefore cannot be used during configuration"
)
raise InterpreterException(msg.format(progname, cmd.description()))
elif isinstance(cmd, CompilerHolder):
cmd = cmd.compiler.get_exelist()[0]
prog = ExternalProgram(cmd, silent=True)
if not prog.found():
raise InterpreterException(
"Program {!r} not found or not executable".format(cmd)
)
cmd = prog
else:
if isinstance(cmd, mesonlib.File):
cmd = cmd.absolute_path(srcdir, builddir)
elif not isinstance(cmd, str):
raise InterpreterException("First argument " + m.format(cmd))
# Prefer scripts in the current source directory
search_dir = os.path.join(srcdir, self.subdir)
prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
if not prog.found():
raise InterpreterException(
"Program or command {!r} not found or not executable".format(cmd)
)
cmd = prog
try:
cmd_path = os.path.relpath(cmd.get_path(), start=srcdir)
except ValueError:
# On Windows a relative path can't be evaluated for
# paths on two different drives (i.e. c:\foo and f:\bar).
# The only thing left to is is to use the original absolute
# path.
cmd_path = cmd.get_path()
if not cmd_path.startswith("..") and cmd_path not in self.build_def_files:
self.build_def_files.append(cmd_path)
expanded_args = []
for a in listify(cargs):
if isinstance(a, str):
expanded_args.append(a)
elif isinstance(a, mesonlib.File):
expanded_args.append(a.absolute_path(srcdir, builddir))
elif isinstance(a, ExternalProgramHolder):
expanded_args.append(a.held_object.get_path())
else:
raise InterpreterException("Arguments " + m.format(a))
for a in expanded_args:
if not os.path.isabs(a):
a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
if os.path.isfile(a):
a = os.path.relpath(a, start=srcdir)
if not a.startswith(".."):
if a not in self.build_def_files:
self.build_def_files.append(a)
return RunProcess(
cmd,
expanded_args,
srcdir,
builddir,
self.subdir,
self.environment.get_build_command() + ["introspect"],
in_builddir=in_builddir,
check=check,
capture=capture,
)
|
https://github.com/mesonbuild/meson/issues/3239
|
Stdout:
Traceback (most recent call last):
File "D:\dev\meson\mesonbuild\mesonmain.py", line 368, in run
app.generate()
File "D:\dev\meson\mesonbuild\mesonmain.py", line 150, in generate
self._generate(env)
File "D:\dev\meson\mesonbuild\mesonmain.py", line 168, in _generate
g = ninjabackend.NinjaBackend(b)
File "D:\dev\meson\mesonbuild\backend\ninjabackend.py", line 143, in __init__
super().__init__(build)
File "D:\dev\meson\mesonbuild\backend\backends.py", line 111, in __init__
self.environment.get_build_dir())
File "C:\Python36\lib\ntpath.py", line 585, in relpath
path_drive, start_drive))
ValueError: path is on mount 'D:', start on mount 'C:'
|
ValueError
|
def __init__(
self,
fname,
outdir,
aliases,
strip,
install_name_mappings,
install_rpath,
install_mode,
optional=False,
):
self.fname = fname
self.outdir = outdir
self.aliases = aliases
self.strip = strip
self.install_name_mappings = install_name_mappings
self.install_rpath = install_rpath
self.install_mode = install_mode
self.optional = optional
|
def __init__(
self,
fname,
outdir,
aliases,
strip,
install_name_mappings,
install_rpath,
install_mode,
):
self.fname = fname
self.outdir = outdir
self.aliases = aliases
self.strip = strip
self.install_name_mappings = install_name_mappings
self.install_rpath = install_rpath
self.install_mode = install_mode
|
https://github.com/mesonbuild/meson/issues/3965
|
Traceback (most recent call last):
File "d:\\projects\\cerbero\\meson\\master\\build\\build-tools\\Scripts\\meson.py", line 4, in <module>
__import__('pkg_resources').run_script('meson==0.47.0', 'meson.py')
File "c:\Python36-32\lib\site-packages\pkg_resources\__init__.py", line 658, in run_script
self.require(requires)[0].run_script(script_name, ns)
File "c:\Python36-32\lib\site-packages\pkg_resources\__init__.py", line 1438, in run_script
exec(code, namespace, namespace)
File "d:\projects\cerbero\meson\master\build\build-tools\lib\site-packages\meson-0.47.0-py3.6.egg\EGG-INFO\scripts\meson.py", line 29, in <module>
sys.exit(mesonmain.main())
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\mesonmain.py", line 367, in main
return run(sys.argv[1:], launcher)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\mesonmain.py", line 281, in run
return minstall.run(remaining_args)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 466, in run
installer.do_install(datafilename)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 298, in do_install
self.install_targets(d)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 387, in install_targets
fname = check_for_stampfile(t.fname)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 170, in check_for_stampfile
if os.stat(fname).st_size == 0:
FileNotFoundError: [WinError 2] The system cannot find the file specified: 'tls/gnutls\\giognutls.lib'
FAILED: meson-install
"c:\\Python36-32\\python.exe" "d:\\projects\\cerbero\\meson\\master\\build\\build-tools\\Scripts\\meson.py" "install" "--no-rebuild"
ninja: build stopped: subcommand failed.
|
FileNotFoundError
|
def generate_target_install(self, d):
for t in self.build.get_targets().values():
if not t.should_install():
continue
outdirs, custom_install_dir = self.get_target_install_dirs(t)
# Sanity-check the outputs and install_dirs
num_outdirs, num_out = len(outdirs), len(t.get_outputs())
if num_outdirs != 1 and num_outdirs != num_out:
m = (
'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n'
"Pass 'false' for outputs that should not be installed and 'true' for\n"
"using the default installation directory for an output."
)
raise MesonException(
m.format(t.name, num_out, t.get_outputs(), num_outdirs)
)
install_mode = t.get_custom_install_mode()
# Install the target output(s)
if isinstance(t, build.BuildTarget):
should_strip = self.get_option_for_target("strip", t)
# Install primary build output (library/executable/jar, etc)
# Done separately because of strip/aliases/rpath
if outdirs[0] is not False:
mappings = self.get_target_link_deps_mappings(t, d.prefix)
i = TargetInstallData(
self.get_target_filename(t),
outdirs[0],
t.get_aliases(),
should_strip,
mappings,
t.install_rpath,
install_mode,
)
d.targets.append(i)
# On toolchains/platforms that use an import library for
# linking (separate from the shared library with all the
# code), we need to install that too (dll.a/.lib).
if (
isinstance(
t, (build.SharedLibrary, build.SharedModule, build.Executable)
)
and t.get_import_filename()
):
if custom_install_dir:
# If the DLL is installed into a custom directory,
# install the import library into the same place so
# it doesn't go into a surprising place
implib_install_dir = outdirs[0]
else:
implib_install_dir = self.environment.get_import_lib_dir()
# Install the import library; may not exist for shared modules
i = TargetInstallData(
self.get_target_filename_for_linking(t),
implib_install_dir,
{},
False,
{},
"",
install_mode,
optional=isinstance(t, build.SharedModule),
)
d.targets.append(i)
# Install secondary outputs. Only used for Vala right now.
if num_outdirs > 1:
for output, outdir in zip(t.get_outputs()[1:], outdirs[1:]):
# User requested that we not install this output
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
d.targets.append(i)
elif isinstance(t, build.CustomTarget):
# If only one install_dir is specified, assume that all
# outputs will be installed into it. This is for
# backwards-compatibility and because it makes sense to
# avoid repetition since this is a common use-case.
#
# To selectively install only some outputs, pass `false` as
# the install_dir for the corresponding output by index
if num_outdirs == 1 and num_out > 1:
for output in t.get_outputs():
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(
f, outdirs[0], {}, False, {}, None, install_mode
)
d.targets.append(i)
else:
for output, outdir in zip(t.get_outputs(), outdirs):
# User requested that we not install this output
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
d.targets.append(i)
|
def generate_target_install(self, d):
for t in self.build.get_targets().values():
if not t.should_install():
continue
outdirs, custom_install_dir = self.get_target_install_dirs(t)
# Sanity-check the outputs and install_dirs
num_outdirs, num_out = len(outdirs), len(t.get_outputs())
if num_outdirs != 1 and num_outdirs != num_out:
m = (
'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n'
"Pass 'false' for outputs that should not be installed and 'true' for\n"
"using the default installation directory for an output."
)
raise MesonException(
m.format(t.name, num_out, t.get_outputs(), num_outdirs)
)
install_mode = t.get_custom_install_mode()
# Install the target output(s)
if isinstance(t, build.BuildTarget):
should_strip = self.get_option_for_target("strip", t)
# Install primary build output (library/executable/jar, etc)
# Done separately because of strip/aliases/rpath
if outdirs[0] is not False:
mappings = self.get_target_link_deps_mappings(t, d.prefix)
i = TargetInstallData(
self.get_target_filename(t),
outdirs[0],
t.get_aliases(),
should_strip,
mappings,
t.install_rpath,
install_mode,
)
d.targets.append(i)
# On toolchains/platforms that use an import library for
# linking (separate from the shared library with all the
# code), we need to install that too (dll.a/.lib).
if (
isinstance(
t, (build.SharedLibrary, build.SharedModule, build.Executable)
)
and t.get_import_filename()
):
if custom_install_dir:
# If the DLL is installed into a custom directory,
# install the import library into the same place so
# it doesn't go into a surprising place
implib_install_dir = outdirs[0]
else:
implib_install_dir = self.environment.get_import_lib_dir()
# Install the import library.
i = TargetInstallData(
self.get_target_filename_for_linking(t),
implib_install_dir,
{},
False,
{},
"",
install_mode,
)
d.targets.append(i)
# Install secondary outputs. Only used for Vala right now.
if num_outdirs > 1:
for output, outdir in zip(t.get_outputs()[1:], outdirs[1:]):
# User requested that we not install this output
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
d.targets.append(i)
elif isinstance(t, build.CustomTarget):
# If only one install_dir is specified, assume that all
# outputs will be installed into it. This is for
# backwards-compatibility and because it makes sense to
# avoid repetition since this is a common use-case.
#
# To selectively install only some outputs, pass `false` as
# the install_dir for the corresponding output by index
if num_outdirs == 1 and num_out > 1:
for output in t.get_outputs():
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(
f, outdirs[0], {}, False, {}, None, install_mode
)
d.targets.append(i)
else:
for output, outdir in zip(t.get_outputs(), outdirs):
# User requested that we not install this output
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
d.targets.append(i)
|
https://github.com/mesonbuild/meson/issues/3965
|
Traceback (most recent call last):
File "d:\\projects\\cerbero\\meson\\master\\build\\build-tools\\Scripts\\meson.py", line 4, in <module>
__import__('pkg_resources').run_script('meson==0.47.0', 'meson.py')
File "c:\Python36-32\lib\site-packages\pkg_resources\__init__.py", line 658, in run_script
self.require(requires)[0].run_script(script_name, ns)
File "c:\Python36-32\lib\site-packages\pkg_resources\__init__.py", line 1438, in run_script
exec(code, namespace, namespace)
File "d:\projects\cerbero\meson\master\build\build-tools\lib\site-packages\meson-0.47.0-py3.6.egg\EGG-INFO\scripts\meson.py", line 29, in <module>
sys.exit(mesonmain.main())
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\mesonmain.py", line 367, in main
return run(sys.argv[1:], launcher)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\mesonmain.py", line 281, in run
return minstall.run(remaining_args)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 466, in run
installer.do_install(datafilename)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 298, in do_install
self.install_targets(d)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 387, in install_targets
fname = check_for_stampfile(t.fname)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 170, in check_for_stampfile
if os.stat(fname).st_size == 0:
FileNotFoundError: [WinError 2] The system cannot find the file specified: 'tls/gnutls\\giognutls.lib'
FAILED: meson-install
"c:\\Python36-32\\python.exe" "d:\\projects\\cerbero\\meson\\master\\build\\build-tools\\Scripts\\meson.py" "install" "--no-rebuild"
ninja: build stopped: subcommand failed.
|
FileNotFoundError
|
def install_targets(self, d):
for t in d.targets:
if not os.path.exists(t.fname):
# For example, import libraries of shared modules are optional
if t.optional:
print("File {!r} not found, skipping".format(t.fname))
continue
else:
raise RuntimeError("File {!r} could not be found".format(t.fname))
fname = check_for_stampfile(t.fname)
outdir = get_destdir_path(d, t.outdir)
outname = os.path.join(outdir, os.path.basename(fname))
final_path = os.path.join(d.prefix, t.outdir, os.path.basename(fname))
aliases = t.aliases
should_strip = t.strip
install_rpath = t.install_rpath
install_name_mappings = t.install_name_mappings
install_mode = t.install_mode
d.dirmaker.makedirs(outdir, exist_ok=True)
if not os.path.exists(fname):
raise RuntimeError("File {!r} could not be found".format(fname))
elif os.path.isfile(fname):
self.do_copyfile(fname, outname)
set_mode(outname, install_mode, d.install_umask)
if should_strip and d.strip_bin is not None:
if fname.endswith(".jar"):
print("Not stripping jar target:", os.path.basename(fname))
continue
print("Stripping target {!r}".format(fname))
ps, stdo, stde = Popen_safe(d.strip_bin + [outname])
if ps.returncode != 0:
print("Could not strip file.\n")
print("Stdout:\n%s\n" % stdo)
print("Stderr:\n%s\n" % stde)
sys.exit(1)
pdb_filename = os.path.splitext(fname)[0] + ".pdb"
if not should_strip and os.path.exists(pdb_filename):
pdb_outname = os.path.splitext(outname)[0] + ".pdb"
self.do_copyfile(pdb_filename, pdb_outname)
set_mode(pdb_outname, install_mode, d.install_umask)
elif os.path.isdir(fname):
fname = os.path.join(d.build_dir, fname.rstrip("/"))
outname = os.path.join(outdir, os.path.basename(fname))
self.do_copydir(d, fname, outname, None, install_mode)
else:
raise RuntimeError("Unknown file type for {!r}".format(fname))
printed_symlink_error = False
for alias, to in aliases.items():
try:
symlinkfilename = os.path.join(outdir, alias)
try:
os.unlink(symlinkfilename)
except FileNotFoundError:
pass
os.symlink(to, symlinkfilename)
append_to_log(self.lf, symlinkfilename)
except (NotImplementedError, OSError):
if not printed_symlink_error:
print(
"Symlink creation does not work on this platform. "
"Skipping all symlinking."
)
printed_symlink_error = True
if os.path.isfile(outname):
try:
depfixer.fix_rpath(
outname,
install_rpath,
final_path,
install_name_mappings,
verbose=False,
)
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
pass
else:
raise
|
def install_targets(self, d):
for t in d.targets:
fname = check_for_stampfile(t.fname)
outdir = get_destdir_path(d, t.outdir)
outname = os.path.join(outdir, os.path.basename(fname))
final_path = os.path.join(d.prefix, t.outdir, os.path.basename(fname))
aliases = t.aliases
should_strip = t.strip
install_rpath = t.install_rpath
install_name_mappings = t.install_name_mappings
install_mode = t.install_mode
d.dirmaker.makedirs(outdir, exist_ok=True)
if not os.path.exists(fname):
raise RuntimeError("File {!r} could not be found".format(fname))
elif os.path.isfile(fname):
self.do_copyfile(fname, outname)
set_mode(outname, install_mode, d.install_umask)
if should_strip and d.strip_bin is not None:
if fname.endswith(".jar"):
print("Not stripping jar target:", os.path.basename(fname))
continue
print("Stripping target {!r}".format(fname))
ps, stdo, stde = Popen_safe(d.strip_bin + [outname])
if ps.returncode != 0:
print("Could not strip file.\n")
print("Stdout:\n%s\n" % stdo)
print("Stderr:\n%s\n" % stde)
sys.exit(1)
pdb_filename = os.path.splitext(fname)[0] + ".pdb"
if not should_strip and os.path.exists(pdb_filename):
pdb_outname = os.path.splitext(outname)[0] + ".pdb"
self.do_copyfile(pdb_filename, pdb_outname)
set_mode(pdb_outname, install_mode, d.install_umask)
elif os.path.isdir(fname):
fname = os.path.join(d.build_dir, fname.rstrip("/"))
outname = os.path.join(outdir, os.path.basename(fname))
self.do_copydir(d, fname, outname, None, install_mode)
else:
raise RuntimeError("Unknown file type for {!r}".format(fname))
printed_symlink_error = False
for alias, to in aliases.items():
try:
symlinkfilename = os.path.join(outdir, alias)
try:
os.unlink(symlinkfilename)
except FileNotFoundError:
pass
os.symlink(to, symlinkfilename)
append_to_log(self.lf, symlinkfilename)
except (NotImplementedError, OSError):
if not printed_symlink_error:
print(
"Symlink creation does not work on this platform. "
"Skipping all symlinking."
)
printed_symlink_error = True
if os.path.isfile(outname):
try:
depfixer.fix_rpath(
outname,
install_rpath,
final_path,
install_name_mappings,
verbose=False,
)
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
pass
else:
raise
|
https://github.com/mesonbuild/meson/issues/3965
|
Traceback (most recent call last):
File "d:\\projects\\cerbero\\meson\\master\\build\\build-tools\\Scripts\\meson.py", line 4, in <module>
__import__('pkg_resources').run_script('meson==0.47.0', 'meson.py')
File "c:\Python36-32\lib\site-packages\pkg_resources\__init__.py", line 658, in run_script
self.require(requires)[0].run_script(script_name, ns)
File "c:\Python36-32\lib\site-packages\pkg_resources\__init__.py", line 1438, in run_script
exec(code, namespace, namespace)
File "d:\projects\cerbero\meson\master\build\build-tools\lib\site-packages\meson-0.47.0-py3.6.egg\EGG-INFO\scripts\meson.py", line 29, in <module>
sys.exit(mesonmain.main())
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\mesonmain.py", line 367, in main
return run(sys.argv[1:], launcher)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\mesonmain.py", line 281, in run
return minstall.run(remaining_args)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 466, in run
installer.do_install(datafilename)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 298, in do_install
self.install_targets(d)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 387, in install_targets
fname = check_for_stampfile(t.fname)
File "d:\projects\cerbero\meson\master\build\build-tools\Lib\site-packages\meson-0.47.0-py3.6.egg\mesonbuild\minstall.py", line 170, in check_for_stampfile
if os.stat(fname).st_size == 0:
FileNotFoundError: [WinError 2] The system cannot find the file specified: 'tls/gnutls\\giognutls.lib'
FAILED: meson-install
"c:\\Python36-32\\python.exe" "d:\\projects\\cerbero\\meson\\master\\build\\build-tools\\Scripts\\meson.py" "install" "--no-rebuild"
ninja: build stopped: subcommand failed.
|
FileNotFoundError
|
def decode_match(match):
try:
return codecs.decode(match.group(0), "unicode_escape")
except UnicodeDecodeError as err:
raise MesonUnicodeDecodeError(match.group(0))
|
def decode_match(match):
return codecs.decode(match.group(0), "unicode_escape")
|
https://github.com/mesonbuild/meson/issues/3169
|
tansell@tansell:~/github/mesonbuild/meson/test cases/common/100 print null$ meson build .
The Meson build system
Version: 0.45.0.dev1
Source dir: XXXX/github/mesonbuild/meson/test cases/common/100 print null
Build dir: XXXX/github/mesonbuild/meson/test cases/common/100 print null/build
Build type: native build
Project name: 100 print null
Build machine cpu family: x86_64
Build machine cpu: x86_64
Program find found: YES (/usr/bin/find)
Traceback (most recent call last):
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
363, in run
app.generate()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
150, in generate
self._generate(env)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
197, in _generate
intr.run()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 2992, in run
super().run()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 173, in run
self.evaluate_codeblock(self.ast, start=1)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 195, in evaluate_codeblock
raise e
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 189, in evaluate_codeblock
self.evaluate_statement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 230, in evaluate_statement
return self.evaluate_foreach(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 404, in evaluate_foreach
self.evaluate_codeblock(node.block)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 195, in evaluate_codeblock
raise e
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 189, in evaluate_codeblock
self.evaluate_statement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 232, in evaluate_statement
return self.evaluate_plusassign(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 409, in evaluate_plusassign
addition = self.evaluate_statement(node.value)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 216, in evaluate_statement
return self.evaluate_arraystatement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 243, in evaluate_arraystatement
(arguments, kwargs) = self.reduce_arguments(cur.args)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 640, in reduce_arguments
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 640, in <listcomp>
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 200, in evaluate_statement
return self.function_call(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 456, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 47, in wrapped
return f(self, node, args, kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1614, in func_files
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in
args]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1614, in <listcomp>
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in
args]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonlib.py", line 230, in from_source_file
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
File "/usr/lib/python3.5/genericpath.py", line 30, in isfile
st = os.stat(path)
ValueError: embedded null byte
|
ValueError
|
def lex(self, subdir):
line_start = 0
lineno = 1
loc = 0
par_count = 0
bracket_count = 0
col = 0
while loc < len(self.code):
matched = False
value = None
for tid, reg in self.token_specification:
mo = reg.match(self.code, loc)
if mo:
curline = lineno
curline_start = line_start
col = mo.start() - line_start
matched = True
span_start = loc
loc = mo.end()
span_end = loc
bytespan = (span_start, span_end)
match_text = mo.group()
if tid == "ignore" or tid == "comment":
break
elif tid == "lparen":
par_count += 1
elif tid == "rparen":
par_count -= 1
elif tid == "lbracket":
bracket_count += 1
elif tid == "rbracket":
bracket_count -= 1
elif tid == "dblquote":
raise ParseException(
"Double quotes are not supported. Use single quotes.",
self.getline(line_start),
lineno,
col,
)
elif tid == "string":
# Handle here and not on the regexp to give a better error message.
if match_text.find("\n") != -1:
mlog.warning(
"""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
This will become a hard error in a future Meson release.""",
self.getline(line_start),
lineno,
col,
)
value = match_text[1:-1]
try:
value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
except MesonUnicodeDecodeError as err:
raise MesonException(
"Failed to parse escape sequence: '{}' in string:\n {}".format(
err.match, match_text
)
)
elif tid == "multiline_string":
tid = "string"
value = match_text[3:-3]
try:
value = ESCAPE_SEQUENCE_MULTI_RE.sub(decode_match, value)
except MesonUnicodeDecodeError as err:
raise MesonException(
"Failed to parse escape sequence: '{}' in string:\n{}".format(
err.match, match_text
)
)
lines = match_text.split("\n")
if len(lines) > 1:
lineno += len(lines) - 1
line_start = mo.end() - len(lines[-1])
elif tid == "number":
value = int(match_text)
elif tid == "hexnumber":
tid = "number"
value = int(match_text, base=16)
elif tid == "eol" or tid == "eol_cont":
lineno += 1
line_start = loc
if par_count > 0 or bracket_count > 0:
break
elif tid == "id":
if match_text in self.keywords:
tid = match_text
else:
value = match_text
yield Token(tid, subdir, curline_start, curline, col, bytespan, value)
break
if not matched:
raise ParseException("lexer", self.getline(line_start), lineno, col)
|
def lex(self, subdir):
line_start = 0
lineno = 1
loc = 0
par_count = 0
bracket_count = 0
col = 0
while loc < len(self.code):
matched = False
value = None
for tid, reg in self.token_specification:
mo = reg.match(self.code, loc)
if mo:
curline = lineno
curline_start = line_start
col = mo.start() - line_start
matched = True
span_start = loc
loc = mo.end()
span_end = loc
bytespan = (span_start, span_end)
match_text = mo.group()
if tid == "ignore" or tid == "comment":
break
elif tid == "lparen":
par_count += 1
elif tid == "rparen":
par_count -= 1
elif tid == "lbracket":
bracket_count += 1
elif tid == "rbracket":
bracket_count -= 1
elif tid == "dblquote":
raise ParseException(
"Double quotes are not supported. Use single quotes.",
self.getline(line_start),
lineno,
col,
)
elif tid == "string":
# Handle here and not on the regexp to give a better error message.
if match_text.find("\n") != -1:
mlog.warning(
"""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
This will become a hard error in a future Meson release.""",
self.getline(line_start),
lineno,
col,
)
value = match_text[1:-1]
value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
elif tid == "multiline_string":
tid = "string"
value = match_text[3:-3]
value = ESCAPE_SEQUENCE_MULTI_RE.sub(decode_match, value)
lines = match_text.split("\n")
if len(lines) > 1:
lineno += len(lines) - 1
line_start = mo.end() - len(lines[-1])
elif tid == "number":
value = int(match_text)
elif tid == "hexnumber":
tid = "number"
value = int(match_text, base=16)
elif tid == "eol" or tid == "eol_cont":
lineno += 1
line_start = loc
if par_count > 0 or bracket_count > 0:
break
elif tid == "id":
if match_text in self.keywords:
tid = match_text
else:
value = match_text
yield Token(tid, subdir, curline_start, curline, col, bytespan, value)
break
if not matched:
raise ParseException("lexer", self.getline(line_start), lineno, col)
|
https://github.com/mesonbuild/meson/issues/3169
|
tansell@tansell:~/github/mesonbuild/meson/test cases/common/100 print null$ meson build .
The Meson build system
Version: 0.45.0.dev1
Source dir: XXXX/github/mesonbuild/meson/test cases/common/100 print null
Build dir: XXXX/github/mesonbuild/meson/test cases/common/100 print null/build
Build type: native build
Project name: 100 print null
Build machine cpu family: x86_64
Build machine cpu: x86_64
Program find found: YES (/usr/bin/find)
Traceback (most recent call last):
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
363, in run
app.generate()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
150, in generate
self._generate(env)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
197, in _generate
intr.run()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 2992, in run
super().run()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 173, in run
self.evaluate_codeblock(self.ast, start=1)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 195, in evaluate_codeblock
raise e
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 189, in evaluate_codeblock
self.evaluate_statement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 230, in evaluate_statement
return self.evaluate_foreach(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 404, in evaluate_foreach
self.evaluate_codeblock(node.block)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 195, in evaluate_codeblock
raise e
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 189, in evaluate_codeblock
self.evaluate_statement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 232, in evaluate_statement
return self.evaluate_plusassign(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 409, in evaluate_plusassign
addition = self.evaluate_statement(node.value)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 216, in evaluate_statement
return self.evaluate_arraystatement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 243, in evaluate_arraystatement
(arguments, kwargs) = self.reduce_arguments(cur.args)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 640, in reduce_arguments
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 640, in <listcomp>
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 200, in evaluate_statement
return self.function_call(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 456, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 47, in wrapped
return f(self, node, args, kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1614, in func_files
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in
args]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1614, in <listcomp>
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in
args]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonlib.py", line 230, in from_source_file
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
File "/usr/lib/python3.5/genericpath.py", line 30, in isfile
st = os.stat(path)
ValueError: embedded null byte
|
ValueError
|
def lex(self, subdir):
line_start = 0
lineno = 1
loc = 0
par_count = 0
bracket_count = 0
col = 0
while loc < len(self.code):
matched = False
value = None
for tid, reg in self.token_specification:
mo = reg.match(self.code, loc)
if mo:
curline = lineno
curline_start = line_start
col = mo.start() - line_start
matched = True
span_start = loc
loc = mo.end()
span_end = loc
bytespan = (span_start, span_end)
match_text = mo.group()
if tid == "ignore" or tid == "comment":
break
elif tid == "lparen":
par_count += 1
elif tid == "rparen":
par_count -= 1
elif tid == "lbracket":
bracket_count += 1
elif tid == "rbracket":
bracket_count -= 1
elif tid == "dblquote":
raise ParseException(
"Double quotes are not supported. Use single quotes.",
self.getline(line_start),
lineno,
col,
)
elif tid == "string":
# Handle here and not on the regexp to give a better error message.
if match_text.find("\n") != -1:
mlog.warning(
"""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
This will become a hard error in a future Meson release.""",
self.getline(line_start),
lineno,
col,
)
value = match_text[1:-1]
value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
elif tid == "multiline_string":
tid = "string"
value = match_text[3:-3]
value = ESCAPE_SEQUENCE_MULTI_RE.sub(decode_match, value)
lines = match_text.split("\n")
if len(lines) > 1:
lineno += len(lines) - 1
line_start = mo.end() - len(lines[-1])
elif tid == "number":
value = int(match_text)
elif tid == "hexnumber":
tid = "number"
value = int(match_text, base=16)
elif tid == "eol" or tid == "eol_cont":
lineno += 1
line_start = loc
if par_count > 0 or bracket_count > 0:
break
elif tid == "id":
if match_text in self.keywords:
tid = match_text
else:
value = match_text
yield Token(tid, subdir, curline_start, curline, col, bytespan, value)
break
if not matched:
raise ParseException("lexer", self.getline(line_start), lineno, col)
|
def lex(self, subdir):
line_start = 0
lineno = 1
loc = 0
par_count = 0
bracket_count = 0
col = 0
newline_rx = re.compile(r"(?<!\\)((?:\\\\)*)\\n")
while loc < len(self.code):
matched = False
value = None
for tid, reg in self.token_specification:
mo = reg.match(self.code, loc)
if mo:
curline = lineno
curline_start = line_start
col = mo.start() - line_start
matched = True
span_start = loc
loc = mo.end()
span_end = loc
bytespan = (span_start, span_end)
match_text = mo.group()
if tid == "ignore" or tid == "comment":
break
elif tid == "lparen":
par_count += 1
elif tid == "rparen":
par_count -= 1
elif tid == "lbracket":
bracket_count += 1
elif tid == "rbracket":
bracket_count -= 1
elif tid == "dblquote":
raise ParseException(
"Double quotes are not supported. Use single quotes.",
self.getline(line_start),
lineno,
col,
)
elif tid == "string":
# Handle here and not on the regexp to give a better error message.
if match_text.find("\n") != -1:
mlog.warning(
"""Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
This will become a hard error in a future Meson release.""",
self.getline(line_start),
lineno,
col,
)
value = match_text[1:-1].replace(r"\'", "'")
value = newline_rx.sub(r"\1\n", value)
value = value.replace(r" \\ ".strip(), r" \ ".strip())
elif tid == "multiline_string":
tid = "string"
value = match_text[3:-3]
lines = match_text.split("\n")
if len(lines) > 1:
lineno += len(lines) - 1
line_start = mo.end() - len(lines[-1])
elif tid == "number":
value = int(match_text)
elif tid == "hexnumber":
tid = "number"
value = int(match_text, base=16)
elif tid == "eol" or tid == "eol_cont":
lineno += 1
line_start = loc
if par_count > 0 or bracket_count > 0:
break
elif tid == "id":
if match_text in self.keywords:
tid = match_text
else:
value = match_text
yield Token(tid, subdir, curline_start, curline, col, bytespan, value)
break
if not matched:
raise ParseException("lexer", self.getline(line_start), lineno, col)
|
https://github.com/mesonbuild/meson/issues/3169
|
tansell@tansell:~/github/mesonbuild/meson/test cases/common/100 print null$ meson build .
The Meson build system
Version: 0.45.0.dev1
Source dir: XXXX/github/mesonbuild/meson/test cases/common/100 print null
Build dir: XXXX/github/mesonbuild/meson/test cases/common/100 print null/build
Build type: native build
Project name: 100 print null
Build machine cpu family: x86_64
Build machine cpu: x86_64
Program find found: YES (/usr/bin/find)
Traceback (most recent call last):
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
363, in run
app.generate()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
150, in generate
self._generate(env)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonmain.py", line
197, in _generate
intr.run()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 2992, in run
super().run()
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 173, in run
self.evaluate_codeblock(self.ast, start=1)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 195, in evaluate_codeblock
raise e
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 189, in evaluate_codeblock
self.evaluate_statement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 230, in evaluate_statement
return self.evaluate_foreach(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 404, in evaluate_foreach
self.evaluate_codeblock(node.block)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 195, in evaluate_codeblock
raise e
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 189, in evaluate_codeblock
self.evaluate_statement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 232, in evaluate_statement
return self.evaluate_plusassign(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 409, in evaluate_plusassign
addition = self.evaluate_statement(node.value)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 216, in evaluate_statement
return self.evaluate_arraystatement(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 243, in evaluate_arraystatement
(arguments, kwargs) = self.reduce_arguments(cur.args)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 640, in reduce_arguments
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 640, in <listcomp>
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 200, in evaluate_statement
return self.function_call(cur)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 456, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreterbase.py", line 47, in wrapped
return f(self, node, args, kwargs)
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1614, in func_files
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in
args]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/interpreter.py", line 1614, in <listcomp>
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in
args]
File "XXXX/.local/lib/python3.5/site-packages/mesonbuild/mesonlib.py", line 230, in from_source_file
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
File "/usr/lib/python3.5/genericpath.py", line 30, in isfile
st = os.stat(path)
ValueError: embedded null byte
|
ValueError
|
def generate_msvc_pch_command(self, target, compiler, pch):
if len(pch) != 2:
raise MesonException(
"MSVC requires one header and one source to produce precompiled headers."
)
header = pch[0]
source = pch[1]
pchname = compiler.get_pch_name(header)
dst = os.path.join(self.get_target_private_dir(target), pchname)
commands = []
commands += self.generate_basic_compiler_args(target, compiler)
just_name = os.path.split(header)[1]
(objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
commands += pch_args
commands += self.get_compile_debugfile_args(compiler, target, objname)
dep = dst + "." + compiler.get_depfile_suffix()
return commands, dep, dst, [objname]
|
def generate_msvc_pch_command(self, target, compiler, pch):
if len(pch) != 2:
raise RuntimeError(
"MSVC requires one header and one source to produce precompiled headers."
)
header = pch[0]
source = pch[1]
pchname = compiler.get_pch_name(header)
dst = os.path.join(self.get_target_private_dir(target), pchname)
commands = []
commands += self.generate_basic_compiler_args(target, compiler)
just_name = os.path.split(header)[1]
(objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
commands += pch_args
commands += self.get_compile_debugfile_args(compiler, target, objname)
dep = dst + "." + compiler.get_depfile_suffix()
return commands, dep, dst, [objname]
|
https://github.com/mesonbuild/meson/issues/2833
|
Traceback (most recent call last):
File "mesonbuild\mesonmain.py", line 352, in run
File "mesonbuild\mesonmain.py", line 147, in generate
File "mesonbuild\mesonmain.py", line 197, in _generate
File "mesonbuild\backend\vs2010backend.py", line 161, in generate
File "mesonbuild\backend\vs2010backend.py", line 311, in generate_projects
File "mesonbuild\backend\vs2010backend.py", line 864, in gen_vcxproj
IndexError: list index out of range
|
IndexError
|
def gen_vcxproj(self, target, ofname, guid):
mlog.debug("Generating vcxproj %s." % target.name)
entrypoint = "WinMainCRTStartup"
subsystem = "Windows"
if isinstance(target, build.Executable):
conftype = "Application"
if not target.gui_app:
subsystem = "Console"
entrypoint = "mainCRTStartup"
elif isinstance(target, build.StaticLibrary):
conftype = "StaticLibrary"
elif isinstance(target, build.SharedLibrary):
conftype = "DynamicLibrary"
entrypoint = "_DllMainCrtStartup"
elif isinstance(target, build.CustomTarget):
return self.gen_custom_target_vcxproj(target, ofname, guid)
elif isinstance(target, build.RunTarget):
return self.gen_run_target_vcxproj(target, ofname, guid)
else:
raise MesonException("Unknown target type for %s" % target.get_basename())
# Prefix to use to access the build root from the vcxproj dir
down = self.target_to_build_root(target)
# Prefix to use to access the source tree's root from the vcxproj dir
proj_to_src_root = os.path.join(down, self.build_to_src)
# Prefix to use to access the source tree's subdir from the vcxproj dir
proj_to_src_dir = os.path.join(proj_to_src_root, target.subdir)
(sources, headers, objects, languages) = self.split_sources(target.sources)
if self.is_unity(target):
sources = self.generate_unity_files(target, sources)
compiler = self._get_cl_compiler(target)
buildtype_args = compiler.get_buildtype_args(self.buildtype)
buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype)
project_name = target.name
target_name = target.name
root = ET.Element(
"Project",
{
"DefaultTargets": "Build",
"ToolsVersion": "4.0",
"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003",
},
)
confitems = ET.SubElement(root, "ItemGroup", {"Label": "ProjectConfigurations"})
prjconf = ET.SubElement(
confitems,
"ProjectConfiguration",
{"Include": self.buildtype + "|" + self.platform},
)
p = ET.SubElement(prjconf, "Configuration")
p.text = self.buildtype
pl = ET.SubElement(prjconf, "Platform")
pl.text = self.platform
# Globals
globalgroup = ET.SubElement(root, "PropertyGroup", Label="Globals")
guidelem = ET.SubElement(globalgroup, "ProjectGuid")
guidelem.text = "{%s}" % guid
kw = ET.SubElement(globalgroup, "Keyword")
kw.text = self.platform + "Proj"
ns = ET.SubElement(globalgroup, "RootNamespace")
ns.text = target_name
p = ET.SubElement(globalgroup, "Platform")
p.text = self.platform
pname = ET.SubElement(globalgroup, "ProjectName")
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(
globalgroup, "WindowsTargetPlatformVersion"
).text = self.windows_target_platform_version
ET.SubElement(
root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props"
)
# Start configuration
type_config = ET.SubElement(root, "PropertyGroup", Label="Configuration")
ET.SubElement(type_config, "ConfigurationType").text = conftype
ET.SubElement(type_config, "CharacterSet").text = "MultiByte"
if self.platform_toolset:
ET.SubElement(type_config, "PlatformToolset").text = self.platform_toolset
# FIXME: Meson's LTO support needs to be integrated here
ET.SubElement(type_config, "WholeProgramOptimization").text = "false"
# Let VS auto-set the RTC level
ET.SubElement(type_config, "BasicRuntimeChecks").text = "Default"
o_flags = split_o_flags_args(buildtype_args)
if "/Oi" in o_flags:
ET.SubElement(type_config, "IntrinsicFunctions").text = "true"
if "/Ob1" in o_flags:
ET.SubElement(
type_config, "InlineFunctionExpansion"
).text = "OnlyExplicitInline"
elif "/Ob2" in o_flags:
ET.SubElement(type_config, "InlineFunctionExpansion").text = "AnySuitable"
# Size-preserving flags
if "/Os" in o_flags:
ET.SubElement(type_config, "FavorSizeOrSpeed").text = "Size"
else:
ET.SubElement(type_config, "FavorSizeOrSpeed").text = "Speed"
# Incremental linking increases code size
if "/INCREMENTAL:NO" in buildtype_link_args:
ET.SubElement(type_config, "LinkIncremental").text = "false"
# CRT type; debug or release
if "/MDd" in buildtype_args:
ET.SubElement(type_config, "UseDebugLibraries").text = "true"
ET.SubElement(type_config, "RuntimeLibrary").text = "MultiThreadedDebugDLL"
else:
ET.SubElement(type_config, "UseDebugLibraries").text = "false"
ET.SubElement(type_config, "RuntimeLibrary").text = "MultiThreadedDLL"
# Debug format
if "/ZI" in buildtype_args:
ET.SubElement(type_config, "DebugInformationFormat").text = "EditAndContinue"
elif "/Zi" in buildtype_args:
ET.SubElement(type_config, "DebugInformationFormat").text = "ProgramDatabase"
elif "/Z7" in buildtype_args:
ET.SubElement(type_config, "DebugInformationFormat").text = "OldStyle"
# Runtime checks
if "/RTC1" in buildtype_args:
ET.SubElement(type_config, "BasicRuntimeChecks").text = "EnableFastChecks"
elif "/RTCu" in buildtype_args:
ET.SubElement(
type_config, "BasicRuntimeChecks"
).text = "UninitializedLocalUsageCheck"
elif "/RTCs" in buildtype_args:
ET.SubElement(type_config, "BasicRuntimeChecks").text = "StackFrameRuntimeCheck"
# Optimization flags
if "/Ox" in o_flags:
ET.SubElement(type_config, "Optimization").text = "Full"
elif "/O2" in o_flags:
ET.SubElement(type_config, "Optimization").text = "MaxSpeed"
elif "/O1" in o_flags:
ET.SubElement(type_config, "Optimization").text = "MinSpace"
elif "/Od" in o_flags:
ET.SubElement(type_config, "Optimization").text = "Disabled"
# End configuration
ET.SubElement(root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.props")
generated_files, custom_target_output_files, generated_files_include_dirs = (
self.generate_custom_generator_commands(target, root)
)
(gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files)
(custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(
custom_target_output_files
)
gen_src += custom_src
gen_hdrs += custom_hdrs
gen_langs += custom_langs
# Project information
direlem = ET.SubElement(root, "PropertyGroup")
fver = ET.SubElement(direlem, "_ProjectFileVersion")
fver.text = self.project_file_version
outdir = ET.SubElement(direlem, "OutDir")
outdir.text = ".\\"
intdir = ET.SubElement(direlem, "IntDir")
intdir.text = target.get_id() + "\\"
tfilename = os.path.splitext(target.get_filename())
ET.SubElement(direlem, "TargetName").text = tfilename[0]
ET.SubElement(direlem, "TargetExt").text = tfilename[1]
# Build information
compiles = ET.SubElement(root, "ItemDefinitionGroup")
clconf = ET.SubElement(compiles, "ClCompile")
# Arguments, include dirs, defines for all files in the current target
target_args = []
target_defines = []
target_inc_dirs = []
# Arguments, include dirs, defines passed to individual files in
# a target; perhaps because the args are language-specific
#
# file_args is also later split out into defines and include_dirs in
# case someone passed those in there
file_args = dict(
(lang, CompilerArgs(comp)) for lang, comp in target.compilers.items()
)
file_defines = dict((lang, []) for lang in target.compilers)
file_inc_dirs = dict((lang, []) for lang in target.compilers)
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
if l in file_args:
file_args[l] += compilers.get_base_compile_args(
self.environment.coredata.base_options, comp
)
file_args[l] += comp.get_option_compile_args(
self.environment.coredata.compiler_options
)
# Add compile args added using add_project_arguments()
for l, args in self.build.projects_args.get(target.subproject, {}).items():
if l in file_args:
file_args[l] += args
# Add compile args added using add_global_arguments()
# These override per-project arguments
for l, args in self.build.global_args.items():
if l in file_args:
file_args[l] += args
if not target.is_cross:
# Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
# to override all the defaults, but not the per-target compile args.
for l, args in self.environment.coredata.external_args.items():
if l in file_args:
file_args[l] += args
for args in file_args.values():
# This is where Visual Studio will insert target_args, target_defines,
# etc, which are added later from external deps (see below).
args += [
"%(AdditionalOptions)",
"%(PreprocessorDefinitions)",
"%(AdditionalIncludeDirectories)",
]
# Add custom target dirs as includes automatically, but before
# target-specific include dirs. See _generate_single_compile() in
# the ninja backend for caveats.
args += ["-I" + arg for arg in generated_files_include_dirs]
# Add include dirs from the `include_directories:` kwarg on the target
# and from `include_directories:` of internal deps of the target.
#
# Target include dirs should override internal deps include dirs.
# This is handled in BuildTarget.process_kwargs()
#
# Include dirs from internal deps should override include dirs from
# external deps and must maintain the order in which they are
# specified. Hence, we must reverse so that the order is preserved.
#
# These are per-target, but we still add them as per-file because we
# need them to be looked in first.
for d in reversed(target.get_include_dirs()):
for i in d.get_incdirs():
curdir = os.path.join(d.get_curdir(), i)
args.append("-I" + self.relpath(curdir, target.subdir)) # build dir
args.append("-I" + os.path.join(proj_to_src_root, curdir)) # src dir
for i in d.get_extra_build_dirs():
curdir = os.path.join(d.get_curdir(), i)
args.append("-I" + self.relpath(curdir, target.subdir)) # build dir
# Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these
# near the end since these are supposed to override everything else.
for l, args in target.extra_args.items():
if l in file_args:
file_args[l] += args
# The highest priority includes. In order of directory search:
# target private dir, target build dir, target source dir
for args in file_args.values():
t_inc_dirs = [
self.relpath(
self.get_target_private_dir(target), self.get_target_dir(target)
)
]
if target.implicit_include_directories:
t_inc_dirs += ["."]
if target.implicit_include_directories:
t_inc_dirs += [proj_to_src_dir]
args += ["-I" + arg for arg in t_inc_dirs]
# Split preprocessor defines and include directories out of the list of
# all extra arguments. The rest go into %(AdditionalOptions).
for l, args in file_args.items():
for arg in args[:]:
if arg.startswith(("-D", "/D")) or arg == "%(PreprocessorDefinitions)":
file_args[l].remove(arg)
# Don't escape the marker
if arg == "%(PreprocessorDefinitions)":
define = arg
else:
define = arg[2:]
# De-dup
if define in file_defines[l]:
file_defines[l].remove(define)
file_defines[l].append(define)
elif (
arg.startswith(("-I", "/I")) or arg == "%(AdditionalIncludeDirectories)"
):
file_args[l].remove(arg)
# Don't escape the marker
if arg == "%(AdditionalIncludeDirectories)":
inc_dir = arg
else:
inc_dir = arg[2:]
# De-dup
if inc_dir not in file_inc_dirs[l]:
file_inc_dirs[l].append(inc_dir)
# Split compile args needed to find external dependencies
# Link args are added while generating the link command
for d in reversed(target.get_external_deps()):
# Cflags required by external deps might have UNIX-specific flags,
# so filter them out if needed
d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
for arg in d_compile_args:
if arg.startswith(("-D", "/D")):
define = arg[2:]
# De-dup
if define in target_defines:
target_defines.remove(define)
target_defines.append(define)
elif arg.startswith(("-I", "/I")):
inc_dir = arg[2:]
# De-dup
if inc_dir not in target_inc_dirs:
target_inc_dirs.append(inc_dir)
else:
target_args.append(arg)
languages += gen_langs
if len(target_args) > 0:
target_args.append("%(AdditionalOptions)")
ET.SubElement(clconf, "AdditionalOptions").text = " ".join(target_args)
target_inc_dirs.append("%(AdditionalIncludeDirectories)")
ET.SubElement(clconf, "AdditionalIncludeDirectories").text = ";".join(
target_inc_dirs
)
target_defines.append("%(PreprocessorDefinitions)")
ET.SubElement(clconf, "PreprocessorDefinitions").text = ";".join(target_defines)
ET.SubElement(clconf, "MinimalRebuild").text = "true"
ET.SubElement(clconf, "FunctionLevelLinking").text = "true"
pch_node = ET.SubElement(clconf, "PrecompiledHeader")
# Warning level
warning_level = self.get_option_for_target("warning_level", target)
ET.SubElement(clconf, "WarningLevel").text = "Level" + str(1 + int(warning_level))
if self.get_option_for_target("werror", target):
ET.SubElement(clconf, "TreatWarningAsError").text = "true"
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
for lang in ["c", "cpp"]:
pch = target.get_pch(lang)
if not pch:
continue
pch_node.text = "Use"
if compiler.id == "msvc":
if len(pch) != 2:
raise MesonException(
"MSVC requires one header and one source to produce precompiled headers."
)
pch_sources[lang] = [pch[0], pch[1], lang]
else:
# I don't know whether its relevant but let's handle other compilers
# used with a vs backend
pch_sources[lang] = [pch[0], None, lang]
if len(pch_sources) == 1:
# If there is only 1 language with precompiled headers, we can use it for the entire project, which
# is cleaner than specifying it for each source file.
pch_source = list(pch_sources.values())[0]
header = os.path.join(proj_to_src_dir, pch_source[0])
pch_file = ET.SubElement(clconf, "PrecompiledHeaderFile")
pch_file.text = header
pch_include = ET.SubElement(clconf, "ForcedIncludeFiles")
pch_include.text = header + ";%(ForcedIncludeFiles)"
pch_out = ET.SubElement(clconf, "PrecompiledHeaderOutputFile")
pch_out.text = "$(IntDir)$(TargetName)-%s.pch" % pch_source[2]
resourcecompile = ET.SubElement(compiles, "ResourceCompile")
ET.SubElement(resourcecompile, "PreprocessorDefinitions")
# Linker options
link = ET.SubElement(compiles, "Link")
extra_link_args = CompilerArgs(compiler)
# FIXME: Can these buildtype linker args be added as tags in the
# vcxproj file (similar to buildtype compiler args) instead of in
# AdditionalOptions?
extra_link_args += compiler.get_buildtype_linker_args(self.buildtype)
# Generate Debug info
if self.buildtype.startswith("debug"):
self.generate_debug_information(link)
if not isinstance(target, build.StaticLibrary):
if isinstance(target, build.SharedModule):
extra_link_args += compiler.get_std_shared_module_link_args()
# Add link args added using add_project_link_arguments()
extra_link_args += self.build.get_project_link_args(compiler, target.subproject)
# Add link args added using add_global_link_arguments()
# These override per-project link arguments
extra_link_args += self.build.get_global_link_args(compiler)
if not target.is_cross:
# Link args added from the env: LDFLAGS. We want these to
# override all the defaults but not the per-target link args.
extra_link_args += self.environment.coredata.external_link_args[
compiler.get_language()
]
# Only non-static built targets need link args and link dependencies
extra_link_args += target.link_args
# External deps must be last because target link libraries may depend on them.
for dep in target.get_external_deps():
# Extend without reordering or de-dup to preserve `-L -l` sets
# https://github.com/mesonbuild/meson/issues/1718
extra_link_args.extend_direct(dep.get_link_args())
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
extra_link_args.extend_direct(dep.get_link_args())
# Add link args for c_* or cpp_* build options. Currently this only
# adds c_winlibs and cpp_winlibs when building for Windows. This needs
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
extra_link_args += compiler.get_option_link_args(
self.environment.coredata.compiler_options
)
(additional_libpaths, additional_links, extra_link_args) = self.split_link_args(
extra_link_args.to_native()
)
# Add more libraries to be linked if needed
for t in target.get_dependencies():
lobj = self.build.targets[t.get_id()]
linkname = os.path.join(down, self.get_target_filename_for_linking(lobj))
if t in target.link_whole_targets:
# /WHOLEARCHIVE:foo must go into AdditionalOptions
extra_link_args += compiler.get_link_whole_for(linkname)
# To force Visual Studio to build this project even though it
# has no sources, we include a reference to the vcxproj file
# that builds this target. Technically we should add this only
# if the current target has no sources, but it doesn't hurt to
# have 'extra' references.
trelpath = self.get_target_dir_relative_to(t, target)
tvcxproj = os.path.join(trelpath, t.get_id() + ".vcxproj")
tid = self.environment.coredata.target_guids[t.get_id()]
self.add_project_reference(root, tvcxproj, tid)
else:
# Other libraries go into AdditionalDependencies
additional_links.append(linkname)
for lib in self.get_custom_target_provided_libraries(target):
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
additional_objects = []
for o in self.flatten_object_list(target, down):
assert isinstance(o, str)
additional_objects.append(o)
for o in custom_objs:
additional_objects.append(o)
if len(extra_link_args) > 0:
extra_link_args.append("%(AdditionalOptions)")
ET.SubElement(link, "AdditionalOptions").text = " ".join(extra_link_args)
if len(additional_libpaths) > 0:
additional_libpaths.insert(0, "%(AdditionalLibraryDirectories)")
ET.SubElement(link, "AdditionalLibraryDirectories").text = ";".join(
additional_libpaths
)
if len(additional_links) > 0:
additional_links.append("%(AdditionalDependencies)")
ET.SubElement(link, "AdditionalDependencies").text = ";".join(additional_links)
ofile = ET.SubElement(link, "OutputFile")
ofile.text = "$(OutDir)%s" % target.get_filename()
subsys = ET.SubElement(link, "SubSystem")
subsys.text = subsystem
if (
isinstance(target, build.SharedLibrary) or isinstance(target, build.Executable)
) and target.get_import_filename():
# DLLs built with MSVC always have an import library except when
# they're data-only DLLs, but we don't support those yet.
ET.SubElement(link, "ImportLibrary").text = target.get_import_filename()
if isinstance(target, build.SharedLibrary):
# Add module definitions file, if provided
if target.vs_module_defs:
relpath = os.path.join(
down, target.vs_module_defs.rel_to_builddir(self.build_to_src)
)
ET.SubElement(link, "ModuleDefinitionFile").text = relpath
if "/ZI" in buildtype_args or "/Zi" in buildtype_args:
pdb = ET.SubElement(link, "ProgramDataBaseFileName")
pdb.text = "$(OutDir}%s.pdb" % target_name
if isinstance(target, build.Executable):
ET.SubElement(link, "EntryPointSymbol").text = entrypoint
targetmachine = ET.SubElement(link, "TargetMachine")
targetplatform = self.platform.lower()
if targetplatform == "win32":
targetmachine.text = "MachineX86"
elif targetplatform == "x64":
targetmachine.text = "MachineX64"
elif targetplatform == "arm":
targetmachine.text = "MachineARM"
else:
raise MesonException(
"Unsupported Visual Studio target machine: " + targetmachine
)
extra_files = target.extra_files
if len(headers) + len(gen_hdrs) + len(extra_files) > 0:
inc_hdrs = ET.SubElement(root, "ItemGroup")
for h in headers:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_hdrs, "CLInclude", Include=relpath)
for h in gen_hdrs:
ET.SubElement(inc_hdrs, "CLInclude", Include=h)
for h in target.extra_files:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_hdrs, "CLInclude", Include=relpath)
if len(sources) + len(gen_src) + len(pch_sources) > 0:
inc_src = ET.SubElement(root, "ItemGroup")
for s in sources:
relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
inc_cl = ET.SubElement(inc_src, "CLCompile", Include=relpath)
lang = Vs2010Backend.lang_from_source_file(s)
self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
ET.SubElement(inc_cl, "ObjectFileName").text = (
"$(IntDir)" + self.object_filename_from_source(target, s, False)
)
for s in gen_src:
inc_cl = ET.SubElement(inc_src, "CLCompile", Include=s)
lang = Vs2010Backend.lang_from_source_file(s)
self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
for lang in pch_sources:
header, impl, suffix = pch_sources[lang]
if impl:
relpath = os.path.join(proj_to_src_dir, impl)
inc_cl = ET.SubElement(inc_src, "CLCompile", Include=relpath)
pch = ET.SubElement(inc_cl, "PrecompiledHeader")
pch.text = "Create"
pch_out = ET.SubElement(inc_cl, "PrecompiledHeaderOutputFile")
pch_out.text = "$(IntDir)$(TargetName)-%s.pch" % suffix
pch_file = ET.SubElement(inc_cl, "PrecompiledHeaderFile")
# MSBuild searches for the header relative from the implementation, so we have to use
# just the file name instead of the relative path to the file.
pch_file.text = os.path.split(header)[1]
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
if self.has_objects(objects, additional_objects, gen_objs):
inc_objs = ET.SubElement(root, "ItemGroup")
for s in objects:
relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_objs, "Object", Include=relpath)
for s in additional_objects:
ET.SubElement(inc_objs, "Object", Include=s)
self.add_generated_objects(inc_objs, gen_objs)
ET.SubElement(root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.targets")
# Reference the regen target.
regen_vcxproj = os.path.join(self.environment.get_build_dir(), "REGEN.vcxproj")
self.add_project_reference(
root, regen_vcxproj, self.environment.coredata.regen_guid
)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
|
def gen_vcxproj(self, target, ofname, guid):
mlog.debug("Generating vcxproj %s." % target.name)
entrypoint = "WinMainCRTStartup"
subsystem = "Windows"
if isinstance(target, build.Executable):
conftype = "Application"
if not target.gui_app:
subsystem = "Console"
entrypoint = "mainCRTStartup"
elif isinstance(target, build.StaticLibrary):
conftype = "StaticLibrary"
elif isinstance(target, build.SharedLibrary):
conftype = "DynamicLibrary"
entrypoint = "_DllMainCrtStartup"
elif isinstance(target, build.CustomTarget):
return self.gen_custom_target_vcxproj(target, ofname, guid)
elif isinstance(target, build.RunTarget):
return self.gen_run_target_vcxproj(target, ofname, guid)
else:
raise MesonException("Unknown target type for %s" % target.get_basename())
# Prefix to use to access the build root from the vcxproj dir
down = self.target_to_build_root(target)
# Prefix to use to access the source tree's root from the vcxproj dir
proj_to_src_root = os.path.join(down, self.build_to_src)
# Prefix to use to access the source tree's subdir from the vcxproj dir
proj_to_src_dir = os.path.join(proj_to_src_root, target.subdir)
(sources, headers, objects, languages) = self.split_sources(target.sources)
if self.is_unity(target):
sources = self.generate_unity_files(target, sources)
compiler = self._get_cl_compiler(target)
buildtype_args = compiler.get_buildtype_args(self.buildtype)
buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype)
project_name = target.name
target_name = target.name
root = ET.Element(
"Project",
{
"DefaultTargets": "Build",
"ToolsVersion": "4.0",
"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003",
},
)
confitems = ET.SubElement(root, "ItemGroup", {"Label": "ProjectConfigurations"})
prjconf = ET.SubElement(
confitems,
"ProjectConfiguration",
{"Include": self.buildtype + "|" + self.platform},
)
p = ET.SubElement(prjconf, "Configuration")
p.text = self.buildtype
pl = ET.SubElement(prjconf, "Platform")
pl.text = self.platform
# Globals
globalgroup = ET.SubElement(root, "PropertyGroup", Label="Globals")
guidelem = ET.SubElement(globalgroup, "ProjectGuid")
guidelem.text = "{%s}" % guid
kw = ET.SubElement(globalgroup, "Keyword")
kw.text = self.platform + "Proj"
ns = ET.SubElement(globalgroup, "RootNamespace")
ns.text = target_name
p = ET.SubElement(globalgroup, "Platform")
p.text = self.platform
pname = ET.SubElement(globalgroup, "ProjectName")
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(
globalgroup, "WindowsTargetPlatformVersion"
).text = self.windows_target_platform_version
ET.SubElement(
root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props"
)
# Start configuration
type_config = ET.SubElement(root, "PropertyGroup", Label="Configuration")
ET.SubElement(type_config, "ConfigurationType").text = conftype
ET.SubElement(type_config, "CharacterSet").text = "MultiByte"
if self.platform_toolset:
ET.SubElement(type_config, "PlatformToolset").text = self.platform_toolset
# FIXME: Meson's LTO support needs to be integrated here
ET.SubElement(type_config, "WholeProgramOptimization").text = "false"
# Let VS auto-set the RTC level
ET.SubElement(type_config, "BasicRuntimeChecks").text = "Default"
o_flags = split_o_flags_args(buildtype_args)
if "/Oi" in o_flags:
ET.SubElement(type_config, "IntrinsicFunctions").text = "true"
if "/Ob1" in o_flags:
ET.SubElement(
type_config, "InlineFunctionExpansion"
).text = "OnlyExplicitInline"
elif "/Ob2" in o_flags:
ET.SubElement(type_config, "InlineFunctionExpansion").text = "AnySuitable"
# Size-preserving flags
if "/Os" in o_flags:
ET.SubElement(type_config, "FavorSizeOrSpeed").text = "Size"
else:
ET.SubElement(type_config, "FavorSizeOrSpeed").text = "Speed"
# Incremental linking increases code size
if "/INCREMENTAL:NO" in buildtype_link_args:
ET.SubElement(type_config, "LinkIncremental").text = "false"
# CRT type; debug or release
if "/MDd" in buildtype_args:
ET.SubElement(type_config, "UseDebugLibraries").text = "true"
ET.SubElement(type_config, "RuntimeLibrary").text = "MultiThreadedDebugDLL"
else:
ET.SubElement(type_config, "UseDebugLibraries").text = "false"
ET.SubElement(type_config, "RuntimeLibrary").text = "MultiThreadedDLL"
# Debug format
if "/ZI" in buildtype_args:
ET.SubElement(type_config, "DebugInformationFormat").text = "EditAndContinue"
elif "/Zi" in buildtype_args:
ET.SubElement(type_config, "DebugInformationFormat").text = "ProgramDatabase"
elif "/Z7" in buildtype_args:
ET.SubElement(type_config, "DebugInformationFormat").text = "OldStyle"
# Runtime checks
if "/RTC1" in buildtype_args:
ET.SubElement(type_config, "BasicRuntimeChecks").text = "EnableFastChecks"
elif "/RTCu" in buildtype_args:
ET.SubElement(
type_config, "BasicRuntimeChecks"
).text = "UninitializedLocalUsageCheck"
elif "/RTCs" in buildtype_args:
ET.SubElement(type_config, "BasicRuntimeChecks").text = "StackFrameRuntimeCheck"
# Optimization flags
if "/Ox" in o_flags:
ET.SubElement(type_config, "Optimization").text = "Full"
elif "/O2" in o_flags:
ET.SubElement(type_config, "Optimization").text = "MaxSpeed"
elif "/O1" in o_flags:
ET.SubElement(type_config, "Optimization").text = "MinSpace"
elif "/Od" in o_flags:
ET.SubElement(type_config, "Optimization").text = "Disabled"
# End configuration
ET.SubElement(root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.props")
generated_files, custom_target_output_files, generated_files_include_dirs = (
self.generate_custom_generator_commands(target, root)
)
(gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files)
(custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(
custom_target_output_files
)
gen_src += custom_src
gen_hdrs += custom_hdrs
gen_langs += custom_langs
# Project information
direlem = ET.SubElement(root, "PropertyGroup")
fver = ET.SubElement(direlem, "_ProjectFileVersion")
fver.text = self.project_file_version
outdir = ET.SubElement(direlem, "OutDir")
outdir.text = ".\\"
intdir = ET.SubElement(direlem, "IntDir")
intdir.text = target.get_id() + "\\"
tfilename = os.path.splitext(target.get_filename())
ET.SubElement(direlem, "TargetName").text = tfilename[0]
ET.SubElement(direlem, "TargetExt").text = tfilename[1]
# Build information
compiles = ET.SubElement(root, "ItemDefinitionGroup")
clconf = ET.SubElement(compiles, "ClCompile")
# Arguments, include dirs, defines for all files in the current target
target_args = []
target_defines = []
target_inc_dirs = []
# Arguments, include dirs, defines passed to individual files in
# a target; perhaps because the args are language-specific
#
# file_args is also later split out into defines and include_dirs in
# case someone passed those in there
file_args = dict(
(lang, CompilerArgs(comp)) for lang, comp in target.compilers.items()
)
file_defines = dict((lang, []) for lang in target.compilers)
file_inc_dirs = dict((lang, []) for lang in target.compilers)
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
if l in file_args:
file_args[l] += compilers.get_base_compile_args(
self.environment.coredata.base_options, comp
)
file_args[l] += comp.get_option_compile_args(
self.environment.coredata.compiler_options
)
# Add compile args added using add_project_arguments()
for l, args in self.build.projects_args.get(target.subproject, {}).items():
if l in file_args:
file_args[l] += args
# Add compile args added using add_global_arguments()
# These override per-project arguments
for l, args in self.build.global_args.items():
if l in file_args:
file_args[l] += args
if not target.is_cross:
# Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
# to override all the defaults, but not the per-target compile args.
for l, args in self.environment.coredata.external_args.items():
if l in file_args:
file_args[l] += args
for args in file_args.values():
# This is where Visual Studio will insert target_args, target_defines,
# etc, which are added later from external deps (see below).
args += [
"%(AdditionalOptions)",
"%(PreprocessorDefinitions)",
"%(AdditionalIncludeDirectories)",
]
# Add custom target dirs as includes automatically, but before
# target-specific include dirs. See _generate_single_compile() in
# the ninja backend for caveats.
args += ["-I" + arg for arg in generated_files_include_dirs]
# Add include dirs from the `include_directories:` kwarg on the target
# and from `include_directories:` of internal deps of the target.
#
# Target include dirs should override internal deps include dirs.
# This is handled in BuildTarget.process_kwargs()
#
# Include dirs from internal deps should override include dirs from
# external deps and must maintain the order in which they are
# specified. Hence, we must reverse so that the order is preserved.
#
# These are per-target, but we still add them as per-file because we
# need them to be looked in first.
for d in reversed(target.get_include_dirs()):
for i in d.get_incdirs():
curdir = os.path.join(d.get_curdir(), i)
args.append("-I" + self.relpath(curdir, target.subdir)) # build dir
args.append("-I" + os.path.join(proj_to_src_root, curdir)) # src dir
for i in d.get_extra_build_dirs():
curdir = os.path.join(d.get_curdir(), i)
args.append("-I" + self.relpath(curdir, target.subdir)) # build dir
# Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these
# near the end since these are supposed to override everything else.
for l, args in target.extra_args.items():
if l in file_args:
file_args[l] += args
# The highest priority includes. In order of directory search:
# target private dir, target build dir, target source dir
for args in file_args.values():
t_inc_dirs = [
self.relpath(
self.get_target_private_dir(target), self.get_target_dir(target)
)
]
if target.implicit_include_directories:
t_inc_dirs += ["."]
if target.implicit_include_directories:
t_inc_dirs += [proj_to_src_dir]
args += ["-I" + arg for arg in t_inc_dirs]
# Split preprocessor defines and include directories out of the list of
# all extra arguments. The rest go into %(AdditionalOptions).
for l, args in file_args.items():
for arg in args[:]:
if arg.startswith(("-D", "/D")) or arg == "%(PreprocessorDefinitions)":
file_args[l].remove(arg)
# Don't escape the marker
if arg == "%(PreprocessorDefinitions)":
define = arg
else:
define = arg[2:]
# De-dup
if define in file_defines[l]:
file_defines[l].remove(define)
file_defines[l].append(define)
elif (
arg.startswith(("-I", "/I")) or arg == "%(AdditionalIncludeDirectories)"
):
file_args[l].remove(arg)
# Don't escape the marker
if arg == "%(AdditionalIncludeDirectories)":
inc_dir = arg
else:
inc_dir = arg[2:]
# De-dup
if inc_dir not in file_inc_dirs[l]:
file_inc_dirs[l].append(inc_dir)
# Split compile args needed to find external dependencies
# Link args are added while generating the link command
for d in reversed(target.get_external_deps()):
# Cflags required by external deps might have UNIX-specific flags,
# so filter them out if needed
d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
for arg in d_compile_args:
if arg.startswith(("-D", "/D")):
define = arg[2:]
# De-dup
if define in target_defines:
target_defines.remove(define)
target_defines.append(define)
elif arg.startswith(("-I", "/I")):
inc_dir = arg[2:]
# De-dup
if inc_dir not in target_inc_dirs:
target_inc_dirs.append(inc_dir)
else:
target_args.append(arg)
languages += gen_langs
if len(target_args) > 0:
target_args.append("%(AdditionalOptions)")
ET.SubElement(clconf, "AdditionalOptions").text = " ".join(target_args)
target_inc_dirs.append("%(AdditionalIncludeDirectories)")
ET.SubElement(clconf, "AdditionalIncludeDirectories").text = ";".join(
target_inc_dirs
)
target_defines.append("%(PreprocessorDefinitions)")
ET.SubElement(clconf, "PreprocessorDefinitions").text = ";".join(target_defines)
ET.SubElement(clconf, "MinimalRebuild").text = "true"
ET.SubElement(clconf, "FunctionLevelLinking").text = "true"
pch_node = ET.SubElement(clconf, "PrecompiledHeader")
# Warning level
warning_level = self.get_option_for_target("warning_level", target)
ET.SubElement(clconf, "WarningLevel").text = "Level" + str(1 + int(warning_level))
if self.get_option_for_target("werror", target):
ET.SubElement(clconf, "TreatWarningAsError").text = "true"
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
for lang in ["c", "cpp"]:
pch = target.get_pch(lang)
if not pch:
continue
pch_node.text = "Use"
pch_sources[lang] = [pch[0], pch[1], lang]
if len(pch_sources) == 1:
# If there is only 1 language with precompiled headers, we can use it for the entire project, which
# is cleaner than specifying it for each source file.
pch_source = list(pch_sources.values())[0]
header = os.path.join(proj_to_src_dir, pch_source[0])
pch_file = ET.SubElement(clconf, "PrecompiledHeaderFile")
pch_file.text = header
pch_include = ET.SubElement(clconf, "ForcedIncludeFiles")
pch_include.text = header + ";%(ForcedIncludeFiles)"
pch_out = ET.SubElement(clconf, "PrecompiledHeaderOutputFile")
pch_out.text = "$(IntDir)$(TargetName)-%s.pch" % pch_source[2]
resourcecompile = ET.SubElement(compiles, "ResourceCompile")
ET.SubElement(resourcecompile, "PreprocessorDefinitions")
# Linker options
link = ET.SubElement(compiles, "Link")
extra_link_args = CompilerArgs(compiler)
# FIXME: Can these buildtype linker args be added as tags in the
# vcxproj file (similar to buildtype compiler args) instead of in
# AdditionalOptions?
extra_link_args += compiler.get_buildtype_linker_args(self.buildtype)
# Generate Debug info
if self.buildtype.startswith("debug"):
self.generate_debug_information(link)
if not isinstance(target, build.StaticLibrary):
if isinstance(target, build.SharedModule):
extra_link_args += compiler.get_std_shared_module_link_args()
# Add link args added using add_project_link_arguments()
extra_link_args += self.build.get_project_link_args(compiler, target.subproject)
# Add link args added using add_global_link_arguments()
# These override per-project link arguments
extra_link_args += self.build.get_global_link_args(compiler)
if not target.is_cross:
# Link args added from the env: LDFLAGS. We want these to
# override all the defaults but not the per-target link args.
extra_link_args += self.environment.coredata.external_link_args[
compiler.get_language()
]
# Only non-static built targets need link args and link dependencies
extra_link_args += target.link_args
# External deps must be last because target link libraries may depend on them.
for dep in target.get_external_deps():
# Extend without reordering or de-dup to preserve `-L -l` sets
# https://github.com/mesonbuild/meson/issues/1718
extra_link_args.extend_direct(dep.get_link_args())
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
extra_link_args.extend_direct(dep.get_link_args())
# Add link args for c_* or cpp_* build options. Currently this only
# adds c_winlibs and cpp_winlibs when building for Windows. This needs
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
extra_link_args += compiler.get_option_link_args(
self.environment.coredata.compiler_options
)
(additional_libpaths, additional_links, extra_link_args) = self.split_link_args(
extra_link_args.to_native()
)
# Add more libraries to be linked if needed
for t in target.get_dependencies():
lobj = self.build.targets[t.get_id()]
linkname = os.path.join(down, self.get_target_filename_for_linking(lobj))
if t in target.link_whole_targets:
# /WHOLEARCHIVE:foo must go into AdditionalOptions
extra_link_args += compiler.get_link_whole_for(linkname)
# To force Visual Studio to build this project even though it
# has no sources, we include a reference to the vcxproj file
# that builds this target. Technically we should add this only
# if the current target has no sources, but it doesn't hurt to
# have 'extra' references.
trelpath = self.get_target_dir_relative_to(t, target)
tvcxproj = os.path.join(trelpath, t.get_id() + ".vcxproj")
tid = self.environment.coredata.target_guids[t.get_id()]
self.add_project_reference(root, tvcxproj, tid)
else:
# Other libraries go into AdditionalDependencies
additional_links.append(linkname)
for lib in self.get_custom_target_provided_libraries(target):
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
additional_objects = []
for o in self.flatten_object_list(target, down):
assert isinstance(o, str)
additional_objects.append(o)
for o in custom_objs:
additional_objects.append(o)
if len(extra_link_args) > 0:
extra_link_args.append("%(AdditionalOptions)")
ET.SubElement(link, "AdditionalOptions").text = " ".join(extra_link_args)
if len(additional_libpaths) > 0:
additional_libpaths.insert(0, "%(AdditionalLibraryDirectories)")
ET.SubElement(link, "AdditionalLibraryDirectories").text = ";".join(
additional_libpaths
)
if len(additional_links) > 0:
additional_links.append("%(AdditionalDependencies)")
ET.SubElement(link, "AdditionalDependencies").text = ";".join(additional_links)
ofile = ET.SubElement(link, "OutputFile")
ofile.text = "$(OutDir)%s" % target.get_filename()
subsys = ET.SubElement(link, "SubSystem")
subsys.text = subsystem
if (
isinstance(target, build.SharedLibrary) or isinstance(target, build.Executable)
) and target.get_import_filename():
# DLLs built with MSVC always have an import library except when
# they're data-only DLLs, but we don't support those yet.
ET.SubElement(link, "ImportLibrary").text = target.get_import_filename()
if isinstance(target, build.SharedLibrary):
# Add module definitions file, if provided
if target.vs_module_defs:
relpath = os.path.join(
down, target.vs_module_defs.rel_to_builddir(self.build_to_src)
)
ET.SubElement(link, "ModuleDefinitionFile").text = relpath
if "/ZI" in buildtype_args or "/Zi" in buildtype_args:
pdb = ET.SubElement(link, "ProgramDataBaseFileName")
pdb.text = "$(OutDir}%s.pdb" % target_name
if isinstance(target, build.Executable):
ET.SubElement(link, "EntryPointSymbol").text = entrypoint
targetmachine = ET.SubElement(link, "TargetMachine")
targetplatform = self.platform.lower()
if targetplatform == "win32":
targetmachine.text = "MachineX86"
elif targetplatform == "x64":
targetmachine.text = "MachineX64"
elif targetplatform == "arm":
targetmachine.text = "MachineARM"
else:
raise MesonException(
"Unsupported Visual Studio target machine: " + targetmachine
)
extra_files = target.extra_files
if len(headers) + len(gen_hdrs) + len(extra_files) > 0:
inc_hdrs = ET.SubElement(root, "ItemGroup")
for h in headers:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_hdrs, "CLInclude", Include=relpath)
for h in gen_hdrs:
ET.SubElement(inc_hdrs, "CLInclude", Include=h)
for h in target.extra_files:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_hdrs, "CLInclude", Include=relpath)
if len(sources) + len(gen_src) + len(pch_sources) > 0:
inc_src = ET.SubElement(root, "ItemGroup")
for s in sources:
relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
inc_cl = ET.SubElement(inc_src, "CLCompile", Include=relpath)
lang = Vs2010Backend.lang_from_source_file(s)
self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
ET.SubElement(inc_cl, "ObjectFileName").text = (
"$(IntDir)" + self.object_filename_from_source(target, s, False)
)
for s in gen_src:
inc_cl = ET.SubElement(inc_src, "CLCompile", Include=s)
lang = Vs2010Backend.lang_from_source_file(s)
self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
for lang in pch_sources:
header, impl, suffix = pch_sources[lang]
relpath = os.path.join(proj_to_src_dir, impl)
inc_cl = ET.SubElement(inc_src, "CLCompile", Include=relpath)
pch = ET.SubElement(inc_cl, "PrecompiledHeader")
pch.text = "Create"
pch_out = ET.SubElement(inc_cl, "PrecompiledHeaderOutputFile")
pch_out.text = "$(IntDir)$(TargetName)-%s.pch" % suffix
pch_file = ET.SubElement(inc_cl, "PrecompiledHeaderFile")
# MSBuild searches for the header relative from the implementation, so we have to use
# just the file name instead of the relative path to the file.
pch_file.text = os.path.split(header)[1]
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
if self.has_objects(objects, additional_objects, gen_objs):
inc_objs = ET.SubElement(root, "ItemGroup")
for s in objects:
relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_objs, "Object", Include=relpath)
for s in additional_objects:
ET.SubElement(inc_objs, "Object", Include=s)
self.add_generated_objects(inc_objs, gen_objs)
ET.SubElement(root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.targets")
# Reference the regen target.
regen_vcxproj = os.path.join(self.environment.get_build_dir(), "REGEN.vcxproj")
self.add_project_reference(
root, regen_vcxproj, self.environment.coredata.regen_guid
)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
|
https://github.com/mesonbuild/meson/issues/2833
|
Traceback (most recent call last):
File "mesonbuild\mesonmain.py", line 352, in run
File "mesonbuild\mesonmain.py", line 147, in generate
File "mesonbuild\mesonmain.py", line 197, in _generate
File "mesonbuild\backend\vs2010backend.py", line 161, in generate
File "mesonbuild\backend\vs2010backend.py", line 311, in generate_projects
File "mesonbuild\backend\vs2010backend.py", line 864, in gen_vcxproj
IndexError: list index out of range
|
IndexError
|
def autodetect_vs_version(build):
vs_version = os.getenv("VisualStudioVersion", None)
vs_install_dir = os.getenv("VSINSTALLDIR", None)
if not vs_install_dir:
raise MesonException(
"Could not detect Visual Studio: Environment variable VSINSTALLDIR is not set!\n"
"Are you running meson from the Visual Studio Developer Command Prompt?"
)
# VisualStudioVersion is set since Visual Studio 12.0, but sometimes
# vcvarsall.bat doesn't set it, so also use VSINSTALLDIR
if vs_version == "14.0" or "Visual Studio 14" in vs_install_dir:
from mesonbuild.backend.vs2015backend import Vs2015Backend
return Vs2015Backend(build)
if (
vs_version == "15.0"
or "Visual Studio 17" in vs_install_dir
or "Visual Studio\\2017" in vs_install_dir
):
from mesonbuild.backend.vs2017backend import Vs2017Backend
return Vs2017Backend(build)
if "Visual Studio 10.0" in vs_install_dir:
return Vs2010Backend(build)
raise MesonException(
"Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n"
"Please specify the exact backend to use.".format(vs_version, vs_install_dir)
)
|
def autodetect_vs_version(build):
vs_version = os.getenv("VisualStudioVersion", None)
vs_install_dir = os.getenv("VSINSTALLDIR", None)
if not vs_version and not vs_install_dir:
raise MesonException(
"Could not detect Visual Studio: VisualStudioVersion and VSINSTALLDIR are unset!\n"
"Are we inside a Visual Studio build environment? "
"You can also try specifying the exact backend to use."
)
# VisualStudioVersion is set since Visual Studio 12.0, but sometimes
# vcvarsall.bat doesn't set it, so also use VSINSTALLDIR
if vs_version == "14.0" or "Visual Studio 14" in vs_install_dir:
from mesonbuild.backend.vs2015backend import Vs2015Backend
return Vs2015Backend(build)
if (
vs_version == "15.0"
or "Visual Studio 17" in vs_install_dir
or "Visual Studio\\2017" in vs_install_dir
):
from mesonbuild.backend.vs2017backend import Vs2017Backend
return Vs2017Backend(build)
if "Visual Studio 10.0" in vs_install_dir:
return Vs2010Backend(build)
raise MesonException(
"Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n"
"Please specify the exact backend to use.".format(vs_version, vs_install_dir)
)
|
https://github.com/mesonbuild/meson/issues/2848
|
1>------ Build started: Project: REGEN, Configuration: debug Win32 ------
1>Checking whether solution needs to be regenerated.
1>The Meson build system
1>Version: 0.45.0.dev1
1>Source dir: C:\msys64\home\Polarina\mysa
1>Build dir: C:\msys64\home\Polarina\mysa\build
1>Build type: native build
1>Traceback (most recent call last):
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 365, in run
1> app.generate()
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 151, in generate
1> self._generate(env)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 174, in _generate
1> g = vs2010backend.autodetect_vs_version(b)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\backend\vs2010backend.py", line 38, in autodetect_vs_version
1> if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir:
1>TypeError: argument of type 'NoneType' is not iterable
1>Traceback (most recent call last):
1> File "C:\Users\Polarina\AppData\Local\Programs\Python\Python36-32\Scripts\meson.py", line 26, in <module>
1> sys.exit(main())
1> File "C:\Users\Polarina\AppData\Local\Programs\Python\Python36-32\Scripts\meson.py", line 23, in main
1> return mesonmain.run(sys.argv[1:], launcher)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 322, in run
1> sys.exit(run_script_command(args[1:]))
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 278, in run_script_command
1> return cmdfunc(cmdargs)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\scripts\regen_checker.py", line 55, in run
1> regen(regeninfo, mesonscript, backend)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\scripts\regen_checker.py", line 41, in regen
1> subprocess.check_call(cmd)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\subprocess.py", line 291, in check_call
1> raise CalledProcessError(retcode, cmd)
1>subprocess.CalledProcessError: Command '['c:\\users\\polarina\\appdata\\local\\programs\\python\\python36-32\\python.exe', 'C:\\Users\\Polarina\\AppData\\Local\\Programs\\Python\\Python36-32\\Scripts\\meson.py', '--internal', 'regenerate', 'C:\\msys64\\home\\Polarina\\mysa\\build', 'C:\\msys64\\home\\Polarina\\mysa', '--backend=vs']' returned non-zero exit status 1.
1>C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\IDE\VC\VCTargets\Microsoft.CppCommon.targets(171,5): error MSB6006: "cmd.exe" exited with code 1.
1>Done building project "REGEN.vcxproj" -- FAILED.
========== Build: 0 succeeded, 1 failed, 0 up-to-date, 0 skipped ==========
|
TypeError
|
def _generate(self, env):
mlog.debug("Build started at", datetime.datetime.now().isoformat())
mlog.debug("Main binary:", sys.executable)
mlog.debug("Python system:", platform.system())
mlog.log(mlog.bold("The Meson build system"))
self.check_pkgconfig_envvar(env)
mlog.log("Version:", coredata.version)
mlog.log("Source dir:", mlog.bold(self.source_dir))
mlog.log("Build dir:", mlog.bold(self.build_dir))
if env.is_cross_build():
mlog.log("Build type:", mlog.bold("cross build"))
else:
mlog.log("Build type:", mlog.bold("native build"))
b = build.Build(env)
if self.options.backend == "ninja":
from .backend import ninjabackend
g = ninjabackend.NinjaBackend(b)
elif self.options.backend == "vs":
from .backend import vs2010backend
g = vs2010backend.autodetect_vs_version(b)
env.coredata.set_builtin_option("backend", g.name)
mlog.log("Auto detected Visual Studio backend:", mlog.bold(g.name))
elif self.options.backend == "vs2010":
from .backend import vs2010backend
g = vs2010backend.Vs2010Backend(b)
elif self.options.backend == "vs2015":
from .backend import vs2015backend
g = vs2015backend.Vs2015Backend(b)
elif self.options.backend == "vs2017":
from .backend import vs2017backend
g = vs2017backend.Vs2017Backend(b)
elif self.options.backend == "xcode":
from .backend import xcodebackend
g = xcodebackend.XCodeBackend(b)
else:
raise RuntimeError('Unknown backend "%s".' % self.options.backend)
intr = interpreter.Interpreter(b, g)
if env.is_cross_build():
mlog.log(
"Host machine cpu family:",
mlog.bold(intr.builtin["host_machine"].cpu_family_method([], {})),
)
mlog.log(
"Host machine cpu:",
mlog.bold(intr.builtin["host_machine"].cpu_method([], {})),
)
mlog.log(
"Target machine cpu family:",
mlog.bold(intr.builtin["target_machine"].cpu_family_method([], {})),
)
mlog.log(
"Target machine cpu:",
mlog.bold(intr.builtin["target_machine"].cpu_method([], {})),
)
mlog.log(
"Build machine cpu family:",
mlog.bold(intr.builtin["build_machine"].cpu_family_method([], {})),
)
mlog.log(
"Build machine cpu:",
mlog.bold(intr.builtin["build_machine"].cpu_method([], {})),
)
intr.run()
try:
# We would like to write coredata as late as possible since we use the existence of
# this file to check if we generated the build file successfully. Since coredata
# includes settings, the build files must depend on it and appear newer. However, due
# to various kernel caches, we cannot guarantee that any time in Python is exactly in
# sync with the time that gets applied to any files. Thus, we dump this file as late as
# possible, but before build files, and if any error occurs, delete it.
cdf = env.dump_coredata()
g.generate(intr)
dumpfile = os.path.join(env.get_scratch_dir(), "build.dat")
with open(dumpfile, "wb") as f:
pickle.dump(b, f)
# Post-conf scripts must be run after writing coredata or else introspection fails.
g.run_postconf_scripts()
except:
os.unlink(cdf)
raise
|
def _generate(self, env):
mlog.debug("Build started at", datetime.datetime.now().isoformat())
mlog.debug("Main binary:", sys.executable)
mlog.debug("Python system:", platform.system())
mlog.log(mlog.bold("The Meson build system"))
self.check_pkgconfig_envvar(env)
mlog.log("Version:", coredata.version)
mlog.log("Source dir:", mlog.bold(self.source_dir))
mlog.log("Build dir:", mlog.bold(self.build_dir))
if env.is_cross_build():
mlog.log("Build type:", mlog.bold("cross build"))
else:
mlog.log("Build type:", mlog.bold("native build"))
b = build.Build(env)
if self.options.backend == "ninja":
from .backend import ninjabackend
g = ninjabackend.NinjaBackend(b)
elif self.options.backend == "vs":
from .backend import vs2010backend
g = vs2010backend.autodetect_vs_version(b)
mlog.log("Auto detected Visual Studio backend:", mlog.bold(g.name))
elif self.options.backend == "vs2010":
from .backend import vs2010backend
g = vs2010backend.Vs2010Backend(b)
elif self.options.backend == "vs2015":
from .backend import vs2015backend
g = vs2015backend.Vs2015Backend(b)
elif self.options.backend == "vs2017":
from .backend import vs2017backend
g = vs2017backend.Vs2017Backend(b)
elif self.options.backend == "xcode":
from .backend import xcodebackend
g = xcodebackend.XCodeBackend(b)
else:
raise RuntimeError('Unknown backend "%s".' % self.options.backend)
intr = interpreter.Interpreter(b, g)
if env.is_cross_build():
mlog.log(
"Host machine cpu family:",
mlog.bold(intr.builtin["host_machine"].cpu_family_method([], {})),
)
mlog.log(
"Host machine cpu:",
mlog.bold(intr.builtin["host_machine"].cpu_method([], {})),
)
mlog.log(
"Target machine cpu family:",
mlog.bold(intr.builtin["target_machine"].cpu_family_method([], {})),
)
mlog.log(
"Target machine cpu:",
mlog.bold(intr.builtin["target_machine"].cpu_method([], {})),
)
mlog.log(
"Build machine cpu family:",
mlog.bold(intr.builtin["build_machine"].cpu_family_method([], {})),
)
mlog.log(
"Build machine cpu:",
mlog.bold(intr.builtin["build_machine"].cpu_method([], {})),
)
intr.run()
try:
# We would like to write coredata as late as possible since we use the existence of
# this file to check if we generated the build file successfully. Since coredata
# includes settings, the build files must depend on it and appear newer. However, due
# to various kernel caches, we cannot guarantee that any time in Python is exactly in
# sync with the time that gets applied to any files. Thus, we dump this file as late as
# possible, but before build files, and if any error occurs, delete it.
cdf = env.dump_coredata()
g.generate(intr)
dumpfile = os.path.join(env.get_scratch_dir(), "build.dat")
with open(dumpfile, "wb") as f:
pickle.dump(b, f)
# Post-conf scripts must be run after writing coredata or else introspection fails.
g.run_postconf_scripts()
except:
os.unlink(cdf)
raise
|
https://github.com/mesonbuild/meson/issues/2848
|
1>------ Build started: Project: REGEN, Configuration: debug Win32 ------
1>Checking whether solution needs to be regenerated.
1>The Meson build system
1>Version: 0.45.0.dev1
1>Source dir: C:\msys64\home\Polarina\mysa
1>Build dir: C:\msys64\home\Polarina\mysa\build
1>Build type: native build
1>Traceback (most recent call last):
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 365, in run
1> app.generate()
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 151, in generate
1> self._generate(env)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 174, in _generate
1> g = vs2010backend.autodetect_vs_version(b)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\backend\vs2010backend.py", line 38, in autodetect_vs_version
1> if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir:
1>TypeError: argument of type 'NoneType' is not iterable
1>Traceback (most recent call last):
1> File "C:\Users\Polarina\AppData\Local\Programs\Python\Python36-32\Scripts\meson.py", line 26, in <module>
1> sys.exit(main())
1> File "C:\Users\Polarina\AppData\Local\Programs\Python\Python36-32\Scripts\meson.py", line 23, in main
1> return mesonmain.run(sys.argv[1:], launcher)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 322, in run
1> sys.exit(run_script_command(args[1:]))
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\mesonmain.py", line 278, in run_script_command
1> return cmdfunc(cmdargs)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\scripts\regen_checker.py", line 55, in run
1> regen(regeninfo, mesonscript, backend)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\site-packages\mesonbuild\scripts\regen_checker.py", line 41, in regen
1> subprocess.check_call(cmd)
1> File "c:\users\polarina\appdata\local\programs\python\python36-32\lib\subprocess.py", line 291, in check_call
1> raise CalledProcessError(retcode, cmd)
1>subprocess.CalledProcessError: Command '['c:\\users\\polarina\\appdata\\local\\programs\\python\\python36-32\\python.exe', 'C:\\Users\\Polarina\\AppData\\Local\\Programs\\Python\\Python36-32\\Scripts\\meson.py', '--internal', 'regenerate', 'C:\\msys64\\home\\Polarina\\mysa\\build', 'C:\\msys64\\home\\Polarina\\mysa', '--backend=vs']' returned non-zero exit status 1.
1>C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\IDE\VC\VCTargets\Microsoft.CppCommon.targets(171,5): error MSB6006: "cmd.exe" exited with code 1.
1>Done building project "REGEN.vcxproj" -- FAILED.
========== Build: 0 succeeded, 1 failed, 0 up-to-date, 0 skipped ==========
|
TypeError
|
def _detect_c_or_cpp_compiler(self, lang, evar, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, evar, want_cross)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
if "cl" in compiler or "cl.exe" in compiler:
arg = "/?"
else:
arg = "--version"
try:
p, out, err = Popen_safe(compiler + [arg])
except OSError as e:
popen_exceptions[" ".join(compiler + [arg])] = e
continue
version = search_version(out)
if "Free Software Foundation" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[" ".join(compiler)] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
cls = GnuCCompiler if lang == "c" else GnuCPPCompiler
return cls(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)
if "clang" in out:
if "Apple" in out or for_darwin(want_cross, self):
cltype = CLANG_OSX
elif "windows" in out or for_windows(want_cross, self):
cltype = CLANG_WIN
else:
cltype = CLANG_STANDARD
cls = ClangCCompiler if lang == "c" else ClangCPPCompiler
return cls(ccache + compiler, version, cltype, is_cross, exe_wrap)
if "Microsoft" in out or "Microsoft" in err:
# Visual Studio prints version number to stderr but
# everything else to stdout. Why? Lord only knows.
version = search_version(err)
cls = VisualStudioCCompiler if lang == "c" else VisualStudioCPPCompiler
return cls(compiler, version, is_cross, exe_wrap)
if "(ICC)" in out:
# TODO: add microsoft add check OSX
inteltype = ICC_STANDARD
cls = IntelCCompiler if lang == "c" else IntelCPPCompiler
return cls(ccache + compiler, version, inteltype, is_cross, exe_wrap)
self._handle_exceptions(popen_exceptions, compilers)
|
def _detect_c_or_cpp_compiler(self, lang, evar, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, evar, want_cross)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
if "cl" in compiler or "cl.exe" in compiler:
arg = "/?"
else:
arg = "--version"
try:
p, out, err = Popen_safe(compiler + [arg])
except OSError as e:
popen_exceptions[" ".join(compiler + [arg])] = e
continue
version = search_version(out)
if "Free Software Foundation" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[compiler] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
cls = GnuCCompiler if lang == "c" else GnuCPPCompiler
return cls(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)
if "clang" in out:
if "Apple" in out or for_darwin(want_cross, self):
cltype = CLANG_OSX
elif "windows" in out or for_windows(want_cross, self):
cltype = CLANG_WIN
else:
cltype = CLANG_STANDARD
cls = ClangCCompiler if lang == "c" else ClangCPPCompiler
return cls(ccache + compiler, version, cltype, is_cross, exe_wrap)
if "Microsoft" in out or "Microsoft" in err:
# Visual Studio prints version number to stderr but
# everything else to stdout. Why? Lord only knows.
version = search_version(err)
cls = VisualStudioCCompiler if lang == "c" else VisualStudioCPPCompiler
return cls(compiler, version, is_cross, exe_wrap)
if "(ICC)" in out:
# TODO: add microsoft add check OSX
inteltype = ICC_STANDARD
cls = IntelCCompiler if lang == "c" else IntelCPPCompiler
return cls(ccache + compiler, version, inteltype, is_cross, exe_wrap)
self._handle_exceptions(popen_exceptions, compilers)
|
https://github.com/mesonbuild/meson/issues/1989
|
$ CC=cgcc meson ../ --prefix=/home/hughsie/.root
The Meson build system
Version: 0.42.0.dev1
Source dir: /home/hughsie/Code/fwupd
Build dir: /home/hughsie/Code/fwupd/build
Build type: native build
Project name: fwupd
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 307, in run
app.generate()
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 172, in generate
intr = interpreter.Interpreter(b, g)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1240, in __init__
self.parse_project()
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 111, in parse_project
self.evaluate_codeblock(self.ast, end=1)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 146, in evaluate_codeblock
raise e
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 140, in evaluate_codeblock
self.evaluate_statement(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 151, in evaluate_statement
return self.function_call(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 361, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1673, in func_project
self.add_languages(proj_langs, True)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1789, in add_languages
(comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1712, in detect_compilers
comp = self.environment.detect_c_compiler(False)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 504, in detect_c_compiler
return self._detect_c_or_cpp_compiler('c', 'CC', want_cross)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 475, in _detect_c_or_cpp_compiler
popen_exceptions[compiler] = 'no pre-processor defines'
TypeError: unhashable type: 'list'
|
TypeError
|
def detect_fortran_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(
"fortran", "FC", want_cross
)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
for arg in ["--version", "-V"]:
try:
p, out, err = Popen_safe(compiler + [arg])
except OSError as e:
popen_exceptions[" ".join(compiler + [arg])] = e
continue
version = search_version(out)
if "GNU Fortran" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[" ".join(compiler)] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
return GnuFortranCompiler(
compiler, version, gtype, is_cross, exe_wrap, defines
)
if "G95" in out:
return G95FortranCompiler(compiler, version, is_cross, exe_wrap)
if "Sun Fortran" in err:
version = search_version(err)
return SunFortranCompiler(compiler, version, is_cross, exe_wrap)
if "ifort (IFORT)" in out:
return IntelFortranCompiler(compiler, version, is_cross, exe_wrap)
if "PathScale EKOPath(tm)" in err:
return PathScaleFortranCompiler(compiler, version, is_cross, exe_wrap)
if "PGI Compilers" in out:
return PGIFortranCompiler(compiler, version, is_cross, exe_wrap)
if "Open64 Compiler Suite" in err:
return Open64FortranCompiler(compiler, version, is_cross, exe_wrap)
if "NAG Fortran" in err:
return NAGFortranCompiler(compiler, version, is_cross, exe_wrap)
self._handle_exceptions(popen_exceptions, compilers)
|
def detect_fortran_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(
"fortran", "FC", want_cross
)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
for arg in ["--version", "-V"]:
try:
p, out, err = Popen_safe(compiler + [arg])
except OSError as e:
popen_exceptions[" ".join(compiler + [arg])] = e
continue
version = search_version(out)
if "GNU Fortran" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[compiler] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
return GnuFortranCompiler(
compiler, version, gtype, is_cross, exe_wrap, defines
)
if "G95" in out:
return G95FortranCompiler(compiler, version, is_cross, exe_wrap)
if "Sun Fortran" in err:
version = search_version(err)
return SunFortranCompiler(compiler, version, is_cross, exe_wrap)
if "ifort (IFORT)" in out:
return IntelFortranCompiler(compiler, version, is_cross, exe_wrap)
if "PathScale EKOPath(tm)" in err:
return PathScaleFortranCompiler(compiler, version, is_cross, exe_wrap)
if "PGI Compilers" in out:
return PGIFortranCompiler(compiler, version, is_cross, exe_wrap)
if "Open64 Compiler Suite" in err:
return Open64FortranCompiler(compiler, version, is_cross, exe_wrap)
if "NAG Fortran" in err:
return NAGFortranCompiler(compiler, version, is_cross, exe_wrap)
self._handle_exceptions(popen_exceptions, compilers)
|
https://github.com/mesonbuild/meson/issues/1989
|
$ CC=cgcc meson ../ --prefix=/home/hughsie/.root
The Meson build system
Version: 0.42.0.dev1
Source dir: /home/hughsie/Code/fwupd
Build dir: /home/hughsie/Code/fwupd/build
Build type: native build
Project name: fwupd
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 307, in run
app.generate()
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 172, in generate
intr = interpreter.Interpreter(b, g)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1240, in __init__
self.parse_project()
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 111, in parse_project
self.evaluate_codeblock(self.ast, end=1)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 146, in evaluate_codeblock
raise e
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 140, in evaluate_codeblock
self.evaluate_statement(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 151, in evaluate_statement
return self.function_call(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 361, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1673, in func_project
self.add_languages(proj_langs, True)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1789, in add_languages
(comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1712, in detect_compilers
comp = self.environment.detect_c_compiler(False)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 504, in detect_c_compiler
return self._detect_c_or_cpp_compiler('c', 'CC', want_cross)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 475, in _detect_c_or_cpp_compiler
popen_exceptions[compiler] = 'no pre-processor defines'
TypeError: unhashable type: 'list'
|
TypeError
|
def detect_objc_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(
"objc", "OBJC", want_cross
)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
arg = ["--version"]
try:
p, out, err = Popen_safe(compiler + arg)
except OSError as e:
popen_exceptions[" ".join(compiler + arg)] = e
version = search_version(out)
if "Free Software Foundation" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[" ".join(compiler)] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
return GnuObjCCompiler(
ccache + compiler, version, gtype, is_cross, exe_wrap, defines
)
if out.startswith("Apple LLVM"):
return ClangObjCCompiler(
ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap
)
if out.startswith("clang"):
return ClangObjCCompiler(
ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap
)
self._handle_exceptions(popen_exceptions, compilers)
|
def detect_objc_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(
"objc", "OBJC", want_cross
)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
arg = ["--version"]
try:
p, out, err = Popen_safe(compiler + arg)
except OSError as e:
popen_exceptions[" ".join(compiler + arg)] = e
version = search_version(out)
if "Free Software Foundation" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[compiler] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
return GnuObjCCompiler(
ccache + compiler, version, gtype, is_cross, exe_wrap, defines
)
if out.startswith("Apple LLVM"):
return ClangObjCCompiler(
ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap
)
if out.startswith("clang"):
return ClangObjCCompiler(
ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap
)
self._handle_exceptions(popen_exceptions, compilers)
|
https://github.com/mesonbuild/meson/issues/1989
|
$ CC=cgcc meson ../ --prefix=/home/hughsie/.root
The Meson build system
Version: 0.42.0.dev1
Source dir: /home/hughsie/Code/fwupd
Build dir: /home/hughsie/Code/fwupd/build
Build type: native build
Project name: fwupd
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 307, in run
app.generate()
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 172, in generate
intr = interpreter.Interpreter(b, g)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1240, in __init__
self.parse_project()
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 111, in parse_project
self.evaluate_codeblock(self.ast, end=1)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 146, in evaluate_codeblock
raise e
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 140, in evaluate_codeblock
self.evaluate_statement(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 151, in evaluate_statement
return self.function_call(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 361, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1673, in func_project
self.add_languages(proj_langs, True)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1789, in add_languages
(comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1712, in detect_compilers
comp = self.environment.detect_c_compiler(False)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 504, in detect_c_compiler
return self._detect_c_or_cpp_compiler('c', 'CC', want_cross)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 475, in _detect_c_or_cpp_compiler
popen_exceptions[compiler] = 'no pre-processor defines'
TypeError: unhashable type: 'list'
|
TypeError
|
def detect_objcpp_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(
"objcpp", "OBJCXX", want_cross
)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
arg = ["--version"]
try:
p, out, err = Popen_safe(compiler + arg)
except OSError as e:
popen_exceptions[" ".join(compiler + arg)] = e
version = search_version(out)
if "Free Software Foundation" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[" ".join(compiler)] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
return GnuObjCPPCompiler(
ccache + compiler, version, gtype, is_cross, exe_wrap, defines
)
if out.startswith("Apple LLVM"):
return ClangObjCPPCompiler(
ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap
)
if out.startswith("clang"):
return ClangObjCPPCompiler(
ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap
)
self._handle_exceptions(popen_exceptions, compilers)
|
def detect_objcpp_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(
"objcpp", "OBJCXX", want_cross
)
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
arg = ["--version"]
try:
p, out, err = Popen_safe(compiler + arg)
except OSError as e:
popen_exceptions[" ".join(compiler + arg)] = e
version = search_version(out)
if "Free Software Foundation" in out:
defines = self.get_gnu_compiler_defines(compiler)
if not defines:
popen_exceptions[compiler] = "no pre-processor defines"
continue
gtype = self.get_gnu_compiler_type(defines)
version = self.get_gnu_version_from_defines(defines)
return GnuObjCPPCompiler(
ccache + compiler, version, gtype, is_cross, exe_wrap, defines
)
if out.startswith("Apple LLVM"):
return ClangObjCPPCompiler(
ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap
)
if out.startswith("clang"):
return ClangObjCPPCompiler(
ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap
)
self._handle_exceptions(popen_exceptions, compilers)
|
https://github.com/mesonbuild/meson/issues/1989
|
$ CC=cgcc meson ../ --prefix=/home/hughsie/.root
The Meson build system
Version: 0.42.0.dev1
Source dir: /home/hughsie/Code/fwupd
Build dir: /home/hughsie/Code/fwupd/build
Build type: native build
Project name: fwupd
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 307, in run
app.generate()
File "/usr/lib/python3.6/site-packages/mesonbuild/mesonmain.py", line 172, in generate
intr = interpreter.Interpreter(b, g)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1240, in __init__
self.parse_project()
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 111, in parse_project
self.evaluate_codeblock(self.ast, end=1)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 146, in evaluate_codeblock
raise e
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 140, in evaluate_codeblock
self.evaluate_statement(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 151, in evaluate_statement
return self.function_call(cur)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 361, in function_call
return self.funcs[func_name](node, self.flatten(posargs), kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreterbase.py", line 55, in wrapped
return f(self, node, args, kwargs)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1673, in func_project
self.add_languages(proj_langs, True)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1789, in add_languages
(comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
File "/usr/lib/python3.6/site-packages/mesonbuild/interpreter.py", line 1712, in detect_compilers
comp = self.environment.detect_c_compiler(False)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 504, in detect_c_compiler
return self._detect_c_or_cpp_compiler('c', 'CC', want_cross)
File "/usr/lib/python3.6/site-packages/mesonbuild/environment.py", line 475, in _detect_c_or_cpp_compiler
popen_exceptions[compiler] = 'no pre-processor defines'
TypeError: unhashable type: 'list'
|
TypeError
|
def _find_source_script(self, name, args):
# Prefer scripts in the current source directory
search_dir = os.path.join(
self.interpreter.environment.source_dir, self.interpreter.subdir
)
key = (name, search_dir)
if key in self._found_source_scripts:
found = self._found_source_scripts[key]
else:
found = dependencies.ExternalProgram(name, search_dir=search_dir)
if found.found():
self._found_source_scripts[key] = found
else:
raise InterpreterException("Script {!r} not found".format(name))
return build.RunScript(found.get_command(), args)
|
def _find_source_script(self, name, args):
# Prefer scripts in the current source directory
search_dir = os.path.join(
self.interpreter.environment.source_dir, self.interpreter.subdir
)
key = (name, search_dir)
if key in self._found_source_scripts:
found = self._found_source_scripts[key]
else:
found = dependencies.ExternalProgram(name, search_dir=search_dir)
if found:
self._found_source_scripts[key] = found
else:
raise InterpreterException("Script {!r} not found".format(name))
return build.RunScript(found.get_command(), args)
|
https://github.com/mesonbuild/meson/issues/1600
|
Traceback (most recent call last):
File "/usr/bin/meson", line 37, in <module>
sys.exit(main())
File "/usr/bin/meson", line 34, in main
return mesonmain.run(launcher, sys.argv[1:])
File "/home/zbyszek/.local/lib/python3.5/site-packages/meson-0.40.0.dev1-py3.5.egg/mesonbuild/mesonmain.py", line 260, in run
sys.exit(run_script_command(args[1:]))
File "/home/zbyszek/.local/lib/python3.5/site-packages/meson-0.40.0.dev1-py3.5.egg/mesonbuild/mesonmain.py", line 248, in run_script_command
return cmdfunc(cmdargs)
File "/home/zbyszek/.local/lib/python3.5/site-packages/meson-0.40.0.dev1-py3.5.egg/mesonbuild/scripts/meson_install.py", line 306, in run
do_install(datafilename)
File "/home/zbyszek/.local/lib/python3.5/site-packages/meson-0.40.0.dev1-py3.5.egg/mesonbuild/scripts/meson_install.py", line 129, in do_install
run_install_script(d)
File "/home/zbyszek/.local/lib/python3.5/site-packages/meson-0.40.0.dev1-py3.5.egg/mesonbuild/scripts/meson_install.py", line 193, in run_install_script
name = ' '.join(script + args)
TypeError: sequence item 0: expected str instance, NoneType found
|
TypeError
|
def generate_custom_generator_commands(self, target, parent_node):
generator_output_files = []
custom_target_include_dirs = []
custom_target_output_files = []
target_private_dir = self.relpath(
self.get_target_private_dir(target), self.get_target_dir(target)
)
down = self.target_to_build_root(target)
for genlist in target.get_generated_sources():
if isinstance(genlist, build.CustomTarget):
for i in genlist.get_outputs():
# Path to the generated source from the current vcxproj dir via the build root
ipath = os.path.join(down, self.get_target_dir(genlist), i)
custom_target_output_files.append(ipath)
idir = self.relpath(
self.get_target_dir(genlist), self.get_target_dir(target)
)
if idir not in custom_target_include_dirs:
custom_target_include_dirs.append(idir)
else:
generator = genlist.get_generator()
exe = generator.get_exe()
infilelist = genlist.get_inputs()
outfilelist = genlist.get_outputs()
exe_arr = self.exe_object_to_cmd_array(exe)
base_args = generator.get_arglist()
idgroup = ET.SubElement(parent_node, "ItemGroup")
for i in range(len(infilelist)):
if len(infilelist) == len(outfilelist):
sole_output = os.path.join(target_private_dir, outfilelist[i])
else:
sole_output = ""
curfile = infilelist[i]
infilename = os.path.join(
down, curfile.rel_to_builddir(self.build_to_src)
)
outfiles_rel = genlist.get_outputs_for(curfile)
outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel]
generator_output_files += outfiles
args = [
x.replace("@INPUT@", infilename).replace("@OUTPUT@", sole_output)
for x in base_args
]
args = self.replace_outputs(args, target_private_dir, outfiles_rel)
args = [
x.replace(
"@SOURCE_DIR@", self.environment.get_source_dir()
).replace("@BUILD_DIR@", target_private_dir)
for x in args
]
cmd = exe_arr + self.replace_extra_args(args, genlist)
cbs = ET.SubElement(idgroup, "CustomBuild", Include=infilename)
ET.SubElement(cbs, "Command").text = " ".join(self.quote_arguments(cmd))
ET.SubElement(cbs, "Outputs").text = ";".join(outfiles)
return (
generator_output_files,
custom_target_output_files,
custom_target_include_dirs,
)
|
def generate_custom_generator_commands(self, target, parent_node):
generator_output_files = []
commands = []
inputs = []
outputs = []
custom_target_include_dirs = []
custom_target_output_files = []
target_private_dir = self.relpath(
self.get_target_private_dir(target), self.get_target_dir(target)
)
down = self.target_to_build_root(target)
for genlist in target.get_generated_sources():
if isinstance(genlist, build.CustomTarget):
for i in genlist.get_outputs():
# Path to the generated source from the current vcxproj dir via the build root
ipath = os.path.join(down, self.get_target_dir(genlist), i)
custom_target_output_files.append(ipath)
idir = self.relpath(
self.get_target_dir(genlist), self.get_target_dir(target)
)
if idir not in custom_target_include_dirs:
custom_target_include_dirs.append(idir)
else:
generator = genlist.get_generator()
exe = generator.get_exe()
infilelist = genlist.get_inputs()
outfilelist = genlist.get_outputs()
exe_arr = self.exe_object_to_cmd_array(exe)
base_args = generator.get_arglist()
for i in range(len(infilelist)):
if len(infilelist) == len(outfilelist):
sole_output = os.path.join(target_private_dir, outfilelist[i])
else:
sole_output = ""
curfile = infilelist[i]
infilename = os.path.join(
down, curfile.rel_to_builddir(self.build_to_src)
)
outfiles_rel = genlist.get_outputs_for(curfile)
outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel]
generator_output_files += outfiles
args = [
x.replace("@INPUT@", infilename).replace("@OUTPUT@", sole_output)
for x in base_args
]
args = self.replace_outputs(args, target_private_dir, outfiles_rel)
args = [
x.replace(
"@SOURCE_DIR@", self.environment.get_source_dir()
).replace("@BUILD_DIR@", target_private_dir)
for x in args
]
fullcmd = exe_arr + self.replace_extra_args(args, genlist)
commands.append(" ".join(self.special_quote(fullcmd)))
inputs.append(infilename)
outputs.extend(outfiles)
if len(commands) > 0:
idgroup = ET.SubElement(parent_node, "ItemDefinitionGroup")
cbs = ET.SubElement(idgroup, "CustomBuildStep")
ET.SubElement(cbs, "Command").text = "\r\n".join(commands)
ET.SubElement(cbs, "Inputs").text = ";".join(inputs)
ET.SubElement(cbs, "Outputs").text = ";".join(outputs)
ET.SubElement(cbs, "Message").text = "Generating custom sources."
pg = ET.SubElement(parent_node, "PropertyGroup")
ET.SubElement(pg, "CustomBuildBeforeTargets").text = "ClCompile"
return (
generator_output_files,
custom_target_output_files,
custom_target_include_dirs,
)
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def get_target_deps(self, t, recursive=False):
all_deps = {}
for target in t.values():
if isinstance(target, build.CustomTarget):
for d in target.get_target_dependencies():
all_deps[d.get_id()] = d
elif isinstance(target, build.RunTarget):
for d in [target.command] + target.args:
if isinstance(d, (build.BuildTarget, build.CustomTarget)):
all_deps[d.get_id()] = d
elif isinstance(target, build.BuildTarget):
for ldep in target.link_targets:
all_deps[ldep.get_id()] = ldep
for obj_id, objdep in self.get_obj_target_deps(target.objects):
all_deps[obj_id] = objdep
for gendep in target.get_generated_sources():
if isinstance(gendep, build.CustomTarget):
all_deps[gendep.get_id()] = gendep
else:
gen_exe = gendep.generator.get_exe()
if isinstance(gen_exe, build.Executable):
all_deps[gen_exe.get_id()] = gen_exe
else:
raise MesonException("Unknown target type for target %s" % target)
if not t or not recursive:
return all_deps
ret = self.get_target_deps(all_deps, recursive)
ret.update(all_deps)
return ret
|
def get_target_deps(self, t, recursive=False):
all_deps = {}
for target in t.values():
if isinstance(target, build.CustomTarget):
for d in target.get_target_dependencies():
all_deps[d.get_id()] = d
elif isinstance(target, build.RunTarget):
for d in [target.command] + target.args:
if isinstance(d, (build.BuildTarget, build.CustomTarget)):
all_deps[d.get_id()] = d
# BuildTarget
else:
for ldep in target.link_targets:
all_deps[ldep.get_id()] = ldep
for obj_id, objdep in self.get_obj_target_deps(target.objects):
all_deps[obj_id] = objdep
for gendep in target.get_generated_sources():
if isinstance(gendep, build.CustomTarget):
all_deps[gendep.get_id()] = gendep
else:
gen_exe = gendep.generator.get_exe()
if isinstance(gen_exe, build.Executable):
all_deps[gen_exe.get_id()] = gen_exe
if not t or not recursive:
return all_deps
ret = self.get_target_deps(all_deps, recursive)
ret.update(all_deps)
return ret
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def gen_custom_target_vcxproj(self, target, ofname, guid):
root = self.create_basic_crap(target)
action = ET.SubElement(root, "ItemDefinitionGroup")
customstep = ET.SubElement(action, "CustomBuildStep")
# We need to always use absolute paths because our invocation is always
# from the target dir, not the build root.
target.absolute_paths = True
(srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True)
ET.SubElement(customstep, "Command").text = " ".join(self.quote_arguments(cmd))
ET.SubElement(customstep, "Outputs").text = ";".join(ofilenames)
ET.SubElement(customstep, "Inputs").text = ";".join(srcs)
ET.SubElement(root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.targets")
self.generate_custom_generator_commands(target, root)
tree = ET.ElementTree(root)
tree.write(ofname, encoding="utf-8", xml_declaration=True)
|
def gen_custom_target_vcxproj(self, target, ofname, guid):
root = self.create_basic_crap(target)
action = ET.SubElement(root, "ItemDefinitionGroup")
customstep = ET.SubElement(action, "CustomBuildStep")
# We need to always use absolute paths because our invocation is always
# from the target dir, not the build root.
target.absolute_paths = True
(srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True)
cmd_templ = """"%s" """ * len(cmd)
ET.SubElement(customstep, "Command").text = cmd_templ % tuple(cmd)
ET.SubElement(customstep, "Outputs").text = ";".join(ofilenames)
ET.SubElement(customstep, "Inputs").text = ";".join(srcs)
ET.SubElement(root, "Import", Project="$(VCTargetsPath)\Microsoft.Cpp.targets")
tree = ET.ElementTree(root)
tree.write(ofname, encoding="utf-8", xml_declaration=True)
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def has_objects(objects, additional_objects, generated_objects):
# Ignore generated objects, those are automatically used by MSBuild because they are part of
# the CustomBuild Outputs.
return len(objects) + len(additional_objects) > 0
|
def has_objects(objects, additional_objects, generated_objects):
# Ignore generated objects, those are automatically used by MSBuild for VS2010, because they are part of
# the CustomBuildStep Outputs.
return len(objects) + len(additional_objects) > 0
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def add_generated_objects(node, generated_objects):
# Do not add generated objects to project file. Those are automatically used by MSBuild, because
# they are part of the CustomBuild Outputs.
return
|
def add_generated_objects(node, generated_objects):
# Do not add generated objects to project file. Those are automatically used by MSBuild for VS2010, because
# they are part of the CustomBuildStep Outputs.
return
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def get_target_dependencies(self):
deps = self.dependencies[:]
deps += self.extra_depends
for c in self.sources:
if hasattr(c, "held_object"):
c = c.held_object
if isinstance(c, (BuildTarget, CustomTarget)):
deps.append(c)
return deps
|
def get_target_dependencies(self):
deps = self.dependencies[:]
deps += self.extra_depends
for c in self.sources:
if hasattr(c, "held_object"):
c = c.held_object
if isinstance(c, (BuildTarget, CustomTarget, GeneratedList)):
deps.append(c)
return deps
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def get_generated_sources(self):
return self.get_generated_lists()
|
def get_generated_sources(self):
return []
|
https://github.com/mesonbuild/meson/issues/1004
|
Traceback (most recent call last):
File "C:\projects\meson\mesonbuild\mesonmain.py", line 279, in run
app.generate()
File "C:\projects\meson\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 168, in generate
self.generate_solution(sln_filename, projlist)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 233, in generate_solution
all_deps = self.determine_deps(p)
File "C:\projects\meson\mesonbuild\backend\vs2010backend.py", line 203, in determine_deps
all_deps[d.get_id()] = True
AttributeError: 'GeneratedList' object has no attribute 'get_id'
|
AttributeError
|
def unpack_env_kwarg(self, kwargs):
envlist = kwargs.get("env", EnvironmentVariablesHolder())
if isinstance(envlist, EnvironmentVariablesHolder):
env = envlist.held_object
else:
if not isinstance(envlist, list):
envlist = [envlist]
env = {}
for e in envlist:
if "=" not in e:
raise InterpreterException(
"Env var definition must be of type key=val."
)
(k, val) = e.split("=", 1)
k = k.strip()
val = val.strip()
if " " in k:
raise InterpreterException("Env var key must not have spaces in it.")
env[k] = val
return env
|
def unpack_env_kwarg(self, kwargs):
envlist = kwargs.get("env", [])
if isinstance(envlist, EnvironmentVariablesHolder):
env = envlist.held_object
else:
if not isinstance(envlist, list):
envlist = [envlist]
env = {}
for e in envlist:
if "=" not in e:
raise InterpreterException(
"Env var definition must be of type key=val."
)
(k, val) = e.split("=", 1)
k = k.strip()
val = val.strip()
if " " in k:
raise InterpreterException("Env var key must not have spaces in it.")
env[k] = val
return env
|
https://github.com/mesonbuild/meson/issues/1371
|
$ /home/cassidy/dev/meson/mesontest.py -C build --setup leaks
ninja: Entering directory `/home/cassidy/dev/gst/master/gst-build/build'
ninja: no work to do.
Traceback (most recent call last):
File "/home/cassidy/dev/meson/mesontest.py", line 579, in <module>
sys.exit(run(sys.argv[1:]))
File "/home/cassidy/dev/meson/mesontest.py", line 575, in run
return th.doit()
File "/home/cassidy/dev/meson/mesontest.py", line 337, in doit
self.run_tests(tests)
File "/home/cassidy/dev/meson/mesontest.py", line 458, in run_tests
(logfile, logfilename, jsonlogfile, jsonlogfilename) = self.open_log_files()
File "/home/cassidy/dev/meson/mesontest.py", line 415, in open_log_files
namebase = os.path.split(self.get_wrapper()[0])[1]
IndexError: list index out of range
|
IndexError
|
def get_option_link_args(self, options):
# FIXME: See GnuCCompiler.get_option_link_args
if "c_winlibs" in options:
return options["c_winlibs"].value[:]
else:
return msvc_winlibs[:]
|
def get_option_link_args(self, options):
return options["c_winlibs"].value[:]
|
https://github.com/mesonbuild/meson/issues/1029
|
The Meson build system
Version: 0.35.1
Source dir: F:\avian\test
Build dir: F:\avian\test\build\linux-64
Build type: cross build
Project name: avian 18:22Native cpp compiler: c++ (gcc 4.8.1)
Cross cpp compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-g++ (gcc 4.8.2)
Native c compiler: gcc (gcc 4.8.1)
Cross c compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-gcc (gcc 4.8.2)
Native java compiler: javac (unknown 1.8.0)
Cross java compiler: javac (unknown 1.8.0)
Host machine cpu family: x86_64
Host machine cpu: i686
Target machine cpu family: x86_64
Target machine cpu: i686
Build machine cpu family: x86_64
Build machine cpu: x86_64
Found pkg-config: f:\cygwin64\bin\pkg-config.EXE (0.29.1)
Cross dependency zlib found: YES 1.2.7
Checking for size of "void*": 8
Build targets in project: 2
Traceback (most recent call last):
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 282, in run
app.generate()
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 187, in generate
self.generate_target(t, outfile)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 355, in generate_target
elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 1817, in generate_link
commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\compilers.py", line 2027, in get_option_link_args
return options['cpp_winlibs'].value
KeyError: 'cpp_winlibs'
|
KeyError
|
def get_option_link_args(self, options):
# FIXME: See GnuCCompiler.get_option_link_args
if "cpp_winlibs" in options:
return options["cpp_winlibs"].value[:]
else:
return msvc_winlibs[:]
|
def get_option_link_args(self, options):
return options["cpp_winlibs"].value[:]
|
https://github.com/mesonbuild/meson/issues/1029
|
The Meson build system
Version: 0.35.1
Source dir: F:\avian\test
Build dir: F:\avian\test\build\linux-64
Build type: cross build
Project name: avian 18:22Native cpp compiler: c++ (gcc 4.8.1)
Cross cpp compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-g++ (gcc 4.8.2)
Native c compiler: gcc (gcc 4.8.1)
Cross c compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-gcc (gcc 4.8.2)
Native java compiler: javac (unknown 1.8.0)
Cross java compiler: javac (unknown 1.8.0)
Host machine cpu family: x86_64
Host machine cpu: i686
Target machine cpu family: x86_64
Target machine cpu: i686
Build machine cpu family: x86_64
Build machine cpu: x86_64
Found pkg-config: f:\cygwin64\bin\pkg-config.EXE (0.29.1)
Cross dependency zlib found: YES 1.2.7
Checking for size of "void*": 8
Build targets in project: 2
Traceback (most recent call last):
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 282, in run
app.generate()
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 187, in generate
self.generate_target(t, outfile)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 355, in generate_target
elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 1817, in generate_link
commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\compilers.py", line 2027, in get_option_link_args
return options['cpp_winlibs'].value
KeyError: 'cpp_winlibs'
|
KeyError
|
def get_option_link_args(self, options):
if self.gcc_type == GCC_MINGW:
# FIXME: This check is needed because we currently pass
# cross-compiler options to the native compiler too and when
# cross-compiling from Windows to Linux, `options` will contain
# Linux-specific options which doesn't include `c_winlibs`. The
# proper fix is to allow cross-info files to specify compiler
# options and to maintain both cross and native compiler options in
# coredata: https://github.com/mesonbuild/meson/issues/1029
if "c_winlibs" in options:
return options["c_winlibs"].value[:]
else:
return gnu_winlibs[:]
return []
|
def get_option_link_args(self, options):
if self.gcc_type == GCC_MINGW:
return options["c_winlibs"].value
return []
|
https://github.com/mesonbuild/meson/issues/1029
|
The Meson build system
Version: 0.35.1
Source dir: F:\avian\test
Build dir: F:\avian\test\build\linux-64
Build type: cross build
Project name: avian 18:22Native cpp compiler: c++ (gcc 4.8.1)
Cross cpp compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-g++ (gcc 4.8.2)
Native c compiler: gcc (gcc 4.8.1)
Cross c compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-gcc (gcc 4.8.2)
Native java compiler: javac (unknown 1.8.0)
Cross java compiler: javac (unknown 1.8.0)
Host machine cpu family: x86_64
Host machine cpu: i686
Target machine cpu family: x86_64
Target machine cpu: i686
Build machine cpu family: x86_64
Build machine cpu: x86_64
Found pkg-config: f:\cygwin64\bin\pkg-config.EXE (0.29.1)
Cross dependency zlib found: YES 1.2.7
Checking for size of "void*": 8
Build targets in project: 2
Traceback (most recent call last):
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 282, in run
app.generate()
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 187, in generate
self.generate_target(t, outfile)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 355, in generate_target
elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 1817, in generate_link
commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\compilers.py", line 2027, in get_option_link_args
return options['cpp_winlibs'].value
KeyError: 'cpp_winlibs'
|
KeyError
|
def get_options(self):
opts = {
"cpp_std": coredata.UserComboOption(
"cpp_std",
"C++ language standard to use",
[
"none",
"c++03",
"c++11",
"c++14",
"c++1z",
"gnu++03",
"gnu++11",
"gnu++14",
"gnu++1z",
],
"none",
),
"cpp_debugstl": coredata.UserBooleanOption(
"cpp_debugstl", "STL debug mode", False
),
}
if self.gcc_type == GCC_MINGW:
opts.update(
{
"cpp_winlibs": coredata.UserStringArrayOption(
"cpp_winlibs", "Standard Win libraries to link against", gnu_winlibs
),
}
)
return opts
|
def get_options(self):
opts = {
"cpp_std": coredata.UserComboOption(
"cpp_std",
"C++ language standard to use",
[
"none",
"c++03",
"c++11",
"c++14",
"c++1z",
"gnu++03",
"gnu++11",
"gnu++14",
"gnu++1z",
],
"none",
),
"cpp_debugstl": coredata.UserBooleanOption(
"cpp_debugstl", "STL debug mode", False
),
}
if self.gcc_type == GCC_MINGW:
opts.update(
{
"cpp_winlibs": coredata.UserStringArrayOption(
"c_winlibs", "Standard Win libraries to link against", gnu_winlibs
),
}
)
return opts
|
https://github.com/mesonbuild/meson/issues/1029
|
The Meson build system
Version: 0.35.1
Source dir: F:\avian\test
Build dir: F:\avian\test\build\linux-64
Build type: cross build
Project name: avian 18:22Native cpp compiler: c++ (gcc 4.8.1)
Cross cpp compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-g++ (gcc 4.8.2)
Native c compiler: gcc (gcc 4.8.1)
Cross c compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-gcc (gcc 4.8.2)
Native java compiler: javac (unknown 1.8.0)
Cross java compiler: javac (unknown 1.8.0)
Host machine cpu family: x86_64
Host machine cpu: i686
Target machine cpu family: x86_64
Target machine cpu: i686
Build machine cpu family: x86_64
Build machine cpu: x86_64
Found pkg-config: f:\cygwin64\bin\pkg-config.EXE (0.29.1)
Cross dependency zlib found: YES 1.2.7
Checking for size of "void*": 8
Build targets in project: 2
Traceback (most recent call last):
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 282, in run
app.generate()
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 187, in generate
self.generate_target(t, outfile)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 355, in generate_target
elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 1817, in generate_link
commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\compilers.py", line 2027, in get_option_link_args
return options['cpp_winlibs'].value
KeyError: 'cpp_winlibs'
|
KeyError
|
def get_option_link_args(self, options):
if self.gcc_type == GCC_MINGW:
# FIXME: See GnuCCompiler.get_option_link_args
if "cpp_winlibs" in options:
return options["cpp_winlibs"].value[:]
else:
return gnu_winlibs[:]
return []
|
def get_option_link_args(self, options):
if self.gcc_type == GCC_MINGW:
return options["cpp_winlibs"].value
return []
|
https://github.com/mesonbuild/meson/issues/1029
|
The Meson build system
Version: 0.35.1
Source dir: F:\avian\test
Build dir: F:\avian\test\build\linux-64
Build type: cross build
Project name: avian 18:22Native cpp compiler: c++ (gcc 4.8.1)
Cross cpp compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-g++ (gcc 4.8.2)
Native c compiler: gcc (gcc 4.8.1)
Cross c compiler: f:/cygwin64/bin/x86_64-pc-linux-gnu-gcc (gcc 4.8.2)
Native java compiler: javac (unknown 1.8.0)
Cross java compiler: javac (unknown 1.8.0)
Host machine cpu family: x86_64
Host machine cpu: i686
Target machine cpu family: x86_64
Target machine cpu: i686
Build machine cpu family: x86_64
Build machine cpu: x86_64
Found pkg-config: f:\cygwin64\bin\pkg-config.EXE (0.29.1)
Cross dependency zlib found: YES 1.2.7
Checking for size of "void*": 8
Build targets in project: 2
Traceback (most recent call last):
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 282, in run
app.generate()
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\mesonmain.py", line 169, in generate
g.generate(intr)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 187, in generate
self.generate_target(t, outfile)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 355, in generate_target
elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\backend\ninjabackend.py", line 1817, in generate_link
commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
File "c:\program files (x86)\python35-32\lib\site-packages\mesonbuild\compilers.py", line 2027, in get_option_link_args
return options['cpp_winlibs'].value
KeyError: 'cpp_winlibs'
|
KeyError
|
def __init__(self, env, kwargs):
QtBaseDependency.__init__(self, "qt5", env, kwargs)
|
def __init__(self, environment, kwargs):
Dependency.__init__(self, "qt5")
self.name = "qt5"
self.root = "/usr"
mods = kwargs.get("modules", [])
self.cargs = []
self.largs = []
self.is_found = False
if isinstance(mods, str):
mods = [mods]
if len(mods) == 0:
raise DependencyException("No Qt5 modules specified.")
type_text = "native"
if environment.is_cross_build() and kwargs.get("native", False):
type_text = "cross"
self.pkgconfig_detect(mods, environment, kwargs)
elif not environment.is_cross_build() and shutil.which("pkg-config") is not None:
self.pkgconfig_detect(mods, environment, kwargs)
elif shutil.which("qmake") is not None:
self.qmake_detect(mods, kwargs)
else:
self.version = "none"
if not self.is_found:
mlog.log("Qt5 %s dependency found: " % type_text, mlog.red("NO"))
else:
mlog.log("Qt5 %s dependency found: " % type_text, mlog.green("YES"))
|
https://github.com/mesonbuild/meson/issues/758
|
Traceback (most recent call last):
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 282, in run
app.generate()
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 169, in generate
g.generate(intr)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 184, in generate
self.generate_target(t, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 257, in generate_target
self.generate_custom_generator_rules(target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1360, in generate_custom_generator_rules
self.generate_genlist_for_target(genlist, target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1406, in generate_genlist_for_target
elem.write(outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 116, in write
if quote_char == '"':
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 35, in ninja_quote
return text.replace(' ', '$ ').replace(':', '$:')
AttributeError: 'NoneType' object has no attribute 'replace'
|
AttributeError
|
def __init__(self, env, kwargs):
QtBaseDependency.__init__(self, "qt4", env, kwargs)
|
def __init__(self, environment, kwargs):
Dependency.__init__(self, "qt4")
self.name = "qt4"
self.root = "/usr"
self.modules = []
mods = kwargs.get("modules", [])
if isinstance(mods, str):
mods = [mods]
for module in mods:
self.modules.append(PkgConfigDependency("Qt" + module, environment, kwargs))
if len(self.modules) == 0:
raise DependencyException("No Qt4 modules specified.")
|
https://github.com/mesonbuild/meson/issues/758
|
Traceback (most recent call last):
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 282, in run
app.generate()
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 169, in generate
g.generate(intr)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 184, in generate
self.generate_target(t, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 257, in generate_target
self.generate_custom_generator_rules(target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1360, in generate_custom_generator_rules
self.generate_genlist_for_target(genlist, target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1406, in generate_genlist_for_target
elem.write(outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 116, in write
if quote_char == '"':
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 35, in ninja_quote
return text.replace(' ', '$ ').replace(':', '$:')
AttributeError: 'NoneType' object has no attribute 'replace'
|
AttributeError
|
def preprocess(self, state, args, kwargs):
rcc_files = kwargs.pop("qresources", [])
if not isinstance(rcc_files, list):
rcc_files = [rcc_files]
ui_files = kwargs.pop("ui_files", [])
if not isinstance(ui_files, list):
ui_files = [ui_files]
moc_headers = kwargs.pop("moc_headers", [])
if not isinstance(moc_headers, list):
moc_headers = [moc_headers]
moc_sources = kwargs.pop("moc_sources", [])
if not isinstance(moc_sources, list):
moc_sources = [moc_sources]
srctmp = kwargs.pop("sources", [])
if not isinstance(srctmp, list):
srctmp = [srctmp]
sources = args[1:] + srctmp
self._detect_tools(state.environment)
err_msg = (
"{0} sources specified and couldn't find {1}, "
"please check your qt4 installation"
)
if len(moc_headers) + len(moc_sources) > 0 and not self.moc.found():
raise MesonException(err_msg.format("MOC", "moc-qt4"))
if len(rcc_files) > 0:
if not self.rcc.found():
raise MesonException(err_msg.format("RCC", "rcc-qt4"))
qrc_deps = []
for i in rcc_files:
qrc_deps += self.parse_qrc(state, i)
basename = os.path.split(rcc_files[0])[1]
name = "qt4-" + basename.replace(".", "_")
rcc_kwargs = {
"input": rcc_files,
"output": name + ".cpp",
"command": [self.rcc, "-o", "@OUTPUT@", "@INPUT@"],
"depend_files": qrc_deps,
}
res_target = build.CustomTarget(name, state.subdir, rcc_kwargs)
sources.append(res_target)
if len(ui_files) > 0:
if not self.uic.found():
raise MesonException(err_msg.format("UIC", "uic-qt4"))
ui_kwargs = {
"output": "ui_@BASENAME@.h",
"arguments": ["-o", "@OUTPUT@", "@INPUT@"],
}
ui_gen = build.Generator([self.uic], ui_kwargs)
ui_output = build.GeneratedList(ui_gen)
[ui_output.add_file(os.path.join(state.subdir, a)) for a in ui_files]
sources.append(ui_output)
if len(moc_headers) > 0:
moc_kwargs = {
"output": "moc_@BASENAME@.cpp",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_headers]
sources.append(moc_output)
if len(moc_sources) > 0:
moc_kwargs = {
"output": "@BASENAME@.moc",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_sources]
sources.append(moc_output)
return sources
|
def preprocess(self, state, args, kwargs):
rcc_files = kwargs.pop("qresources", [])
if not isinstance(rcc_files, list):
rcc_files = [rcc_files]
ui_files = kwargs.pop("ui_files", [])
if not isinstance(ui_files, list):
ui_files = [ui_files]
moc_headers = kwargs.pop("moc_headers", [])
if not isinstance(moc_headers, list):
moc_headers = [moc_headers]
moc_sources = kwargs.pop("moc_sources", [])
if not isinstance(moc_sources, list):
moc_sources = [moc_sources]
srctmp = kwargs.pop("sources", [])
if not isinstance(srctmp, list):
srctmp = [srctmp]
sources = args[1:] + srctmp
if len(rcc_files) > 0:
rcc_kwargs = {
"output": "@BASENAME@.cpp",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
rcc_gen = build.Generator([self.rcc], rcc_kwargs)
rcc_output = build.GeneratedList(rcc_gen)
qrc_deps = []
for i in rcc_files:
qrc_deps += self.parse_qrc(state, i)
rcc_output.extra_depends = qrc_deps
[rcc_output.add_file(os.path.join(state.subdir, a)) for a in rcc_files]
sources.append(rcc_output)
if len(ui_files) > 0:
ui_kwargs = {
"output": "ui_@BASENAME@.h",
"arguments": ["-o", "@OUTPUT@", "@INPUT@"],
}
ui_gen = build.Generator([self.uic], ui_kwargs)
ui_output = build.GeneratedList(ui_gen)
[ui_output.add_file(os.path.join(state.subdir, a)) for a in ui_files]
sources.append(ui_output)
if len(moc_headers) > 0:
moc_kwargs = {
"output": "moc_@BASENAME@.cpp",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_headers]
sources.append(moc_output)
if len(moc_sources) > 0:
moc_kwargs = {
"output": "@BASENAME@.moc",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_sources]
sources.append(moc_output)
return sources
|
https://github.com/mesonbuild/meson/issues/758
|
Traceback (most recent call last):
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 282, in run
app.generate()
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 169, in generate
g.generate(intr)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 184, in generate
self.generate_target(t, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 257, in generate_target
self.generate_custom_generator_rules(target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1360, in generate_custom_generator_rules
self.generate_genlist_for_target(genlist, target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1406, in generate_genlist_for_target
elem.write(outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 116, in write
if quote_char == '"':
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 35, in ninja_quote
return text.replace(' ', '$ ').replace(':', '$:')
AttributeError: 'NoneType' object has no attribute 'replace'
|
AttributeError
|
def preprocess(self, state, args, kwargs):
rcc_files = kwargs.pop("qresources", [])
if not isinstance(rcc_files, list):
rcc_files = [rcc_files]
ui_files = kwargs.pop("ui_files", [])
if not isinstance(ui_files, list):
ui_files = [ui_files]
moc_headers = kwargs.pop("moc_headers", [])
if not isinstance(moc_headers, list):
moc_headers = [moc_headers]
moc_sources = kwargs.pop("moc_sources", [])
if not isinstance(moc_sources, list):
moc_sources = [moc_sources]
srctmp = kwargs.pop("sources", [])
if not isinstance(srctmp, list):
srctmp = [srctmp]
sources = args[1:] + srctmp
self._detect_tools(state.environment)
err_msg = (
"{0} sources specified and couldn't find {1}, "
"please check your qt5 installation"
)
if len(moc_headers) + len(moc_sources) > 0 and not self.moc.found():
raise MesonException(err_msg.format("MOC", "moc-qt5"))
if len(rcc_files) > 0:
if not self.rcc.found():
raise MesonException(err_msg.format("RCC", "rcc-qt5"))
qrc_deps = []
for i in rcc_files:
qrc_deps += self.parse_qrc(state, i)
basename = os.path.split(rcc_files[0])[1]
rcc_kwargs = {
"input": rcc_files,
"output": basename + ".cpp",
"command": [self.rcc, "-o", "@OUTPUT@", "@INPUT@"],
"depend_files": qrc_deps,
}
name = "qt5-" + basename.replace(".", "_")
res_target = build.CustomTarget(name, state.subdir, rcc_kwargs)
sources.append(res_target)
if len(ui_files) > 0:
if not self.uic.found():
raise MesonException(err_msg.format("UIC", "uic-qt5"))
ui_kwargs = {
"output": "ui_@BASENAME@.h",
"arguments": ["-o", "@OUTPUT@", "@INPUT@"],
}
ui_gen = build.Generator([self.uic], ui_kwargs)
ui_output = build.GeneratedList(ui_gen)
[ui_output.add_file(os.path.join(state.subdir, a)) for a in ui_files]
sources.append(ui_output)
if len(moc_headers) > 0:
moc_kwargs = {
"output": "moc_@BASENAME@.cpp",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_headers]
sources.append(moc_output)
if len(moc_sources) > 0:
moc_kwargs = {
"output": "@BASENAME@.moc",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_sources]
sources.append(moc_output)
return sources
|
def preprocess(self, state, args, kwargs):
rcc_files = kwargs.pop("qresources", [])
if not isinstance(rcc_files, list):
rcc_files = [rcc_files]
ui_files = kwargs.pop("ui_files", [])
if not isinstance(ui_files, list):
ui_files = [ui_files]
moc_headers = kwargs.pop("moc_headers", [])
if not isinstance(moc_headers, list):
moc_headers = [moc_headers]
moc_sources = kwargs.pop("moc_sources", [])
if not isinstance(moc_sources, list):
moc_sources = [moc_sources]
srctmp = kwargs.pop("sources", [])
if not isinstance(srctmp, list):
srctmp = [srctmp]
sources = args[1:] + srctmp
if len(rcc_files) > 0:
qrc_deps = []
for i in rcc_files:
qrc_deps += self.parse_qrc(state, i)
basename = os.path.split(rcc_files[0])[1]
rcc_kwargs = {
"input": rcc_files,
"output": basename + ".cpp",
"command": [self.rcc, "-o", "@OUTPUT@", "@INPUT@"],
"depend_files": qrc_deps,
}
res_target = build.CustomTarget(
basename.replace(".", "_"), state.subdir, rcc_kwargs
)
sources.append(res_target)
if len(ui_files) > 0:
ui_kwargs = {
"output": "ui_@BASENAME@.h",
"arguments": ["-o", "@OUTPUT@", "@INPUT@"],
}
ui_gen = build.Generator([self.uic], ui_kwargs)
ui_output = build.GeneratedList(ui_gen)
[ui_output.add_file(os.path.join(state.subdir, a)) for a in ui_files]
sources.append(ui_output)
if len(moc_headers) > 0:
moc_kwargs = {
"output": "moc_@BASENAME@.cpp",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_headers]
sources.append(moc_output)
if len(moc_sources) > 0:
moc_kwargs = {
"output": "@BASENAME@.moc",
"arguments": ["@INPUT@", "-o", "@OUTPUT@"],
}
moc_gen = build.Generator([self.moc], moc_kwargs)
moc_output = build.GeneratedList(moc_gen)
[moc_output.add_file(os.path.join(state.subdir, a)) for a in moc_sources]
sources.append(moc_output)
return sources
|
https://github.com/mesonbuild/meson/issues/758
|
Traceback (most recent call last):
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 282, in run
app.generate()
File "/home/jenkins/workspace/meson/mesonbuild/mesonmain.py", line 169, in generate
g.generate(intr)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 184, in generate
self.generate_target(t, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 257, in generate_target
self.generate_custom_generator_rules(target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1360, in generate_custom_generator_rules
self.generate_genlist_for_target(genlist, target, outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 1406, in generate_genlist_for_target
elem.write(outfile)
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 116, in write
if quote_char == '"':
File "/home/jenkins/workspace/meson/mesonbuild/backend/ninjabackend.py", line 35, in ninja_quote
return text.replace(' ', '$ ').replace(':', '$:')
AttributeError: 'NoneType' object has no attribute 'replace'
|
AttributeError
|
def eval_custom_target_command(self, target, absolute_paths=False):
if not absolute_paths:
ofilenames = [
os.path.join(self.get_target_dir(target), i) for i in target.output
]
else:
ofilenames = [
os.path.join(
self.environment.get_build_dir(), self.get_target_dir(target), i
)
for i in target.output
]
srcs = []
outdir = self.get_target_dir(target)
# Many external programs fail on empty arguments.
if outdir == "":
outdir = "."
if absolute_paths:
outdir = os.path.join(self.environment.get_build_dir(), outdir)
for i in target.get_sources():
if hasattr(i, "held_object"):
i = i.held_object
if isinstance(i, str):
fname = [os.path.join(self.build_to_src, target.subdir, i)]
elif isinstance(i, build.BuildTarget):
fname = [self.get_target_filename(i)]
elif isinstance(i, build.CustomTarget):
fname = [os.path.join(self.get_target_dir(i), p) for p in i.get_outputs()]
elif isinstance(i, build.GeneratedList):
fname = [
os.path.join(self.get_target_private_dir(target), p)
for p in i.get_outputs()
]
else:
fname = [i.rel_to_builddir(self.build_to_src)]
if absolute_paths:
fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname]
srcs += fname
cmd = []
for i in target.command:
if isinstance(i, build.Executable):
cmd += self.exe_object_to_cmd_array(i)
continue
elif isinstance(i, build.CustomTarget):
# GIR scanner will attempt to execute this binary but
# it assumes that it is in path, so always give it a full path.
tmp = i.get_outputs()[0]
i = os.path.join(self.get_target_dir(i), tmp)
elif isinstance(i, mesonlib.File):
i = i.rel_to_builddir(self.build_to_src)
if absolute_paths:
i = os.path.join(self.environment.get_build_dir(), i)
# FIXME: str types are blindly added and ignore the 'absolute_paths' argument
elif not isinstance(i, str):
err_msg = "Argument {0} is of unknown type {1}"
raise RuntimeError(err_msg.format(str(i), str(type(i))))
for j, src in enumerate(srcs):
i = i.replace("@INPUT%d@" % j, src)
for j, res in enumerate(ofilenames):
i = i.replace("@OUTPUT%d@" % j, res)
if "@INPUT@" in i:
msg = "Custom target {} has @INPUT@ in the command, but".format(target.name)
if len(srcs) == 0:
raise MesonException(msg + " no input files")
if i == "@INPUT@":
cmd += srcs
continue
else:
if len(srcs) > 1:
raise MesonException(msg + " more than one input file")
i = i.replace("@INPUT@", srcs[0])
elif "@OUTPUT@" in i:
msg = "Custom target {} has @OUTPUT@ in the command, but".format(
target.name
)
if len(ofilenames) == 0:
raise MesonException(msg + " no output files")
if i == "@OUTPUT@":
cmd += ofilenames
continue
else:
if len(ofilenames) > 1:
raise MesonException(msg + " more than one output file")
i = i.replace("@OUTPUT@", ofilenames[0])
elif "@OUTDIR@" in i:
i = i.replace("@OUTDIR@", outdir)
elif "@DEPFILE@" in i:
if target.depfile is None:
msg = (
"Custom target {!r} has @DEPFILE@ but no depfile "
"keyword argument.".format(target.name)
)
raise MesonException(msg)
dfilename = os.path.join(outdir, target.depfile)
i = i.replace("@DEPFILE@", dfilename)
elif "@PRIVATE_OUTDIR_" in i:
match = re.search("@PRIVATE_OUTDIR_(ABS_)?([^\/\s*]*)@", i)
if not match:
msg = "Custom target {!r} has an invalid argument {!r}".format(
target.name, i
)
raise MesonException(msg)
source = match.group(0)
if match.group(1) is None and not absolute_paths:
lead_dir = ""
else:
lead_dir = self.environment.get_build_dir()
i = i.replace(source, os.path.join(lead_dir, outdir))
cmd.append(i)
# This should not be necessary but removing it breaks
# building GStreamer on Windows. The underlying issue
# is problems with quoting backslashes on Windows
# which is the seventh circle of hell. The downside is
# that this breaks custom targets whose command lines
# have backslashes. If you try to fix this be sure to
# check that it does not break GST.
#
# The bug causes file paths such as c:\foo to get escaped
# into c:\\foo.
#
# Unfortunately we have not been able to come up with an
# isolated test case for this so unless you manage to come up
# with one, the only way is to test the building with Gst's
# setup. Note this in your MR or ping us and we will get it
# fixed.
#
# https://github.com/mesonbuild/meson/pull/737
cmd = [i.replace("\\", "/") for i in cmd]
return (srcs, ofilenames, cmd)
|
def eval_custom_target_command(self, target, absolute_paths=False):
if not absolute_paths:
ofilenames = [
os.path.join(self.get_target_dir(target), i) for i in target.output
]
else:
ofilenames = [
os.path.join(
self.environment.get_build_dir(), self.get_target_dir(target), i
)
for i in target.output
]
srcs = []
outdir = self.get_target_dir(target)
# Many external programs fail on empty arguments.
if outdir == "":
outdir = "."
if absolute_paths:
outdir = os.path.join(self.environment.get_build_dir(), outdir)
for i in target.get_sources():
if hasattr(i, "held_object"):
i = i.held_object
if isinstance(i, str):
fname = [os.path.join(self.build_to_src, target.subdir, i)]
elif isinstance(i, build.BuildTarget):
fname = [self.get_target_filename(i)]
elif isinstance(i, build.CustomTarget):
fname = [os.path.join(self.get_target_dir(i), p) for p in i.get_outputs()]
elif isinstance(i, build.GeneratedList):
fname = [
os.path.join(self.get_target_private_dir(target), p)
for p in i.get_outputs()
]
else:
fname = [i.rel_to_builddir(self.build_to_src)]
if absolute_paths:
fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname]
srcs += fname
cmd = []
for i in target.command:
if isinstance(i, build.Executable):
cmd += self.exe_object_to_cmd_array(i)
continue
elif isinstance(i, build.CustomTarget):
# GIR scanner will attempt to execute this binary but
# it assumes that it is in path, so always give it a full path.
tmp = i.get_outputs()[0]
i = os.path.join(self.get_target_dir(i), tmp)
elif isinstance(i, mesonlib.File):
i = i.rel_to_builddir(self.build_to_src)
if absolute_paths:
i = os.path.join(self.environment.get_build_dir(), i)
# FIXME: str types are blindly added and ignore the 'absolute_paths' argument
elif not isinstance(i, str):
err_msg = "Argument {0} is of unknown type {1}"
raise RuntimeError(err_msg.format(str(i), str(type(i))))
for j, src in enumerate(srcs):
i = i.replace("@INPUT%d@" % j, src)
for j, res in enumerate(ofilenames):
i = i.replace("@OUTPUT%d@" % j, res)
if "@INPUT@" in i:
msg = "Custom target {} has @INPUT@ in the command, but".format(target.name)
if len(srcs) == 0:
raise MesonException(msg + " no input files")
if i == "@INPUT@":
cmd += srcs
continue
else:
if len(srcs) > 1:
raise MesonException(msg + " more than one input file")
i = i.replace("@INPUT@", srcs[0])
elif "@OUTPUT@" in i:
msg = "Custom target {} has @OUTPUT@ in the command, but".format(
target.name
)
if len(ofilenames) == 0:
raise MesonException(msg + " no output files")
if i == "@OUTPUT@":
cmd += ofilenames
continue
else:
if len(ofilenames) > 1:
raise MesonException(msg + " more than one output file")
i = i.replace("@OUTPUT@", ofilenames[0])
elif "@OUTDIR@" in i:
i = i.replace("@OUTDIR@", outdir)
elif "@DEPFILE@" in i:
if target.depfile is None:
raise MesonException(
"Custom target %s has @DEPFILE@ but no depfile keyword argument."
% target.name
)
dfilename = os.path.join(outdir, target.depfile)
i = i.replace("@DEPFILE@", dfilename)
elif "@PRIVATE_OUTDIR_" in i:
match = re.search("@PRIVATE_OUTDIR_(ABS_)?([-a-zA-Z0-9.@:]*)@", i)
source = match.group(0)
if match.group(1) is None and not absolute_paths:
lead_dir = ""
else:
lead_dir = self.environment.get_build_dir()
i = i.replace(source, os.path.join(lead_dir, outdir))
cmd.append(i)
# This should not be necessary but removing it breaks
# building GStreamer on Windows. The underlying issue
# is problems with quoting backslashes on Windows
# which is the seventh circle of hell. The downside is
# that this breaks custom targets whose command lines
# have backslashes. If you try to fix this be sure to
# check that it does not break GST.
#
# The bug causes file paths such as c:\foo to get escaped
# into c:\\foo.
#
# Unfortunately we have not been able to come up with an
# isolated test case for this so unless you manage to come up
# with one, the only way is to test the building with Gst's
# setup. Note this in your MR or ping us and we will get it
# fixed.
#
# https://github.com/mesonbuild/meson/pull/737
cmd = [i.replace("\\", "/") for i in cmd]
return (srcs, ofilenames, cmd)
|
https://github.com/mesonbuild/meson/issues/436
|
(dev_env)[meh@meh-host build]$ rm -rf * && meson.py ..
The Meson build system
Version: 0.30.0.dev1
Source dir: /home/meh/devel/hotdoc/test_hotdoc
Build dir: /home/meh/devel/hotdoc/test_hotdoc/build
Build type: native build
Build machine cpu family: x86_64
Build machine cpu: x86_64
Project name: Hotdoc-Test
Native c compiler: cc (gcc 5.3.1)
Warning, glib compiled dependencies will not work reliably until this upstream issue is fixed: https://bugzilla.gnome.org/show_bug.cgi?id=745754
Found pkg-config: /usr/bin/pkg-config (0.28)
Native dependency glib-2.0 found: YES 2.44.1
Native dependency gobject-2.0 found: YES 2.44.1
Build targets in project: 3
Traceback (most recent call last):
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/mesonmain.py", line 266, in run
app.generate()
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/mesonmain.py", line 170, in generate
g.generate(intr)
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/backend/ninjabackend.py", line 176, in generate
[self.generate_target(t, outfile) for t in self.build.get_targets().values()]
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/backend/ninjabackend.py", line 176, in <listcomp>
[self.generate_target(t, outfile) for t in self.build.get_targets().values()]
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/backend/ninjabackend.py", line 219, in generate_target
self.generate_custom_target(target, outfile)
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/backend/ninjabackend.py", line 338, in generate_custom_target
(srcs, ofilenames, cmd) = self.eval_custom_target_command(target)
File "/usr/lib/python3.4/site-packages/meson-0.30.0.dev1-py3.4.egg/mesonbuild/backend/backends.py", line 427, in eval_custom_target_command
source = match.group(0)
AttributeError: 'NoneType' object has no attribute 'group'
(dev_env)[meh@meh-host build]$
|
AttributeError
|
def can_compile(self, filename):
suffix = filename.split(".")[-1]
return suffix in ("vala", "vapi")
|
def can_compile(self, fname):
return fname.endswith(".vala") or fname.endswith(".vapi")
|
https://github.com/mesonbuild/meson/issues/189
|
Traceback (most recent call last):
File "/usr/share/meson/meson.py", line 188, in run
app.generate()
File "/usr/share/meson/meson.py", line 141, in generate
g.generate()
File "/usr/share/meson/ninjabackend.py", line 132, in generate
[self.generate_target(t, outfile) for t in self.build.get_targets().values()]
File "/usr/share/meson/ninjabackend.py", line 132, in <listcomp>
[self.generate_target(t, outfile) for t in self.build.get_targets().values()]
File "/usr/share/meson/ninjabackend.py", line 209, in generate_target
header_deps))
File "/usr/share/meson/ninjabackend.py", line 1100, in generate_single_compile
compiler = self.get_compiler_for_source(src)
File "/usr/share/meson/backends.py", line 54, in get_compiler_for_source
if i.can_compile(src):
File "/usr/share/meson/compilers.py", line 824, in can_compile
return fname.endswith('.vala') or fname.endswith('.vapi')
AttributeError: 'RawFilename' object has no attribute 'endswith'
|
AttributeError
|
def _hval(value):
value = tonat(value)
if "\n" in value or "\r" in value or "\0" in value:
raise ValueError("Header value must not contain control characters: %r" % value)
return value
|
def _hval(value):
value = value if isinstance(value, unicode) else str(value)
if "\n" in value or "\r" in value or "\0" in value:
raise ValueError("Header value must not contain control characters: %r" % value)
return value
|
https://github.com/bottlepy/bottle/issues/923
|
Critical error while processing request: /
Error:
TypeError("WSGI response header value u'text/plain' is not of type str.",)
Traceback:
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/bottle.py", line 960, in wsgi
start_response(response._status_line, response.headerlist)
File "/usr/lib/python2.7/dist-packages/cherrypy/wsgiserver/wsgiserver2.py", line 2309, in start_response
"WSGI response header value %r is not of type str." % v)
TypeError: WSGI response header value u'text/plain' is not of type str.
|
TypeError
|
def _AddVersionResource(self, exe):
try:
from win32verstamp import stamp
except:
print("*** WARNING *** unable to create version resource")
print("install pywin32 extensions first")
return
fileName = exe.targetName
versionInfo = VersionInfo(
self.metadata.version,
comments=self.metadata.long_description,
description=self.metadata.description,
company=self.metadata.author,
product=self.metadata.name,
copyright=exe.copyright,
trademarks=exe.trademarks,
)
stamp(fileName, versionInfo)
|
def _AddVersionResource(self, fileName):
try:
from win32verstamp import stamp
except:
print("*** WARNING *** unable to create version resource")
print("install pywin32 extensions first")
return
versionInfo = VersionInfo(
self.metadata.version,
comments=self.metadata.long_description,
description=self.metadata.description,
company=self.metadata.author,
product=self.metadata.name,
)
stamp(fileName, versionInfo)
|
https://github.com/marcelotduarte/cx_Freeze/issues/94
|
C:\Users\Belli\Desktop\failing_cx_freeze>python setup.py build
running build
running build_exe
creating directory demo
copying C:\Miniconda\lib\site-packages\cx_Freeze\bases\Win32GUI.exe -> demo\demo.exe
Stamped: demo\demo.exe
writing zip file demo\demo.zip
Name File
---- ----
m __main__ demo.py
m cx_Freeze__init__ C:\Miniconda\lib\site-packages\cx_Freeze\initscripts\Console.py
Traceback (most recent call last):
File "setup.py", line 47, in <module>
executables = [GUI2Exe_Target_1]
File "C:\Miniconda\lib\site-packages\cx_Freeze\dist.py", line 362, in setup
distutils.core.setup(**attrs)
File "C:\Miniconda\lib\distutils\core.py", line 152, in setup
dist.run_commands()
File "C:\Miniconda\lib\distutils\dist.py", line 953, in run_commands
self.run_command(cmd)
File "C:\Miniconda\lib\distutils\dist.py", line 972, in run_command
cmd_obj.run()
File "C:\Miniconda\lib\distutils\command\build.py", line 127, in run
self.run_command(cmd_name)
File "C:\Miniconda\lib\distutils\cmd.py", line 326, in run_command
self.distribution.run_command(command)
File "C:\Miniconda\lib\distutils\dist.py", line 972, in run_command
cmd_obj.run()
File "C:\Miniconda\lib\site-packages\cx_Freeze\dist.py", line 232, in run
freezer.Freeze()
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 610, in Freeze
self.compress, self.copyDependentFiles)
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 510, in _WriteModules
module.Create(finder)
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 746, in Create
module.file, module.name)
cx_Freeze.freezer.ConfigError: no file named sys (for module CoolProp.CoolProp.sys)
|
cx_Freeze.freezer.ConfigError
|
def _FreezeExecutable(self, exe):
finder = self.finder
finder.IncludeFile(exe.script, exe.moduleName)
finder.IncludeFile(exe.initScript, exe.initModuleName)
self._CopyFile(exe.base, exe.targetName, copyDependentFiles=True, includeMode=True)
if self.includeMSVCR:
self._IncludeMSVCR(exe)
# Copy icon
if exe.icon is not None:
if sys.platform == "win32":
import cx_Freeze.util
cx_Freeze.util.AddIcon(exe.targetName, exe.icon)
else:
targetName = os.path.join(
os.path.dirname(exe.targetName), os.path.basename(exe.icon)
)
self._CopyFile(exe.icon, targetName, copyDependentFiles=False)
if not os.access(exe.targetName, os.W_OK):
mode = os.stat(exe.targetName).st_mode
os.chmod(exe.targetName, mode | stat.S_IWUSR)
if self.metadata is not None and sys.platform == "win32":
self._AddVersionResource(exe)
|
def _FreezeExecutable(self, exe):
finder = self.finder
finder.IncludeFile(exe.script, exe.moduleName)
finder.IncludeFile(exe.initScript, exe.initModuleName)
self._CopyFile(exe.base, exe.targetName, copyDependentFiles=True, includeMode=True)
if self.includeMSVCR:
self._IncludeMSVCR(exe)
# Copy icon
if exe.icon is not None:
if sys.platform == "win32":
import cx_Freeze.util
cx_Freeze.util.AddIcon(exe.targetName, exe.icon)
else:
targetName = os.path.join(
os.path.dirname(exe.targetName), os.path.basename(exe.icon)
)
self._CopyFile(exe.icon, targetName, copyDependentFiles=False)
if not os.access(exe.targetName, os.W_OK):
mode = os.stat(exe.targetName).st_mode
os.chmod(exe.targetName, mode | stat.S_IWUSR)
if self.metadata is not None and sys.platform == "win32":
self._AddVersionResource(exe.targetName)
|
https://github.com/marcelotduarte/cx_Freeze/issues/94
|
C:\Users\Belli\Desktop\failing_cx_freeze>python setup.py build
running build
running build_exe
creating directory demo
copying C:\Miniconda\lib\site-packages\cx_Freeze\bases\Win32GUI.exe -> demo\demo.exe
Stamped: demo\demo.exe
writing zip file demo\demo.zip
Name File
---- ----
m __main__ demo.py
m cx_Freeze__init__ C:\Miniconda\lib\site-packages\cx_Freeze\initscripts\Console.py
Traceback (most recent call last):
File "setup.py", line 47, in <module>
executables = [GUI2Exe_Target_1]
File "C:\Miniconda\lib\site-packages\cx_Freeze\dist.py", line 362, in setup
distutils.core.setup(**attrs)
File "C:\Miniconda\lib\distutils\core.py", line 152, in setup
dist.run_commands()
File "C:\Miniconda\lib\distutils\dist.py", line 953, in run_commands
self.run_command(cmd)
File "C:\Miniconda\lib\distutils\dist.py", line 972, in run_command
cmd_obj.run()
File "C:\Miniconda\lib\distutils\command\build.py", line 127, in run
self.run_command(cmd_name)
File "C:\Miniconda\lib\distutils\cmd.py", line 326, in run_command
self.distribution.run_command(command)
File "C:\Miniconda\lib\distutils\dist.py", line 972, in run_command
cmd_obj.run()
File "C:\Miniconda\lib\site-packages\cx_Freeze\dist.py", line 232, in run
freezer.Freeze()
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 610, in Freeze
self.compress, self.copyDependentFiles)
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 510, in _WriteModules
module.Create(finder)
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 746, in Create
module.file, module.name)
cx_Freeze.freezer.ConfigError: no file named sys (for module CoolProp.CoolProp.sys)
|
cx_Freeze.freezer.ConfigError
|
def __init__(
self,
script,
initScript=None,
base=None,
targetName=None,
icon=None,
shortcutName=None,
shortcutDir=None,
copyright=None,
trademarks=None,
):
self.script = script
self.initScript = initScript or "Console"
self.base = base or "Console"
self.targetName = targetName
self.icon = icon
self.shortcutName = shortcutName
self.shortcutDir = shortcutDir
self.copyright = copyright
self.trademarks = trademarks
|
def __init__(
self,
script,
initScript=None,
base=None,
targetName=None,
icon=None,
shortcutName=None,
shortcutDir=None,
):
self.script = script
self.initScript = initScript or "Console"
self.base = base or "Console"
self.targetName = targetName
self.icon = icon
self.shortcutName = shortcutName
self.shortcutDir = shortcutDir
|
https://github.com/marcelotduarte/cx_Freeze/issues/94
|
C:\Users\Belli\Desktop\failing_cx_freeze>python setup.py build
running build
running build_exe
creating directory demo
copying C:\Miniconda\lib\site-packages\cx_Freeze\bases\Win32GUI.exe -> demo\demo.exe
Stamped: demo\demo.exe
writing zip file demo\demo.zip
Name File
---- ----
m __main__ demo.py
m cx_Freeze__init__ C:\Miniconda\lib\site-packages\cx_Freeze\initscripts\Console.py
Traceback (most recent call last):
File "setup.py", line 47, in <module>
executables = [GUI2Exe_Target_1]
File "C:\Miniconda\lib\site-packages\cx_Freeze\dist.py", line 362, in setup
distutils.core.setup(**attrs)
File "C:\Miniconda\lib\distutils\core.py", line 152, in setup
dist.run_commands()
File "C:\Miniconda\lib\distutils\dist.py", line 953, in run_commands
self.run_command(cmd)
File "C:\Miniconda\lib\distutils\dist.py", line 972, in run_command
cmd_obj.run()
File "C:\Miniconda\lib\distutils\command\build.py", line 127, in run
self.run_command(cmd_name)
File "C:\Miniconda\lib\distutils\cmd.py", line 326, in run_command
self.distribution.run_command(command)
File "C:\Miniconda\lib\distutils\dist.py", line 972, in run_command
cmd_obj.run()
File "C:\Miniconda\lib\site-packages\cx_Freeze\dist.py", line 232, in run
freezer.Freeze()
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 610, in Freeze
self.compress, self.copyDependentFiles)
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 510, in _WriteModules
module.Create(finder)
File "C:\Miniconda\lib\site-packages\cx_Freeze\freezer.py", line 746, in Create
module.file, module.name)
cx_Freeze.freezer.ConfigError: no file named sys (for module CoolProp.CoolProp.sys)
|
cx_Freeze.freezer.ConfigError
|
def _ReplacePathsInCode(self, topLevelModule, co):
"""Replace paths in the code as directed, returning a new code object
with the modified paths in place."""
# Prepare the new filename.
origFileName = newFileName = os.path.normpath(co.co_filename)
for searchValue, replaceValue in self.replacePaths:
if searchValue == "*":
searchValue = os.path.dirname(topLevelModule.file)
if topLevelModule.path:
searchValue = os.path.dirname(searchValue)
if searchValue:
searchValue = searchValue + os.path.sep
if not origFileName.startswith(searchValue):
continue
newFileName = replaceValue + origFileName[len(searchValue) :]
break
# Run on subordinate code objects from function & class definitions.
constants = list(co.co_consts)
for i, value in enumerate(constants):
if isinstance(value, type(co)):
constants[i] = self._ReplacePathsInCode(topLevelModule, value)
# Build the new code object.
params = [
co.co_argcount,
co.co_kwonlyargcount,
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
tuple(constants),
co.co_names,
co.co_varnames,
newFileName,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_freevars,
co.co_cellvars,
]
if hasattr(co, "co_posonlyargcount"):
# PEP570 added "positional only arguments" in Python 3.8
params.insert(1, co.co_posonlyargcount)
return types.CodeType(*params)
|
def _ReplacePathsInCode(self, topLevelModule, co):
"""Replace paths in the code as directed, returning a new code object
with the modified paths in place."""
# Prepare the new filename.
origFileName = newFileName = os.path.normpath(co.co_filename)
for searchValue, replaceValue in self.replacePaths:
if searchValue == "*":
searchValue = os.path.dirname(topLevelModule.file)
if topLevelModule.path:
searchValue = os.path.dirname(searchValue)
if searchValue:
searchValue = searchValue + os.path.sep
if not origFileName.startswith(searchValue):
continue
newFileName = replaceValue + origFileName[len(searchValue) :]
break
# Run on subordinate code objects from function & class definitions.
constants = list(co.co_consts)
for i, value in enumerate(constants):
if isinstance(value, type(co)):
constants[i] = self._ReplacePathsInCode(topLevelModule, value)
# Build the new code object.
return types.CodeType(
co.co_argcount,
co.co_kwonlyargcount,
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
tuple(constants),
co.co_names,
co.co_varnames,
newFileName,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_freevars,
co.co_cellvars,
)
|
https://github.com/marcelotduarte/cx_Freeze/issues/543
|
Outputting to: build\exe.win-amd64-3.8
running build_exe
Traceback (most recent call last):
File "E:/ConParser/setup.py", line 110, in <module>
cx_Freeze.setup(
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\dist.py", line 348, in setup
distutils.core.setup(**attrs)
File "C:\Program Files\Python38\lib\distutils\core.py", line 148, in setup
dist.run_commands()
File "C:\Program Files\Python38\lib\distutils\dist.py", line 966, in run_commands
self.run_command(cmd)
File "C:\Program Files\Python38\lib\distutils\dist.py", line 985, in run_command
cmd_obj.run()
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\dist.py", line 219, in run
freezer.Freeze()
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\freezer.py", line 641, in Freeze
self.finder = self._GetModuleFinder()
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\freezer.py", line 362, in _GetModuleFinder
finder = cx_Freeze.ModuleFinder(self.includeFiles, self.excludes,
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 141, in __init__
self._AddBaseModules()
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 152, in _AddBaseModules
self.IncludeModule("traceback")
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 616, in IncludeModule
module = self._ImportModule(name, deferredImports,
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 296, in _ImportModule
module = self._InternalImportModule(name,
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 389, in _InternalImportModule
module = self._LoadModule(name, fp, path, info, deferredImports,
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 450, in _LoadModule
module.code = self._ReplacePathsInCode(topLevelModule,
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 496, in _ReplacePathsInCode
constants[i] = self._ReplacePathsInCode(topLevelModule, value)
File "C:\Program Files\Python38\lib\site-packages\cx_Freeze\finder.py", line 499, in _ReplacePathsInCode
return types.CodeType(co.co_argcount, co.co_kwonlyargcount,
TypeError: an integer is required (got type bytes)
Process finished with exit code 1
|
TypeError
|
def _ScanCode(self, co, module, deferredImports, topLevel=True):
"""Scan code, looking for imported modules and keeping track of the
constants that have been created in order to better tell which
modules are truly missing."""
arguments = []
importedModule = None
method = (
dis._unpack_opargs if sys.version_info[:2] >= (3, 5) else self._UnpackOpArgs
)
for opIndex, op, opArg in method(co.co_code):
# keep track of constants (these are used for importing)
# immediately restart loop so arguments are retained
if op == LOAD_CONST:
arguments.append(co.co_consts[opArg])
continue
# import statement: attempt to import module
elif op == IMPORT_NAME:
name = co.co_names[opArg]
if len(arguments) >= 2:
relativeImportIndex, fromList = arguments[-2:]
else:
relativeImportIndex = -1
(fromList,) = arguments
if name not in module.excludeNames:
importedModule = self._ImportModule(
name, deferredImports, module, relativeImportIndex
)
if importedModule is not None:
if (
fromList
and fromList != ("*",)
and importedModule.path is not None
):
self._EnsureFromList(
module, importedModule, fromList, deferredImports
)
# import * statement: copy all global names
elif op == IMPORT_STAR and topLevel and importedModule is not None:
module.globalNames.update(importedModule.globalNames)
# store operation: track only top level
elif topLevel and op in STORE_OPS:
name = co.co_names[opArg]
module.globalNames[name] = None
# reset arguments; these are only needed for import statements so
# ignore them in all other cases!
arguments = []
# Scan the code objects from function & class definitions
for constant in co.co_consts:
if isinstance(constant, type(co)):
self._ScanCode(constant, module, deferredImports, topLevel=False)
|
def _ScanCode(self, co, module, deferredImports, topLevel=True):
"""Scan code, looking for imported modules and keeping track of the
constants that have been created in order to better tell which
modules are truly missing."""
opIndex = 0
arguments = []
code = co.co_code
numOps = len(code)
is3 = sys.version_info[0] >= 3
while opIndex < numOps:
if is3:
op = code[opIndex]
else:
op = ord(code[opIndex])
opIndex += 1
if op >= dis.HAVE_ARGUMENT:
if is3:
opArg = code[opIndex] + code[opIndex + 1] * 256
else:
opArg = ord(code[opIndex]) + ord(code[opIndex + 1]) * 256
opIndex += 2
if op == LOAD_CONST:
# Store an argument to be used later by an IMPORT_NAME operation.
arguments.append(co.co_consts[opArg])
elif op == IMPORT_NAME:
name = co.co_names[opArg]
if len(arguments) >= 2:
relativeImportIndex, fromList = arguments[-2:]
else:
relativeImportIndex = -1
(fromList,) = arguments
if name not in module.excludeNames:
# Load the imported module
importedModule = self._ImportModule(
name, deferredImports, module, relativeImportIndex
)
if importedModule is not None:
if (
fromList
and fromList != ("*",)
and importedModule.path is not None
):
self._EnsureFromList(
module, importedModule, fromList, deferredImports
)
elif op == IMPORT_FROM and topLevel:
if is3:
op = code[opIndex]
opArg = code[opIndex + 1] + code[opIndex + 2] * 256
else:
op = ord(code[opIndex])
opArg = ord(code[opIndex + 1]) + ord(code[opIndex + 2]) * 256
opIndex += 3
if op == STORE_FAST:
name = co.co_varnames[opArg]
else:
name = co.co_names[opArg]
storeName = True
if deferredImports:
deferredCaller, deferredPackage, deferredFromList = deferredImports[-1]
storeName = deferredCaller is not module
if storeName:
module.globalNames[name] = None
elif op == IMPORT_STAR and topLevel and importedModule is not None:
module.globalNames.update(importedModule.globalNames)
arguments = []
elif op not in (BUILD_LIST, INPLACE_ADD):
# The stack was used for something else, so we clear it.
if topLevel and op in STORE_OPS:
name = co.co_names[opArg]
module.globalNames[name] = None
arguments = []
# Scan the code objects from function & class definitions
for constant in co.co_consts:
if isinstance(constant, type(co)):
self._ScanCode(constant, module, deferredImports, topLevel=False)
|
https://github.com/marcelotduarte/cx_Freeze/issues/215
|
#!python
[vagrant@localhost curator_source]$ python3.6 setup.py build_exe
running build_exe
Traceback (most recent call last):
File "setup.py", line 124, in <module>
executables = [curator_exe,curator_cli_exe,repomgr_exe]
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/dist.py", line 337, in setup
distutils.core.setup(**attrs)
File "/usr/local/lib/python3.6/distutils/core.py", line 148, in setup
dist.run_commands()
File "/usr/local/lib/python3.6/distutils/dist.py", line 955, in run_commands
self.run_command(cmd)
File "/usr/local/lib/python3.6/distutils/dist.py", line 974, in run_command
cmd_obj.run()
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/dist.py", line 207, in run
freezer.Freeze()
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/freezer.py", line 544, in Freeze
self.finder = self._GetModuleFinder()
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/freezer.py", line 322, in _GetModuleFinder
self.path, self.replacePaths)
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 150, in __init__
self._AddBaseModules()
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 161, in _AddBaseModules
self.IncludeModule("traceback")
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 650, in IncludeModule
namespace = namespace)
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 309, in _ImportModule
deferredImports, namespace = namespace)
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 402, in _InternalImportModule
parentModule, namespace)
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 473, in _LoadModule
self._ScanCode(module.code, module, deferredImports)
File "/home/vagrant/.local/lib/python3.6/site-packages/cx_Freeze/finder.py", line 561, in _ScanCode
arguments.append(co.co_consts[opArg])
IndexError: tuple index out of range
|
IndexError
|
def clone(self, default_value=NoDefaultSpecified, **metadata):
"""Copy, optionally modifying default value and metadata.
Clones the contents of this object into a new instance of the same
class, and then modifies the cloned copy using the specified
``default_value`` and ``metadata``. Returns the cloned object as the
result.
Note that subclasses can change the signature of this method if
needed, but should always call the 'super' method if possible.
Parameters
----------
default_value : any
The new default value for the trait.
**metadata : dict
A dictionary of metadata names and corresponding values as
arbitrary keyword arguments.
Returns
-------
clone : TraitType
Clone of self.
"""
if "parent" not in metadata:
metadata["parent"] = self
new = self.__class__.__new__(self.__class__)
new_dict = new.__dict__
new_dict.update(self.__dict__)
if "editor" in new_dict:
del new_dict["editor"]
if "_metadata" in new_dict:
new._metadata = new._metadata.copy()
else:
new._metadata = {}
new._metadata.update(metadata)
if default_value is not NoDefaultSpecified:
new.default_value = default_value
if self.validate is not None:
try:
new.default_value = self.validate(None, None, default_value)
except Exception:
pass
return new
|
def clone(self, default_value=NoDefaultSpecified, **metadata):
"""Copy, optionally modifying default value and metadata.
Clones the contents of this object into a new instance of the same
class, and then modifies the cloned copy using the specified
``default_value`` and ``metadata``. Returns the cloned object as the
result.
Note that subclasses can change the signature of this method if
needed, but should always call the 'super' method if possible.
Parameters
----------
default_value : any
The new default value for the trait.
**metadata : dict
A dictionary of metadata names and corresponding values as
arbitrary keyword arguments.
"""
if "parent" not in metadata:
metadata["parent"] = self
new = self.__class__.__new__(self.__class__)
new_dict = new.__dict__
new_dict.update(self.__dict__)
if "editor" in new_dict:
del new_dict["editor"]
if "_metadata" in new_dict:
new._metadata = new._metadata.copy()
else:
new._metadata = {}
new._metadata.update(metadata)
if default_value is not NoDefaultSpecified:
new.default_value = default_value
if self.validate is not None:
try:
new.default_value = self.validate(None, None, default_value)
except Exception:
pass
return new
|
https://github.com/enthought/traits/issues/495
|
In [4]: foo.instance_date = None
---------------------------------------------------------------------------
TraitError Traceback (most recent call last)
<ipython-input-4-fa663306bb41> in <module>()
----> 1 foo.instance_date = None
/Users/kchoi/.edm/envs/resist-py36/lib/python3.6/site-packages/traits/trait_handlers.py in error(self, object, name, value)
234 """
235 raise TraitError(
--> 236 object, name, self.full_info(object, name, value), value
237 )
238
TraitError: The 'instance_date' trait of a Foo instance must be a date, but a value of None <class 'NoneType'> was specified.
In [5]: foo.instance_time = None
---------------------------------------------------------------------------
TraitError Traceback (most recent call last)
<ipython-input-5-adf6a97a17be> in <module>()
----> 1 foo.instance_time = None
/Users/kchoi/.edm/envs/resist-py36/lib/python3.6/site-packages/traits/trait_handlers.py in error(self, object, name, value)
234 """
235 raise TraitError(
--> 236 object, name, self.full_info(object, name, value), value
237 )
238
TraitError: The 'instance_time' trait of a Foo instance must be a time, but a value of None <class 'NoneType'> was specified.
|
TraitError
|
def notifier(self, trait_dict, removed, added, changed):
"""Fire the TraitDictEvent with the provided parameters.
Parameters
----------
trait_dict : dict
The complete dictionary.
removed : dict
Dict of removed items.
added : dict
Dict of added items.
changed : dict
Dict of changed items.
"""
if self.name_items is None:
return
object = self.object()
if object is None:
return
if getattr(object, self.name) is not self:
# Workaround having this dict inside another container which
# also uses the name_items trait for notification.
# See enthought/traits#25
return
event = TraitDictEvent(removed=removed, added=added, changed=changed)
items_event = self.trait.items_event()
object.trait_items_event(self.name_items, event, items_event)
|
def notifier(self, trait_dict, removed, added, changed):
"""Fire the TraitDictEvent with the provided parameters.
Parameters
----------
trait_dict : dict
The complete dictionary.
removed : dict
Dict of removed items.
added : dict
Dict of added items.
changed : dict
Dict of changed items.
"""
if self.name_items is None:
return
object = self.object()
if object is None:
return
event = TraitDictEvent(removed=removed, added=added, changed=changed)
items_event = self.trait.items_event()
object.trait_items_event(self.name_items, event, items_event)
|
https://github.com/enthought/traits/issues/25
|
Traceback (most recent call last):
File "/home/punchagan/tmp/foo.py", line 12, in <module>
a.foo[0]['x'] = 20
File "/home/punchagan/work/traits/traits/trait_handlers.py", line 3159, in __setitem__
raise excp
traits.trait_errors.TraitError: Each value of the 'foo_items' trait of an A instance must be an implementor of, or can be adapted to implement, TraitListEvent or None, but a value of <traits.trait_handlers.TraitDictEvent object at 0x13046d0> <class 'traits.trait_handlers.TraitDictEvent'> was specified.
|
traits.trait_errors.TraitError
|
def notifier(self, trait_list, index, removed, added):
"""Converts and consolidates the parameters to a TraitListEvent and
then fires the event.
Parameters
----------
trait_list : list
The list
index : int or slice
Index or slice that was modified
removed : list
Values that were removed
added : list
Values that were added
"""
is_trait_none = self.trait is None
is_name_items_none = self.name_items is None
if not hasattr(self, "trait") or is_trait_none or is_name_items_none:
return
object = self.object()
if object is None:
return
if getattr(object, self.name) is not self:
# Workaround having this list inside another container which
# also uses the name_items trait for notification.
# See enthought/traits#25, enthought/traits#281
return
event = TraitListEvent(index, removed, added)
items_event = self.trait.items_event()
object.trait_items_event(self.name_items, event, items_event)
|
def notifier(self, trait_list, index, removed, added):
"""Converts and consolidates the parameters to a TraitListEvent and
then fires the event.
Parameters
----------
trait_list : list
The list
index : int or slice
Index or slice that was modified
removed : list
Values that were removed
added : list
Values that were added
"""
is_trait_none = self.trait is None
is_name_items_none = self.name_items is None
if not hasattr(self, "trait") or is_trait_none or is_name_items_none:
return
object = self.object()
if object is None:
return
event = TraitListEvent(index, removed, added)
items_event = self.trait.items_event()
object.trait_items_event(self.name_items, event, items_event)
|
https://github.com/enthought/traits/issues/25
|
Traceback (most recent call last):
File "/home/punchagan/tmp/foo.py", line 12, in <module>
a.foo[0]['x'] = 20
File "/home/punchagan/work/traits/traits/trait_handlers.py", line 3159, in __setitem__
raise excp
traits.trait_errors.TraitError: Each value of the 'foo_items' trait of an A instance must be an implementor of, or can be adapted to implement, TraitListEvent or None, but a value of <traits.trait_handlers.TraitDictEvent object at 0x13046d0> <class 'traits.trait_handlers.TraitDictEvent'> was specified.
|
traits.trait_errors.TraitError
|
def notifier(self, trait_set, removed, added):
"""Converts and consolidates the parameters to a TraitSetEvent and
then fires the event.
Parameters
----------
trait_set : set
The complete set
removed : set
Set of values that were removed.
added : set
Set of values that were added.
"""
if self.name_items is None:
return
object = self.object()
if object is None:
return
if getattr(object, self.name) is not self:
# Workaround having this set inside another container which
# also uses the name_items trait for notification.
# Similar to enthought/traits#25
return
event = TraitSetEvent(removed=removed, added=added)
items_event = self.trait.items_event()
object.trait_items_event(self.name_items, event, items_event)
|
def notifier(self, trait_set, removed, added):
"""Converts and consolidates the parameters to a TraitSetEvent and
then fires the event.
Parameters
----------
trait_set : set
The complete set
removed : set
Set of values that were removed.
added : set
Set of values that were added.
"""
if self.name_items is None:
return
object = self.object()
if object is None:
return
event = TraitSetEvent(removed=removed, added=added)
items_event = self.trait.items_event()
object.trait_items_event(self.name_items, event, items_event)
|
https://github.com/enthought/traits/issues/25
|
Traceback (most recent call last):
File "/home/punchagan/tmp/foo.py", line 12, in <module>
a.foo[0]['x'] = 20
File "/home/punchagan/work/traits/traits/trait_handlers.py", line 3159, in __setitem__
raise excp
traits.trait_errors.TraitError: Each value of the 'foo_items' trait of an A instance must be an implementor of, or can be adapted to implement, TraitListEvent or None, but a value of <traits.trait_handlers.TraitDictEvent object at 0x13046d0> <class 'traits.trait_handlers.TraitDictEvent'> was specified.
|
traits.trait_errors.TraitError
|
def validate(self, object, name, value):
if isinstance(value, object.__class__):
return value
self.error(object, name, value)
|
def validate(self, object, name, value):
if isinstance(value, object.__class__):
return value
self.validate_failed(object, name, value)
|
https://github.com/enthought/traits/issues/623
|
from traits.api import *
class A(HasTraits):
... foo = List(This(allow_none=False))
...
a = A()
a.foo = [None]
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_types.py", line 2452, in validate
return TraitListObject(self, object, name, value)
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 2258, in __init__
raise excp
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 2250, in __init__
value = [validate(object, name, val) for val in value]
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 2250, in <listcomp>
value = [validate(object, name, val) for val in value]
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_types.py", line 1075, in validate
self.validate_failed(object, name, value)
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_types.py", line 1096, in validate_failed
self.error(object, name, msg)
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 256, in error
object, name, self.full_info(object, name, value), value
traits.trait_errors.TraitError: Each element of the 'foo' trait of an A instance must be an instance of the same type as the receiver, but a value of "class 'NoneType' (i.e. None)" <class 'str'> was specified.
|
traits.trait_errors.TraitError
|
def validate_none(self, object, name, value):
if isinstance(value, object.__class__) or (value is None):
return value
self.error(object, name, value)
|
def validate_none(self, object, name, value):
if isinstance(value, object.__class__) or (value is None):
return value
self.validate_failed(object, name, value)
|
https://github.com/enthought/traits/issues/623
|
from traits.api import *
class A(HasTraits):
... foo = List(This(allow_none=False))
...
a = A()
a.foo = [None]
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_types.py", line 2452, in validate
return TraitListObject(self, object, name, value)
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 2258, in __init__
raise excp
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 2250, in __init__
value = [validate(object, name, val) for val in value]
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 2250, in <listcomp>
value = [validate(object, name, val) for val in value]
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_types.py", line 1075, in validate
self.validate_failed(object, name, value)
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_types.py", line 1096, in validate_failed
self.error(object, name, msg)
File "/Users/mdickinson/Enthought/ETS/traits/traits/trait_handlers.py", line 256, in error
object, name, self.full_info(object, name, value), value
traits.trait_errors.TraitError: Each element of the 'foo' trait of an A instance must be an instance of the same type as the receiver, but a value of "class 'NoneType' (i.e. None)" <class 'str'> was specified.
|
traits.trait_errors.TraitError
|
def plot_pair(
ax,
infdata_group,
numvars,
figsize,
textsize,
kind,
kde_kwargs,
hexbin_kwargs,
contour, # pylint: disable=unused-argument
plot_kwargs, # pylint: disable=unused-argument
fill_last, # pylint: disable=unused-argument
divergences,
diverging_mask,
flat_var_names,
backend_kwargs,
marginals,
marginal_kwargs,
point_estimate,
point_estimate_kwargs,
point_estimate_marker_kwargs,
reference_values,
reference_values_kwargs,
show,
):
"""Bokeh pair plot."""
if backend_kwargs is None:
backend_kwargs = {}
backend_kwargs = {
**backend_kwarg_defaults(
("dpi", "plot.bokeh.figure.dpi"),
),
**backend_kwargs,
}
if hexbin_kwargs is None:
hexbin_kwargs = {}
hexbin_kwargs.setdefault("size", 0.5)
if kind != "kde":
kde_kwargs.setdefault("contourf_kwargs", {"fill_alpha": 0})
kde_kwargs.setdefault("contour_kwargs", {})
kde_kwargs["contour_kwargs"].setdefault("line_color", "black")
kde_kwargs["contour_kwargs"].setdefault("line_alpha", 1)
if reference_values:
reference_values_copy = {}
label = []
for variable in list(reference_values.keys()):
if " " in variable:
variable_copy = variable.replace(" ", "\n", 1)
else:
variable_copy = variable
label.append(variable_copy)
reference_values_copy[variable_copy] = reference_values[variable]
difference = set(flat_var_names).difference(set(label))
for dif in difference:
reference_values_copy[dif] = None
if difference:
warn = [dif.replace("\n", " ", 1) for dif in difference]
warnings.warn(
"Argument reference_values does not include reference value for: {}".format(
", ".join(warn)
),
UserWarning,
)
if reference_values_kwargs is None:
reference_values_kwargs = {}
reference_values_kwargs.setdefault("line_color", "red")
reference_values_kwargs.setdefault("line_width", 5)
dpi = backend_kwargs.pop("dpi")
max_plots = (
numvars**2
if rcParams["plot.max_subplots"] is None
else rcParams["plot.max_subplots"]
)
vars_to_plot = np.sum(np.arange(numvars).cumsum() < max_plots)
if vars_to_plot < numvars:
warnings.warn(
"rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number "
"of resulting pair plots with these variables, generating only a "
"{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot),
UserWarning,
)
numvars = vars_to_plot
(figsize, _, _, _, _, markersize) = _scale_fig_size(
figsize, textsize, numvars - 2, numvars - 2
)
point_estimate_marker_kwargs.setdefault("line_width", markersize)
point_estimate_kwargs.setdefault("line_color", "orange")
point_estimate_kwargs.setdefault("line_width", 3)
point_estimate_kwargs.setdefault("line_dash", "solid")
tmp_flat_var_names = None
if len(flat_var_names) == len(list(set(flat_var_names))):
source_dict = dict(zip(flat_var_names, [list(post) for post in infdata_group]))
else:
tmp_flat_var_names = [
"{}__{}".format(name, str(uuid4())) for name in flat_var_names
]
source_dict = dict(
zip(tmp_flat_var_names, [list(post) for post in infdata_group])
)
if divergences:
divergenve_name = "divergences_{}".format(str(uuid4()))
source_dict[divergenve_name] = (
np.array(diverging_mask).astype(bool).astype(int).astype(str)
)
source = ColumnDataSource(data=source_dict)
if divergences:
source_nondiv = CDSView(
source=source, filters=[GroupFilter(column_name=divergenve_name, group="0")]
)
source_div = CDSView(
source=source, filters=[GroupFilter(column_name=divergenve_name, group="1")]
)
def get_width_and_height(jointplot, rotate):
"""Compute subplots dimensions for two or more variables."""
if jointplot:
if rotate:
width = int(figsize[0] / (numvars - 1) + 2 * dpi)
height = int(figsize[1] / (numvars - 1) * dpi)
else:
width = int(figsize[0] / (numvars - 1) * dpi)
height = int(figsize[1] / (numvars - 1) + 2 * dpi)
else:
width = int(figsize[0] / (numvars - 1) * dpi)
height = int(figsize[1] / (numvars - 1) * dpi)
return width, height
if marginals:
var = 0
else:
var = 1
if ax is None:
ax = []
backend_kwargs.setdefault("width", int(figsize[0] / (numvars - 1) * dpi))
backend_kwargs.setdefault("height", int(figsize[1] / (numvars - 1) * dpi))
for row in range(numvars - var):
row_ax = []
var1 = (
flat_var_names[row + var]
if tmp_flat_var_names is None
else tmp_flat_var_names[row + var]
)
for n, col in enumerate(range(numvars - var)):
var2 = (
flat_var_names[col]
if tmp_flat_var_names is None
else tmp_flat_var_names[col]
)
backend_kwargs_copy = backend_kwargs.copy()
if "scatter" in kind:
tooltips = [
(var2, "@{{{}}}".format(var2)),
(var1, "@{{{}}}".format(var1)),
]
backend_kwargs_copy.setdefault("tooltips", tooltips)
else:
tooltips = None
if row < col:
row_ax.append(None)
else:
jointplot = row == col and numvars == 2 and marginals
rotate = n == 1
width, height = get_width_and_height(jointplot, rotate)
if jointplot:
ax_ = bkp.figure(width=width, height=height, tooltips=tooltips)
else:
ax_ = bkp.figure(**backend_kwargs_copy)
row_ax.append(ax_)
ax.append(row_ax)
ax = np.array(ax)
else:
assert ax.shape == (numvars - var, numvars - var)
# pylint: disable=too-many-nested-blocks
for i in range(0, numvars - var):
var1 = (
flat_var_names[i] if tmp_flat_var_names is None else tmp_flat_var_names[i]
)
for j in range(0, numvars - var):
var2 = (
flat_var_names[j + var]
if tmp_flat_var_names is None
else tmp_flat_var_names[j + var]
)
if j == i and marginals:
rotate = numvars == 2 and j == 1
var1_dist = infdata_group[i]
plot_dist(
var1_dist,
ax=ax[j, i],
show=False,
backend="bokeh",
rotated=rotate,
**marginal_kwargs,
)
ax[j, i].xaxis.axis_label = flat_var_names[i]
ax[j, i].yaxis.axis_label = flat_var_names[j + var]
elif j + var > i:
if "scatter" in kind:
if divergences:
ax[j, i].circle(var1, var2, source=source, view=source_nondiv)
else:
ax[j, i].circle(var1, var2, source=source)
if "kde" in kind:
var1_kde = infdata_group[i]
var2_kde = infdata_group[j + var]
plot_kde(
var1_kde,
var2_kde,
ax=ax[j, i],
backend="bokeh",
backend_kwargs={},
show=False,
**kde_kwargs,
)
if "hexbin" in kind:
var1_hexbin = infdata_group[i]
var2_hexbin = infdata_group[j + var]
ax[j, i].grid.visible = False
ax[j, i].hexbin(
var1_hexbin,
var2_hexbin,
**hexbin_kwargs,
)
if divergences:
ax[j, i].circle(
var1,
var2,
line_color="black",
fill_color="orange",
line_width=1,
size=10,
source=source,
view=source_div,
)
if point_estimate:
var1_pe = infdata_group[i]
var2_pe = infdata_group[j]
pe_x = calculate_point_estimate(point_estimate, var1_pe)
pe_y = calculate_point_estimate(point_estimate, var2_pe)
ax[j, i].square(pe_x, pe_y, **point_estimate_marker_kwargs)
ax_hline = Span(
location=pe_y,
dimension="width",
**point_estimate_kwargs,
)
ax_vline = Span(
location=pe_x,
dimension="height",
**point_estimate_kwargs,
)
ax[j, i].add_layout(ax_hline)
ax[j, i].add_layout(ax_vline)
if marginals:
ax[j - 1, i].add_layout(ax_vline)
pe_last = calculate_point_estimate(
point_estimate, infdata_group[-1]
)
ax_pe_vline = Span(
location=pe_last,
dimension="height",
**point_estimate_kwargs,
)
ax[-1, -1].add_layout(ax_pe_vline)
if numvars == 2:
ax_pe_hline = Span(
location=pe_last,
dimension="width",
**point_estimate_kwargs,
)
ax[-1, -1].add_layout(ax_pe_hline)
if reference_values:
x = reference_values_copy[flat_var_names[j + var]]
y = reference_values_copy[flat_var_names[i]]
if x and y:
ax[j, i].circle(y, x, **reference_values_kwargs)
ax[j, i].xaxis.axis_label = flat_var_names[i]
ax[j, i].yaxis.axis_label = flat_var_names[j + var]
show_layout(ax, show)
return ax
|
def plot_pair(
ax,
infdata_group,
numvars,
figsize,
textsize,
kind,
kde_kwargs,
hexbin_kwargs,
contour, # pylint: disable=unused-argument
plot_kwargs, # pylint: disable=unused-argument
fill_last, # pylint: disable=unused-argument
divergences,
diverging_mask,
flat_var_names,
backend_kwargs,
diagonal,
marginal_kwargs,
point_estimate,
point_estimate_kwargs,
reference_values,
reference_values_kwargs,
show,
):
"""Bokeh pair plot."""
if backend_kwargs is None:
backend_kwargs = {}
backend_kwargs = {
**backend_kwarg_defaults(
("dpi", "plot.bokeh.figure.dpi"),
),
**backend_kwargs,
}
if hexbin_kwargs is None:
hexbin_kwargs = {}
hexbin_kwargs.setdefault("size", 0.5)
if kind != "kde":
kde_kwargs.setdefault("contourf_kwargs", {"fill_alpha": 0})
kde_kwargs.setdefault("contour_kwargs", {})
kde_kwargs["contour_kwargs"].setdefault("line_color", "black")
kde_kwargs["contour_kwargs"].setdefault("line_alpha", 1)
if reference_values:
reference_values_copy = {}
label = []
for variable in list(reference_values.keys()):
if " " in variable:
variable_copy = variable.replace(" ", "\n", 1)
else:
variable_copy = variable
label.append(variable_copy)
reference_values_copy[variable_copy] = reference_values[variable]
difference = set(flat_var_names).difference(set(label))
for dif in difference:
reference_values_copy[dif] = None
if difference:
warn = [dif.replace("\n", " ", 1) for dif in difference]
warnings.warn(
"Argument reference_values does not include reference value for: {}".format(
", ".join(warn)
),
UserWarning,
)
if reference_values_kwargs is None:
reference_values_kwargs = {}
reference_values_kwargs.setdefault("line_color", "red")
reference_values_kwargs.setdefault("line_width", 5)
dpi = backend_kwargs.pop("dpi")
max_plots = (
numvars**2
if rcParams["plot.max_subplots"] is None
else rcParams["plot.max_subplots"]
)
vars_to_plot = np.sum(np.arange(numvars).cumsum() < max_plots)
if vars_to_plot < numvars:
warnings.warn(
"rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number "
"of resulting pair plots with these variables, generating only a "
"{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot),
UserWarning,
)
numvars = vars_to_plot
(figsize, _, _, _, _, _) = _scale_fig_size(
figsize, textsize, numvars - 2, numvars - 2
)
tmp_flat_var_names = None
if len(flat_var_names) == len(list(set(flat_var_names))):
source_dict = dict(zip(flat_var_names, [list(post) for post in infdata_group]))
else:
tmp_flat_var_names = [
"{}__{}".format(name, str(uuid4())) for name in flat_var_names
]
source_dict = dict(
zip(tmp_flat_var_names, [list(post) for post in infdata_group])
)
if divergences:
divergenve_name = "divergences_{}".format(str(uuid4()))
source_dict[divergenve_name] = (
np.array(diverging_mask).astype(bool).astype(int).astype(str)
)
source = ColumnDataSource(data=source_dict)
if divergences:
source_nondiv = CDSView(
source=source, filters=[GroupFilter(column_name=divergenve_name, group="0")]
)
source_div = CDSView(
source=source, filters=[GroupFilter(column_name=divergenve_name, group="1")]
)
def get_width_and_height(jointplot, rotate):
"""Compute subplots dimensions for two or more variables."""
if jointplot:
if rotate:
width = int(figsize[0] / (numvars - 1) + 2 * dpi)
height = int(figsize[1] / (numvars - 1) * dpi)
else:
width = int(figsize[0] / (numvars - 1) * dpi)
height = int(figsize[1] / (numvars - 1) + 2 * dpi)
else:
width = int(figsize[0] / (numvars - 1) * dpi)
height = int(figsize[1] / (numvars - 1) * dpi)
return width, height
if diagonal:
var = 0
else:
var = 1
if ax is None:
ax = []
backend_kwargs.setdefault("width", int(figsize[0] / (numvars - 1) * dpi))
backend_kwargs.setdefault("height", int(figsize[1] / (numvars - 1) * dpi))
for row in range(numvars - var):
row_ax = []
var1 = (
flat_var_names[row + var]
if tmp_flat_var_names is None
else tmp_flat_var_names[row + var]
)
for n, col in enumerate(range(numvars - var)):
var2 = (
flat_var_names[col]
if tmp_flat_var_names is None
else tmp_flat_var_names[col]
)
backend_kwargs_copy = backend_kwargs.copy()
if "scatter" in kind:
tooltips = [
(var2, "@{{{}}}".format(var2)),
(var1, "@{{{}}}".format(var1)),
]
backend_kwargs_copy.setdefault("tooltips", tooltips)
else:
tooltips = None
if row < col:
row_ax.append(None)
else:
jointplot = row == col and numvars == 2 and diagonal
rotate = n == 1
width, height = get_width_and_height(jointplot, rotate)
if jointplot:
ax_ = bkp.figure(width=width, height=height, tooltips=tooltips)
else:
ax_ = bkp.figure(**backend_kwargs_copy)
row_ax.append(ax_)
ax.append(row_ax)
ax = np.array(ax)
else:
assert ax.shape == (numvars - var, numvars - var)
# pylint: disable=too-many-nested-blocks
for i in range(0, numvars - var):
var1 = (
flat_var_names[i] if tmp_flat_var_names is None else tmp_flat_var_names[i]
)
for j in range(0, numvars - var):
var2 = (
flat_var_names[j + var]
if tmp_flat_var_names is None
else tmp_flat_var_names[j + var]
)
if j == i and diagonal:
rotate = numvars == 2 and j == 1
var1_dist = infdata_group[i]
plot_dist(
var1_dist,
ax=ax[j, i],
show=False,
backend="bokeh",
rotated=rotate,
**marginal_kwargs,
)
ax[j, i].xaxis.axis_label = flat_var_names[i]
ax[j, i].yaxis.axis_label = flat_var_names[j + var]
elif j + var > i:
if "scatter" in kind:
if divergences:
ax[j, i].circle(var1, var2, source=source, view=source_nondiv)
else:
ax[j, i].circle(var1, var2, source=source)
if "kde" in kind:
var1_kde = infdata_group[i]
var2_kde = infdata_group[j + var]
plot_kde(
var1_kde,
var2_kde,
ax=ax[j, i],
backend="bokeh",
backend_kwargs={},
show=False,
**kde_kwargs,
)
if "hexbin" in kind:
var1_hexbin = infdata_group[i]
var2_hexbin = infdata_group[j + var]
ax[j, i].grid.visible = False
ax[j, i].hexbin(
var1_hexbin,
var2_hexbin,
**hexbin_kwargs,
)
if divergences:
ax[j, i].circle(
var1,
var2,
line_color="black",
fill_color="orange",
line_width=1,
size=10,
source=source,
view=source_div,
)
if point_estimate:
var1_pe = infdata_group[i]
var2_pe = infdata_group[j]
pe_x = calculate_point_estimate(point_estimate, var1_pe)
pe_y = calculate_point_estimate(point_estimate, var2_pe)
ax[j, i].square(
pe_x, pe_y, line_width=figsize[0] + 1, **point_estimate_kwargs
)
ax_hline = Span(
location=pe_y,
dimension="width",
line_dash="solid",
line_width=3,
**point_estimate_kwargs,
)
ax_vline = Span(
location=pe_x,
dimension="height",
line_dash="solid",
line_width=3,
**point_estimate_kwargs,
)
ax[j, i].add_layout(ax_hline)
ax[j, i].add_layout(ax_vline)
if diagonal:
ax[j - 1, i].add_layout(ax_vline)
pe_last = calculate_point_estimate(
point_estimate, infdata_group[-1]
)
ax_pe_vline = Span(
location=pe_last,
dimension="height",
line_dash="solid",
line_width=3,
**point_estimate_kwargs,
)
ax[-1, -1].add_layout(ax_pe_vline)
if numvars == 2:
ax_pe_hline = Span(
location=pe_last,
dimension="width",
line_dash="solid",
line_width=3,
**point_estimate_kwargs,
)
ax[-1, -1].add_layout(ax_pe_hline)
if reference_values:
x = reference_values_copy[flat_var_names[j + var]]
y = reference_values_copy[flat_var_names[i]]
if x and y:
ax[j, i].circle(y, x, **reference_values_kwargs)
ax[j, i].xaxis.axis_label = flat_var_names[i]
ax[j, i].yaxis.axis_label = flat_var_names[j + var]
show_layout(ax, show)
return ax
|
https://github.com/arviz-devs/arviz/issues/1166
|
/Users/alex_andorra/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/pairplot.py:167: UserWarning: fill_last and contour will be deprecated. Please use kde_kwargs
"fill_last and contour will be deprecated. Please use kde_kwargs", UserWarning,
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-7-6741897b23b8> in <module>
7 diagonal=True,
8 point_estimate="mean",
----> 9 point_estimate_marker_kwargs={"alpha": 0.6, "s": 6},
10 );
~/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/pairplot.py in plot_pair(data, group, var_names, coords, figsize, textsize, kind, gridsize, contour, plot_kwargs, fill_last, divergences, colorbar, ax, divergences_kwargs, scatter_kwargs, kde_kwargs, hexbin_kwargs, backend, backend_kwargs, diagonal, marginal_kwargs, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs, show)
305 # TODO: Add backend kwargs
306 plot = get_plotting_function("plot_pair", "pairplot", backend)
--> 307 ax = plot(**pairplot_kwargs)
308 return ax
~/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/backends/matplotlib/pairplot.py in plot_pair(ax, infdata_group, numvars, figsize, textsize, kind, fill_last, contour, plot_kwargs, scatter_kwargs, kde_kwargs, hexbin_kwargs, gridsize, colorbar, divergences, diverging_mask, divergences_kwargs, flat_var_names, backend_kwargs, marginal_kwargs, show, diagonal, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs)
264
265 ax[j, i].scatter(
--> 266 pe_x, pe_y, s=figsize[0] + 50, zorder=4, **point_estimate_marker_kwargs
267 )
268
TypeError: scatter() got multiple values for keyword argument 's'
|
TypeError
|
def plot_pair(
ax,
infdata_group,
numvars,
figsize,
textsize,
kind,
fill_last, # pylint: disable=unused-argument
contour, # pylint: disable=unused-argument
plot_kwargs, # pylint: disable=unused-argument
scatter_kwargs,
kde_kwargs,
hexbin_kwargs,
gridsize,
colorbar,
divergences,
diverging_mask,
divergences_kwargs,
flat_var_names,
backend_kwargs,
marginal_kwargs,
show,
marginals,
point_estimate,
point_estimate_kwargs,
point_estimate_marker_kwargs,
reference_values,
reference_values_kwargs,
):
"""Matplotlib pairplot."""
if backend_kwargs is None:
backend_kwargs = {}
backend_kwargs = {
**backend_kwarg_defaults(),
**backend_kwargs,
}
backend_kwargs.pop("constrained_layout")
if hexbin_kwargs is None:
hexbin_kwargs = {}
hexbin_kwargs.setdefault("mincnt", 1)
if kind != "kde":
kde_kwargs.setdefault("contourf_kwargs", {"alpha": 0})
kde_kwargs.setdefault("contour_kwargs", {})
kde_kwargs["contour_kwargs"].setdefault("colors", "k")
if reference_values:
reference_values_copy = {}
label = []
for variable in list(reference_values.keys()):
if " " in variable:
variable_copy = variable.replace(" ", "\n", 1)
else:
variable_copy = variable
label.append(variable_copy)
reference_values_copy[variable_copy] = reference_values[variable]
difference = set(flat_var_names).difference(set(label))
if difference:
warn = [dif.replace("\n", " ", 1) for dif in difference]
warnings.warn(
"Argument reference_values does not include reference value for: {}".format(
", ".join(warn)
),
UserWarning,
)
if reference_values_kwargs is None:
reference_values_kwargs = {}
reference_values_kwargs.setdefault("color", "C3")
reference_values_kwargs.setdefault("marker", "o")
point_estimate_marker_kwargs.setdefault("marker", "s")
point_estimate_marker_kwargs.setdefault("color", "C1")
# pylint: disable=too-many-nested-blocks
if numvars == 2:
(figsize, ax_labelsize, _, xt_labelsize, linewidth, markersize) = (
_scale_fig_size(figsize, textsize, numvars - 1, numvars - 1)
)
marginal_kwargs.setdefault("plot_kwargs", {})
marginal_kwargs["plot_kwargs"].setdefault("linewidth", linewidth)
point_estimate_marker_kwargs.setdefault("s", markersize + 50)
# Flatten data
x = infdata_group[0].flatten()
y = infdata_group[1].flatten()
if ax is None:
if marginals:
# Instantiate figure and grid
widths = [2, 2, 2, 1]
heights = [1.4, 2, 2, 2]
fig, _ = plt.subplots(0, 0, figsize=figsize, **backend_kwargs)
grid = plt.GridSpec(
4,
4,
hspace=0.1,
wspace=0.1,
figure=fig,
width_ratios=widths,
height_ratios=heights,
)
# Set up main plot
ax = fig.add_subplot(grid[1:, :-1])
# Set up top KDE
ax_hist_x = fig.add_subplot(grid[0, :-1], sharex=ax)
ax_hist_x.set_yticks([])
# Set up right KDE
ax_hist_y = fig.add_subplot(grid[1:, -1], sharey=ax)
ax_hist_y.set_xticks([])
ax_return = np.array([[ax_hist_x, None], [ax, ax_hist_y]])
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
# Personalize axes
ax_hist_x.tick_params(labelleft=False, labelbottom=False)
ax_hist_y.tick_params(labelleft=False, labelbottom=False)
else:
fig, ax = plt.subplots(
numvars - 1, numvars - 1, figsize=figsize, **backend_kwargs
)
else:
if marginals:
assert ax.shape == (numvars, numvars)
if ax[0, 1] is not None and ax[0, 1].get_figure() is not None:
ax[0, 1].remove()
ax_return = ax
ax_hist_x = ax[0, 0]
ax_hist_y = ax[1, 1]
ax = ax[1, 0]
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
else:
ax = np.atleast_2d(ax)[0, 0]
if "scatter" in kind:
ax.plot(infdata_group[0], infdata_group[1], **scatter_kwargs)
if "kde" in kind:
plot_kde(infdata_group[0], infdata_group[1], ax=ax, **kde_kwargs)
if "hexbin" in kind:
hexbin = ax.hexbin(
infdata_group[0],
infdata_group[1],
gridsize=gridsize,
**hexbin_kwargs,
)
ax.grid(False)
if kind == "hexbin" and colorbar:
cbar = ax.figure.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], ax=ax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if divergences:
ax.plot(
infdata_group[0][diverging_mask],
infdata_group[1][diverging_mask],
**divergences_kwargs,
)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, x)
pe_y = calculate_point_estimate(point_estimate, y)
if marginals:
ax_hist_x.axvline(pe_x, **point_estimate_kwargs)
ax_hist_y.axhline(pe_y, **point_estimate_kwargs)
ax.axvline(pe_x, **point_estimate_kwargs)
ax.axhline(pe_y, **point_estimate_kwargs)
ax.scatter(pe_x, pe_y, **point_estimate_marker_kwargs)
if reference_values:
ax.plot(
reference_values_copy[flat_var_names[0]],
reference_values_copy[flat_var_names[1]],
**reference_values_kwargs,
)
ax.set_xlabel("{}".format(flat_var_names[0]), fontsize=ax_labelsize, wrap=True)
ax.set_ylabel("{}".format(flat_var_names[1]), fontsize=ax_labelsize, wrap=True)
ax.tick_params(labelsize=xt_labelsize)
else:
max_plots = (
numvars**2
if rcParams["plot.max_subplots"] is None
else rcParams["plot.max_subplots"]
)
vars_to_plot = np.sum(np.arange(numvars).cumsum() < max_plots)
if vars_to_plot < numvars:
warnings.warn(
"rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number "
"of resulting pair plots with these variables, generating only a "
"{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot),
UserWarning,
)
numvars = vars_to_plot
(figsize, ax_labelsize, _, xt_labelsize, _, markersize) = _scale_fig_size(
figsize, textsize, numvars - 2, numvars - 2
)
point_estimate_marker_kwargs.setdefault("s", markersize + 50)
if ax is None:
fig, ax = plt.subplots(numvars, numvars, figsize=figsize, **backend_kwargs)
hexbin_values = []
for i in range(0, numvars):
var1 = infdata_group[i]
for j in range(0, numvars):
var2 = infdata_group[j]
if i > j:
if ax[j, i].get_figure() is not None:
ax[j, i].remove()
continue
elif i == j:
if marginals:
loc = "right"
plot_dist(var1, ax=ax[i, j], **marginal_kwargs)
else:
loc = "left"
if ax[j, i].get_figure() is not None:
ax[j, i].remove()
continue
else:
if "scatter" in kind:
ax[j, i].plot(var1, var2, **scatter_kwargs)
if "kde" in kind:
plot_kde(
var1,
var2,
ax=ax[j, i],
**kde_kwargs,
)
if "hexbin" in kind:
ax[j, i].grid(False)
hexbin = ax[j, i].hexbin(
var1, var2, gridsize=gridsize, **hexbin_kwargs
)
if divergences:
ax[j, i].plot(
var1[diverging_mask],
var2[diverging_mask],
**divergences_kwargs,
)
if kind == "hexbin" and colorbar:
hexbin_values.append(hexbin.norm.vmin)
hexbin_values.append(hexbin.norm.vmax)
divider = make_axes_locatable(ax[-1, -1])
cax = divider.append_axes(loc, size="7%", pad="5%")
cbar = fig.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], cax=cax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, var1)
pe_y = calculate_point_estimate(point_estimate, var2)
ax[j, i].axvline(pe_x, **point_estimate_kwargs)
ax[j, i].axhline(pe_y, **point_estimate_kwargs)
if marginals:
ax[j - 1, i].axvline(pe_x, **point_estimate_kwargs)
pe_last = calculate_point_estimate(
point_estimate, infdata_group[-1]
)
ax[-1, -1].axvline(pe_last, **point_estimate_kwargs)
ax[j, i].scatter(pe_x, pe_y, **point_estimate_marker_kwargs)
if reference_values:
x_name = flat_var_names[i]
y_name = flat_var_names[j]
if x_name and y_name not in difference:
ax[j, i].plot(
reference_values_copy[x_name],
reference_values_copy[y_name],
**reference_values_kwargs,
)
if j != numvars - 1:
ax[j, i].axes.get_xaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_xlabel(
"{}".format(flat_var_names[i]), fontsize=ax_labelsize, wrap=True
)
if i != 0:
ax[j, i].axes.get_yaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_ylabel(
"{}".format(flat_var_names[j]), fontsize=ax_labelsize, wrap=True
)
ax[j, i].tick_params(labelsize=xt_labelsize)
if backend_show(show):
plt.show()
if marginals and numvars == 2:
return ax_return
return ax
|
def plot_pair(
ax,
infdata_group,
numvars,
figsize,
textsize,
kind,
fill_last, # pylint: disable=unused-argument
contour, # pylint: disable=unused-argument
plot_kwargs, # pylint: disable=unused-argument
scatter_kwargs,
kde_kwargs,
hexbin_kwargs,
gridsize,
colorbar,
divergences,
diverging_mask,
divergences_kwargs,
flat_var_names,
backend_kwargs,
marginal_kwargs,
show,
diagonal,
point_estimate,
point_estimate_kwargs,
point_estimate_marker_kwargs,
reference_values,
reference_values_kwargs,
):
"""Matplotlib pairplot."""
if backend_kwargs is None:
backend_kwargs = {}
backend_kwargs = {
**backend_kwarg_defaults(),
**backend_kwargs,
}
backend_kwargs.pop("constrained_layout")
if hexbin_kwargs is None:
hexbin_kwargs = {}
hexbin_kwargs.setdefault("mincnt", 1)
if kind != "kde":
kde_kwargs.setdefault("contourf_kwargs", {"alpha": 0})
kde_kwargs.setdefault("contour_kwargs", {})
kde_kwargs["contour_kwargs"].setdefault("colors", "k")
if reference_values:
reference_values_copy = {}
label = []
for variable in list(reference_values.keys()):
if " " in variable:
variable_copy = variable.replace(" ", "\n", 1)
else:
variable_copy = variable
label.append(variable_copy)
reference_values_copy[variable_copy] = reference_values[variable]
difference = set(flat_var_names).difference(set(label))
if difference:
warn = [dif.replace("\n", " ", 1) for dif in difference]
warnings.warn(
"Argument reference_values does not include reference value for: {}".format(
", ".join(warn)
),
UserWarning,
)
if reference_values_kwargs is None:
reference_values_kwargs = {}
reference_values_kwargs.setdefault("color", "C3")
reference_values_kwargs.setdefault("marker", "o")
# pylint: disable=too-many-nested-blocks
if numvars == 2:
(figsize, ax_labelsize, _, xt_labelsize, linewidth, _) = _scale_fig_size(
figsize, textsize, numvars - 1, numvars - 1
)
marginal_kwargs.setdefault("plot_kwargs", {})
marginal_kwargs["plot_kwargs"].setdefault("linewidth", linewidth)
# Flatten data
x = infdata_group[0].flatten()
y = infdata_group[1].flatten()
if ax is None:
if diagonal:
# Instantiate figure and grid
fig, _ = plt.subplots(0, 0, figsize=figsize, **backend_kwargs)
grid = plt.GridSpec(4, 4, hspace=0.1, wspace=0.1, figure=fig)
# Set up main plot
ax = fig.add_subplot(grid[1:, :-1])
# Set up top KDE
ax_hist_x = fig.add_subplot(grid[0, :-1], sharex=ax)
# Set up right KDE
ax_hist_y = fig.add_subplot(grid[1:, -1], sharey=ax)
ax_return = np.array([[ax_hist_x, None], [ax, ax_hist_y]])
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
# Personalize axes
ax_hist_x.tick_params(labelleft=False, labelbottom=False)
ax_hist_y.tick_params(labelleft=False, labelbottom=False)
else:
fig, ax = plt.subplots(
numvars - 1, numvars - 1, figsize=figsize, **backend_kwargs
)
else:
if diagonal:
assert ax.shape == (numvars, numvars)
if ax[0, 1] is not None and ax[0, 1].get_figure() is not None:
ax[0, 1].remove()
ax_return = ax
ax_hist_x = ax[0, 0]
ax_hist_y = ax[1, 1]
ax = ax[1, 0]
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
else:
ax = np.atleast_2d(ax)[0, 0]
if "scatter" in kind:
ax.plot(infdata_group[0], infdata_group[1], **scatter_kwargs)
if "kde" in kind:
plot_kde(infdata_group[0], infdata_group[1], ax=ax, **kde_kwargs)
if "hexbin" in kind:
hexbin = ax.hexbin(
infdata_group[0],
infdata_group[1],
gridsize=gridsize,
**hexbin_kwargs,
)
ax.grid(False)
if kind == "hexbin" and colorbar:
cbar = ax.figure.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], ax=ax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if divergences:
ax.plot(
infdata_group[0][diverging_mask],
infdata_group[1][diverging_mask],
**divergences_kwargs,
)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, x)
pe_y = calculate_point_estimate(point_estimate, y)
if diagonal:
ax_hist_x.axvline(pe_x, **point_estimate_kwargs)
ax_hist_y.axhline(pe_y, **point_estimate_kwargs)
ax.axvline(pe_x, **point_estimate_kwargs)
ax.axhline(pe_y, **point_estimate_kwargs)
ax.scatter(
pe_x,
pe_y,
marker="s",
s=figsize[0] + 50,
**point_estimate_kwargs,
zorder=4,
)
if reference_values:
ax.plot(
reference_values_copy[flat_var_names[0]],
reference_values_copy[flat_var_names[1]],
**reference_values_kwargs,
)
ax.set_xlabel("{}".format(flat_var_names[0]), fontsize=ax_labelsize, wrap=True)
ax.set_ylabel("{}".format(flat_var_names[1]), fontsize=ax_labelsize, wrap=True)
ax.tick_params(labelsize=xt_labelsize)
else:
max_plots = (
numvars**2
if rcParams["plot.max_subplots"] is None
else rcParams["plot.max_subplots"]
)
vars_to_plot = np.sum(np.arange(numvars).cumsum() < max_plots)
if vars_to_plot < numvars:
warnings.warn(
"rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number "
"of resulting pair plots with these variables, generating only a "
"{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot),
UserWarning,
)
numvars = vars_to_plot
(figsize, ax_labelsize, _, xt_labelsize, _, _) = _scale_fig_size(
figsize, textsize, numvars - 2, numvars - 2
)
if ax is None:
fig, ax = plt.subplots(numvars, numvars, figsize=figsize, **backend_kwargs)
hexbin_values = []
for i in range(0, numvars):
var1 = infdata_group[i]
for j in range(0, numvars):
var2 = infdata_group[j]
if i > j:
if ax[j, i].get_figure() is not None:
ax[j, i].remove()
continue
elif i == j:
if diagonal:
loc = "right"
plot_dist(var1, ax=ax[i, j], **marginal_kwargs)
else:
loc = "left"
if ax[j, i].get_figure() is not None:
ax[j, i].remove()
continue
else:
if "scatter" in kind:
ax[j, i].plot(var1, var2, **scatter_kwargs)
if "kde" in kind:
plot_kde(
var1,
var2,
ax=ax[j, i],
**kde_kwargs,
)
if "hexbin" in kind:
ax[j, i].grid(False)
hexbin = ax[j, i].hexbin(
var1, var2, gridsize=gridsize, **hexbin_kwargs
)
if divergences:
ax[j, i].plot(
var1[diverging_mask],
var2[diverging_mask],
**divergences_kwargs,
)
if kind == "hexbin" and colorbar:
hexbin_values.append(hexbin.norm.vmin)
hexbin_values.append(hexbin.norm.vmax)
divider = make_axes_locatable(ax[-1, -1])
cax = divider.append_axes(loc, size="7%", pad="5%")
cbar = fig.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], cax=cax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, var1)
pe_y = calculate_point_estimate(point_estimate, var2)
ax[j, i].axvline(pe_x, **point_estimate_kwargs)
ax[j, i].axhline(pe_y, **point_estimate_kwargs)
if diagonal:
ax[j - 1, i].axvline(pe_x, **point_estimate_kwargs)
pe_last = calculate_point_estimate(
point_estimate, infdata_group[-1]
)
ax[-1, -1].axvline(pe_last, **point_estimate_kwargs)
ax[j, i].scatter(
pe_x,
pe_y,
s=figsize[0] + 50,
zorder=4,
**point_estimate_marker_kwargs,
)
if reference_values:
x_name = flat_var_names[i]
y_name = flat_var_names[j]
if x_name and y_name not in difference:
ax[j, i].plot(
reference_values_copy[x_name],
reference_values_copy[y_name],
**reference_values_kwargs,
)
if j != numvars - 1:
ax[j, i].axes.get_xaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_xlabel(
"{}".format(flat_var_names[i]), fontsize=ax_labelsize, wrap=True
)
if i != 0:
ax[j, i].axes.get_yaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_ylabel(
"{}".format(flat_var_names[j]), fontsize=ax_labelsize, wrap=True
)
ax[j, i].tick_params(labelsize=xt_labelsize)
if backend_show(show):
plt.show()
if diagonal and numvars == 2:
return ax_return
return ax
|
https://github.com/arviz-devs/arviz/issues/1166
|
/Users/alex_andorra/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/pairplot.py:167: UserWarning: fill_last and contour will be deprecated. Please use kde_kwargs
"fill_last and contour will be deprecated. Please use kde_kwargs", UserWarning,
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-7-6741897b23b8> in <module>
7 diagonal=True,
8 point_estimate="mean",
----> 9 point_estimate_marker_kwargs={"alpha": 0.6, "s": 6},
10 );
~/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/pairplot.py in plot_pair(data, group, var_names, coords, figsize, textsize, kind, gridsize, contour, plot_kwargs, fill_last, divergences, colorbar, ax, divergences_kwargs, scatter_kwargs, kde_kwargs, hexbin_kwargs, backend, backend_kwargs, diagonal, marginal_kwargs, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs, show)
305 # TODO: Add backend kwargs
306 plot = get_plotting_function("plot_pair", "pairplot", backend)
--> 307 ax = plot(**pairplot_kwargs)
308 return ax
~/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/backends/matplotlib/pairplot.py in plot_pair(ax, infdata_group, numvars, figsize, textsize, kind, fill_last, contour, plot_kwargs, scatter_kwargs, kde_kwargs, hexbin_kwargs, gridsize, colorbar, divergences, diverging_mask, divergences_kwargs, flat_var_names, backend_kwargs, marginal_kwargs, show, diagonal, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs)
264
265 ax[j, i].scatter(
--> 266 pe_x, pe_y, s=figsize[0] + 50, zorder=4, **point_estimate_marker_kwargs
267 )
268
TypeError: scatter() got multiple values for keyword argument 's'
|
TypeError
|
def plot_pair(
data,
group="posterior",
var_names: Optional[List[str]] = None,
filter_vars: Optional[str] = None,
coords=None,
figsize=None,
textsize=None,
kind: Union[str, List[str]] = "scatter",
gridsize="auto",
contour: Optional[bool] = None,
plot_kwargs=None,
fill_last=False,
divergences=False,
colorbar=False,
ax=None,
divergences_kwargs=None,
scatter_kwargs=None,
kde_kwargs=None,
hexbin_kwargs=None,
backend=None,
backend_kwargs=None,
marginals=False,
marginal_kwargs=None,
point_estimate=None,
point_estimate_kwargs=None,
point_estimate_marker_kwargs=None,
reference_values=None,
reference_values_kwargs=None,
show=None,
):
"""
Plot a scatter, kde and/or hexbin matrix with (optional) marginals on the diagonal.
Parameters
----------
data: obj
Any object that can be converted to an az.InferenceData object
Refer to documentation of az.convert_to_dataset for details
group: str, optional
Specifies which InferenceData group should be plotted. Defaults to 'posterior'.
var_names: list of variable names, optional
Variables to be plotted, if None all variable are plotted. Prefix the
variables by `~` when you want to exclude them from the plot.
filter_vars: {None, "like", "regex"}, optional, default=None
If `None` (default), interpret var_names as the real variables names. If "like",
interpret var_names as substrings of the real variables names. If "regex",
interpret var_names as regular expressions on the real variables names. A la
`pandas.filter`.
coords: mapping, optional
Coordinates of var_names to be plotted. Passed to `Dataset.sel`
figsize: figure size tuple
If None, size is (8 + numvars, 8 + numvars)
textsize: int
Text size for labels. If None it will be autoscaled based on figsize.
kind : str or List[str]
Type of plot to display (scatter, kde and/or hexbin)
gridsize: int or (int, int), optional
Only works for kind=hexbin.
The number of hexagons in the x-direction. The corresponding number of hexagons in the
y-direction is chosen such that the hexagons are approximately regular.
Alternatively, gridsize can be a tuple with two elements specifying the number of hexagons
in the x-direction and the y-direction.
contour : bool, optional, deprecated, Defaults to True.
If True plot the 2D KDE using contours, otherwise plot a smooth 2D KDE. Defaults to True.
**Note:** this default is implemented in the body of the code, not in argument processing.
fill_last : bool
If True fill the last contour of the 2D KDE plot. Defaults to True.
divergences: Boolean
If True divergences will be plotted in a different color, only if group is either 'prior'
or 'posterior'.
colorbar: bool
If True a colorbar will be included as part of the plot (Defaults to False).
Only works when kind=hexbin
ax: axes, optional
Matplotlib axes or bokeh figures.
divergences_kwargs: dicts, optional
Additional keywords passed to ax.scatter for divergences
scatter_kwargs:
Additional keywords passed to ax.plot when using scatter kind
kde_kwargs: dict, optional
Additional keywords passed to az.plot_kde when using kde kind
hexbin_kwargs: dict, optional
Additional keywords passed to ax.hexbin when using hexbin kind
backend: str, optional
Select plotting backend {"matplotlib","bokeh"}. Default "matplotlib".
backend_kwargs: bool, optional
These are kwargs specific to the backend being used. For additional documentation
check the plotting method of the backend.
diagonal: bool, optional
If True pairplot will include marginal distributions for every variable
marginal_kwargs: dict, optional
Additional keywords passed to az.plot_dist, modifying the marginal distributions
plotted in the diagonal.
point_estimate: str, optional
Select point estimate from 'mean', 'mode' or 'median'. The point estimate will be
plotted using a scatter marker and vertical/horizontal lines.
point_estimate_kwargs: dict, optional
Additional keywords passed to ax.vline, ax.hline (matplotlib) or ax.square, Span (bokeh)
point_estimate_marker_kwargs: dict, optional
Additional keywords passed to ax.scatter in point estimate plot. Not available in bokeh
reference_values: dict, optional
Reference values for the plotted variables. The Reference values will be plotted
using a scatter marker
reference_values_kwargs: dict, optional
Additional keywords passed to ax.plot or ax.circle in reference values plot
show: bool, optional
Call backend show function.
Returns
-------
axes: matplotlib axes or bokeh figures
Examples
--------
KDE Pair Plot
.. plot::
:context: close-figs
>>> import arviz as az
>>> centered = az.load_arviz_data('centered_eight')
>>> coords = {'school': ['Choate', 'Deerfield']}
>>> az.plot_pair(centered,
>>> var_names=['theta', 'mu', 'tau'],
>>> kind='kde',
>>> coords=coords,
>>> divergences=True,
>>> textsize=18)
Hexbin pair plot
.. plot::
:context: close-figs
>>> az.plot_pair(centered,
>>> var_names=['theta', 'mu'],
>>> coords=coords,
>>> textsize=18,
>>> kind='hexbin')
Pair plot showing divergences and select variables with regular expressions
.. plot::
:context: close-figs
>>> az.plot_pair(centered,
... var_names=['^t', 'mu'],
... filter_vars="regex",
... coords=coords,
... divergences=True,
... textsize=18)
"""
valid_kinds = ["scatter", "kde", "hexbin"]
kind_boolean: Union[bool, List[bool]]
if isinstance(kind, str):
kind_boolean = kind in valid_kinds
else:
kind_boolean = [kind[i] in valid_kinds for i in range(len(kind))]
if not np.all(kind_boolean):
raise ValueError(
(f"Plot type {kind} not recognized.Plot type must be in {{valid_kinds}}")
)
if fill_last or contour:
warnings.warn(
"fill_last and contour will be deprecated. Please use kde_kwargs",
UserWarning,
)
if contour is None:
contour = True
if coords is None:
coords = {}
if plot_kwargs is None:
plot_kwargs = {}
else:
warnings.warn(
"plot_kwargs will be deprecated."
" Please use scatter_kwargs, kde_kwargs and/or hexbin_kwargs",
UserWarning,
)
if scatter_kwargs is None:
scatter_kwargs = {}
scatter_kwargs.setdefault("marker", ".")
scatter_kwargs.setdefault("lw", 0)
scatter_kwargs.setdefault("zorder", 0)
if kde_kwargs is None:
kde_kwargs = {}
if hexbin_kwargs is None:
hexbin_kwargs = {}
if divergences_kwargs is None:
divergences_kwargs = {}
divergences_kwargs.setdefault("marker", "o")
divergences_kwargs.setdefault("markeredgecolor", "k")
divergences_kwargs.setdefault("color", "C1")
divergences_kwargs.setdefault("lw", 0)
if marginal_kwargs is None:
marginal_kwargs = {}
if point_estimate_kwargs is None:
point_estimate_kwargs = {}
if point_estimate_marker_kwargs is None:
point_estimate_marker_kwargs = {}
# Get posterior draws and combine chains
data = convert_to_inference_data(data)
grouped_data = convert_to_dataset(data, group=group)
var_names = _var_names(var_names, grouped_data, filter_vars)
flat_var_names, infdata_group = xarray_to_ndarray(
get_coords(grouped_data, coords), var_names=var_names, combined=True
)
divergent_data = None
diverging_mask = None
# Assigning divergence group based on group param
if group == "posterior":
divergent_group = "sample_stats"
elif group == "prior":
divergent_group = "sample_stats_prior"
else:
divergences = False
# Get diverging draws and combine chains
if divergences:
if hasattr(data, divergent_group) and hasattr(
getattr(data, divergent_group), "diverging"
):
divergent_data = convert_to_dataset(data, group=divergent_group)
_, diverging_mask = xarray_to_ndarray(
divergent_data, var_names=("diverging",), combined=True
)
diverging_mask = np.squeeze(diverging_mask)
else:
divergences = False
warnings.warn(
"Divergences data not found, plotting without divergences. "
"Make sure the sample method provides divergences data and "
"that it is present in the `diverging` field of `sample_stats` "
"or `sample_stats_prior` or set divergences=False",
UserWarning,
)
if gridsize == "auto":
gridsize = int(len(infdata_group[0]) ** 0.35)
numvars = len(flat_var_names)
if numvars < 2:
raise Exception("Number of variables to be plotted must be 2 or greater.")
pairplot_kwargs = dict(
ax=ax,
infdata_group=infdata_group,
numvars=numvars,
figsize=figsize,
textsize=textsize,
kind=kind,
plot_kwargs=plot_kwargs,
scatter_kwargs=scatter_kwargs,
kde_kwargs=kde_kwargs,
hexbin_kwargs=hexbin_kwargs,
contour=contour,
fill_last=fill_last,
gridsize=gridsize,
colorbar=colorbar,
divergences=divergences,
diverging_mask=diverging_mask,
divergences_kwargs=divergences_kwargs,
flat_var_names=flat_var_names,
backend_kwargs=backend_kwargs,
marginal_kwargs=marginal_kwargs,
show=show,
marginals=marginals,
point_estimate=point_estimate,
point_estimate_kwargs=point_estimate_kwargs,
point_estimate_marker_kwargs=point_estimate_marker_kwargs,
reference_values=reference_values,
reference_values_kwargs=reference_values_kwargs,
)
if backend is None:
backend = rcParams["plot.backend"]
backend = backend.lower()
if backend == "bokeh":
pairplot_kwargs.pop("gridsize", None)
pairplot_kwargs.pop("colorbar", None)
pairplot_kwargs.pop("divergences_kwargs", None)
pairplot_kwargs.pop("hexbin_values", None)
pairplot_kwargs.pop("scatter_kwargs", None)
point_estimate_kwargs.setdefault("line_color", "orange")
point_estimate_marker_kwargs.setdefault("line_color", "orange")
else:
point_estimate_kwargs.setdefault("color", "C1")
# TODO: Add backend kwargs
plot = get_plotting_function("plot_pair", "pairplot", backend)
ax = plot(**pairplot_kwargs)
return ax
|
def plot_pair(
data,
group="posterior",
var_names: Optional[List[str]] = None,
filter_vars: Optional[str] = None,
coords=None,
figsize=None,
textsize=None,
kind: Union[str, List[str]] = "scatter",
gridsize="auto",
contour: Optional[bool] = None,
plot_kwargs=None,
fill_last=False,
divergences=False,
colorbar=False,
ax=None,
divergences_kwargs=None,
scatter_kwargs=None,
kde_kwargs=None,
hexbin_kwargs=None,
backend=None,
backend_kwargs=None,
diagonal=False,
marginal_kwargs=None,
point_estimate=None,
point_estimate_kwargs=None,
point_estimate_marker_kwargs=None,
reference_values=None,
reference_values_kwargs=None,
show=None,
):
"""
Plot a scatter, kde and/or hexbin matrix with (optional) marginals on the diagonal.
Parameters
----------
data: obj
Any object that can be converted to an az.InferenceData object
Refer to documentation of az.convert_to_dataset for details
group: str, optional
Specifies which InferenceData group should be plotted. Defaults to 'posterior'.
var_names: list of variable names, optional
Variables to be plotted, if None all variable are plotted. Prefix the
variables by `~` when you want to exclude them from the plot.
filter_vars: {None, "like", "regex"}, optional, default=None
If `None` (default), interpret var_names as the real variables names. If "like",
interpret var_names as substrings of the real variables names. If "regex",
interpret var_names as regular expressions on the real variables names. A la
`pandas.filter`.
coords: mapping, optional
Coordinates of var_names to be plotted. Passed to `Dataset.sel`
figsize: figure size tuple
If None, size is (8 + numvars, 8 + numvars)
textsize: int
Text size for labels. If None it will be autoscaled based on figsize.
kind : str or List[str]
Type of plot to display (scatter, kde and/or hexbin)
gridsize: int or (int, int), optional
Only works for kind=hexbin.
The number of hexagons in the x-direction. The corresponding number of hexagons in the
y-direction is chosen such that the hexagons are approximately regular.
Alternatively, gridsize can be a tuple with two elements specifying the number of hexagons
in the x-direction and the y-direction.
contour : bool, optional, deprecated, Defaults to True.
If True plot the 2D KDE using contours, otherwise plot a smooth 2D KDE. Defaults to True.
**Note:** this default is implemented in the body of the code, not in argument processing.
fill_last : bool
If True fill the last contour of the 2D KDE plot. Defaults to True.
divergences: Boolean
If True divergences will be plotted in a different color, only if group is either 'prior'
or 'posterior'.
colorbar: bool
If True a colorbar will be included as part of the plot (Defaults to False).
Only works when kind=hexbin
ax: axes, optional
Matplotlib axes or bokeh figures.
divergences_kwargs: dicts, optional
Additional keywords passed to ax.scatter for divergences
scatter_kwargs:
Additional keywords passed to ax.plot when using scatter kind
kde_kwargs: dict, optional
Additional keywords passed to az.plot_kde when using kde kind
hexbin_kwargs: dict, optional
Additional keywords passed to ax.hexbin when using hexbin kind
backend: str, optional
Select plotting backend {"matplotlib","bokeh"}. Default "matplotlib".
backend_kwargs: bool, optional
These are kwargs specific to the backend being used. For additional documentation
check the plotting method of the backend.
diagonal: bool, optional
If True pairplot will include marginal distributions for every variable
marginal_kwargs: dict, optional
Additional keywords passed to az.plot_dist, modifying the marginal distributions
plotted in the diagonal.
point_estimate: str, optional
Select point estimate from 'mean', 'mode' or 'median'. The point estimate will be
plotted using a scatter marker and vertical/horizontal lines.
point_estimate_kwargs: dict, optional
Additional keywords passed to ax.vline, ax.hline (matplotlib) or ax.square, Span (bokeh)
point_estimate_marker_kwargs: dict, optional
Additional keywords passed to ax.scatter in point estimate plot. Not available in bokeh
reference_values: dict, optional
Reference values for the plotted variables. The Reference values will be plotted
using a scatter marker
reference_values_kwargs: dict, optional
Additional keywords passed to ax.plot or ax.circle in reference values plot
show: bool, optional
Call backend show function.
Returns
-------
axes: matplotlib axes or bokeh figures
Examples
--------
KDE Pair Plot
.. plot::
:context: close-figs
>>> import arviz as az
>>> centered = az.load_arviz_data('centered_eight')
>>> coords = {'school': ['Choate', 'Deerfield']}
>>> az.plot_pair(centered,
>>> var_names=['theta', 'mu', 'tau'],
>>> kind='kde',
>>> coords=coords,
>>> divergences=True,
>>> textsize=18)
Hexbin pair plot
.. plot::
:context: close-figs
>>> az.plot_pair(centered,
>>> var_names=['theta', 'mu'],
>>> coords=coords,
>>> textsize=18,
>>> kind='hexbin')
Pair plot showing divergences and select variables with regular expressions
.. plot::
:context: close-figs
>>> az.plot_pair(centered,
... var_names=['^t', 'mu'],
... filter_vars="regex",
... coords=coords,
... divergences=True,
... textsize=18)
"""
valid_kinds = ["scatter", "kde", "hexbin"]
kind_boolean: Union[bool, List[bool]]
if isinstance(kind, str):
kind_boolean = kind in valid_kinds
else:
kind_boolean = [kind[i] in valid_kinds for i in range(len(kind))]
if not np.all(kind_boolean):
raise ValueError(
(f"Plot type {kind} not recognized.Plot type must be in {{valid_kinds}}")
)
if fill_last or contour:
warnings.warn(
"fill_last and contour will be deprecated. Please use kde_kwargs",
UserWarning,
)
if contour is None:
contour = True
if coords is None:
coords = {}
if plot_kwargs is None:
plot_kwargs = {}
else:
warnings.warn(
"plot_kwargs will be deprecated."
" Please use scatter_kwargs, kde_kwargs and/or hexbin_kwargs",
UserWarning,
)
if scatter_kwargs is None:
scatter_kwargs = {}
scatter_kwargs.setdefault("marker", ".")
scatter_kwargs.setdefault("lw", 0)
scatter_kwargs.setdefault("zorder", 0)
if kde_kwargs is None:
kde_kwargs = {}
if hexbin_kwargs is None:
hexbin_kwargs = {}
if divergences_kwargs is None:
divergences_kwargs = {}
divergences_kwargs.setdefault("marker", "o")
divergences_kwargs.setdefault("markeredgecolor", "k")
divergences_kwargs.setdefault("color", "C1")
divergences_kwargs.setdefault("lw", 0)
if marginal_kwargs is None:
marginal_kwargs = {}
if point_estimate_kwargs is None:
point_estimate_kwargs = {}
if point_estimate_marker_kwargs is None:
point_estimate_marker_kwargs = {}
point_estimate_marker_kwargs.setdefault("marker", "s")
point_estimate_marker_kwargs.setdefault("color", "C1")
# Get posterior draws and combine chains
data = convert_to_inference_data(data)
grouped_data = convert_to_dataset(data, group=group)
var_names = _var_names(var_names, grouped_data, filter_vars)
flat_var_names, infdata_group = xarray_to_ndarray(
get_coords(grouped_data, coords), var_names=var_names, combined=True
)
divergent_data = None
diverging_mask = None
# Assigning divergence group based on group param
if group == "posterior":
divergent_group = "sample_stats"
elif group == "prior":
divergent_group = "sample_stats_prior"
else:
divergences = False
# Get diverging draws and combine chains
if divergences:
if hasattr(data, divergent_group) and hasattr(
getattr(data, divergent_group), "diverging"
):
divergent_data = convert_to_dataset(data, group=divergent_group)
_, diverging_mask = xarray_to_ndarray(
divergent_data, var_names=("diverging",), combined=True
)
diverging_mask = np.squeeze(diverging_mask)
else:
divergences = False
warnings.warn(
"Divergences data not found, plotting without divergences. "
"Make sure the sample method provides divergences data and "
"that it is present in the `diverging` field of `sample_stats` "
"or `sample_stats_prior` or set divergences=False",
UserWarning,
)
if gridsize == "auto":
gridsize = int(len(infdata_group[0]) ** 0.35)
numvars = len(flat_var_names)
if numvars < 2:
raise Exception("Number of variables to be plotted must be 2 or greater.")
pairplot_kwargs = dict(
ax=ax,
infdata_group=infdata_group,
numvars=numvars,
figsize=figsize,
textsize=textsize,
kind=kind,
plot_kwargs=plot_kwargs,
scatter_kwargs=scatter_kwargs,
kde_kwargs=kde_kwargs,
hexbin_kwargs=hexbin_kwargs,
contour=contour,
fill_last=fill_last,
gridsize=gridsize,
colorbar=colorbar,
divergences=divergences,
diverging_mask=diverging_mask,
divergences_kwargs=divergences_kwargs,
flat_var_names=flat_var_names,
backend_kwargs=backend_kwargs,
marginal_kwargs=marginal_kwargs,
show=show,
diagonal=diagonal,
point_estimate=point_estimate,
point_estimate_kwargs=point_estimate_kwargs,
point_estimate_marker_kwargs=point_estimate_marker_kwargs,
reference_values=reference_values,
reference_values_kwargs=reference_values_kwargs,
)
if backend is None:
backend = rcParams["plot.backend"]
backend = backend.lower()
if backend == "bokeh":
pairplot_kwargs.pop("gridsize", None)
pairplot_kwargs.pop("colorbar", None)
pairplot_kwargs.pop("divergences_kwargs", None)
pairplot_kwargs.pop("hexbin_values", None)
pairplot_kwargs.pop("scatter_kwargs", None)
pairplot_kwargs.pop("point_estimate_marker_kwargs", None)
point_estimate_kwargs.setdefault("line_color", "orange")
else:
point_estimate_kwargs.setdefault("color", "C1")
# TODO: Add backend kwargs
plot = get_plotting_function("plot_pair", "pairplot", backend)
ax = plot(**pairplot_kwargs)
return ax
|
https://github.com/arviz-devs/arviz/issues/1166
|
/Users/alex_andorra/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/pairplot.py:167: UserWarning: fill_last and contour will be deprecated. Please use kde_kwargs
"fill_last and contour will be deprecated. Please use kde_kwargs", UserWarning,
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-7-6741897b23b8> in <module>
7 diagonal=True,
8 point_estimate="mean",
----> 9 point_estimate_marker_kwargs={"alpha": 0.6, "s": 6},
10 );
~/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/pairplot.py in plot_pair(data, group, var_names, coords, figsize, textsize, kind, gridsize, contour, plot_kwargs, fill_last, divergences, colorbar, ax, divergences_kwargs, scatter_kwargs, kde_kwargs, hexbin_kwargs, backend, backend_kwargs, diagonal, marginal_kwargs, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs, show)
305 # TODO: Add backend kwargs
306 plot = get_plotting_function("plot_pair", "pairplot", backend)
--> 307 ax = plot(**pairplot_kwargs)
308 return ax
~/opt/anaconda3/envs/stat-rethink-pymc3/lib/python3.7/site-packages/arviz/plots/backends/matplotlib/pairplot.py in plot_pair(ax, infdata_group, numvars, figsize, textsize, kind, fill_last, contour, plot_kwargs, scatter_kwargs, kde_kwargs, hexbin_kwargs, gridsize, colorbar, divergences, diverging_mask, divergences_kwargs, flat_var_names, backend_kwargs, marginal_kwargs, show, diagonal, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs)
264
265 ax[j, i].scatter(
--> 266 pe_x, pe_y, s=figsize[0] + 50, zorder=4, **point_estimate_marker_kwargs
267 )
268
TypeError: scatter() got multiple values for keyword argument 's'
|
TypeError
|
def plot_pair(
ax,
infdata_group,
numvars,
figsize,
textsize,
kind,
fill_last, # pylint: disable=unused-argument
contour, # pylint: disable=unused-argument
plot_kwargs, # pylint: disable=unused-argument
scatter_kwargs,
kde_kwargs,
hexbin_kwargs,
gridsize,
colorbar,
divergences,
diverging_mask,
divergences_kwargs,
flat_var_names,
backend_kwargs,
marginal_kwargs,
show,
diagonal,
point_estimate,
point_estimate_kwargs,
point_estimate_marker_kwargs,
):
"""Matplotlib pairplot."""
if backend_kwargs is None:
backend_kwargs = {}
backend_kwargs = {
**backend_kwarg_defaults(),
**backend_kwargs,
}
backend_kwargs.pop("constrained_layout")
if hexbin_kwargs is None:
hexbin_kwargs = {}
hexbin_kwargs.setdefault("mincnt", 1)
if kind != "kde":
kde_kwargs.setdefault("contourf_kwargs", {"alpha": 0})
kde_kwargs.setdefault("contour_kwargs", {})
kde_kwargs["contour_kwargs"].setdefault("colors", "k")
# pylint: disable=too-many-nested-blocks
if numvars == 2:
(figsize, ax_labelsize, _, xt_labelsize, linewidth, _) = _scale_fig_size(
figsize, textsize, numvars - 1, numvars - 1
)
marginal_kwargs.setdefault("plot_kwargs", {})
marginal_kwargs["plot_kwargs"].setdefault("linewidth", linewidth)
# Flatten data
x = infdata_group[0].flatten()
y = infdata_group[1].flatten()
if ax is None:
if diagonal:
# Instantiate figure and grid
fig, _ = plt.subplots(0, 0, figsize=figsize, **backend_kwargs)
grid = plt.GridSpec(4, 4, hspace=0.1, wspace=0.1, figure=fig)
# Set up main plot
ax = fig.add_subplot(grid[1:, :-1])
# Set up top KDE
ax_hist_x = fig.add_subplot(grid[0, :-1], sharex=ax)
# Set up right KDE
ax_hist_y = fig.add_subplot(grid[1:, -1], sharey=ax)
ax_return = np.array([[ax_hist_x, None], [ax, ax_hist_y]])
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
# Personalize axes
ax_hist_x.tick_params(labelleft=False, labelbottom=False)
ax_hist_y.tick_params(labelleft=False, labelbottom=False)
else:
fig, ax = plt.subplots(
numvars - 1, numvars - 1, figsize=figsize, **backend_kwargs
)
else:
if diagonal:
assert ax.shape == (numvars, numvars)
if ax[0, 1] is not None and ax[0, 1].get_figure() is not None:
ax[0, 1].remove()
ax_return = ax
ax_hist_x = ax[0, 0]
ax_hist_y = ax[1, 1]
ax = ax[1, 0]
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
else:
ax = np.atleast_2d(ax)[0, 0]
if "scatter" in kind:
ax.plot(infdata_group[0], infdata_group[1], **scatter_kwargs)
if "kde" in kind:
plot_kde(infdata_group[0], infdata_group[1], ax=ax, **kde_kwargs)
if "hexbin" in kind:
hexbin = ax.hexbin(
infdata_group[0],
infdata_group[1],
gridsize=gridsize,
**hexbin_kwargs,
)
ax.grid(False)
if kind == "hexbin" and colorbar:
cbar = ax.figure.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], ax=ax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if divergences:
ax.plot(
infdata_group[0][diverging_mask],
infdata_group[1][diverging_mask],
**divergences_kwargs,
)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, x)
pe_y = calculate_point_estimate(point_estimate, y)
if diagonal:
ax_hist_x.axvline(pe_x, **point_estimate_kwargs)
ax_hist_y.axhline(pe_y, **point_estimate_kwargs)
ax.axvline(pe_x, **point_estimate_kwargs)
ax.axhline(pe_y, **point_estimate_kwargs)
ax.scatter(
pe_x,
pe_y,
marker="s",
s=figsize[0] + 50,
**point_estimate_kwargs,
zorder=4,
)
ax.set_xlabel("{}".format(flat_var_names[0]), fontsize=ax_labelsize, wrap=True)
ax.set_ylabel("{}".format(flat_var_names[1]), fontsize=ax_labelsize, wrap=True)
ax.tick_params(labelsize=xt_labelsize)
else:
max_plots = (
numvars**2
if rcParams["plot.max_subplots"] is None
else rcParams["plot.max_subplots"]
)
vars_to_plot = np.sum(np.arange(numvars).cumsum() < max_plots)
if vars_to_plot < numvars:
warnings.warn(
"rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number "
"of resulting pair plots with these variables, generating only a "
"{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot),
UserWarning,
)
numvars = vars_to_plot
(figsize, ax_labelsize, _, xt_labelsize, _, _) = _scale_fig_size(
figsize, textsize, numvars - 2, numvars - 2
)
if ax is None:
fig, ax = plt.subplots(numvars, numvars, figsize=figsize, **backend_kwargs)
hexbin_values = []
for i in range(0, numvars):
var1 = infdata_group[i]
for j in range(0, numvars):
var2 = infdata_group[j]
if i > j:
if ax[j, i].get_figure() is not None:
ax[j, i].remove()
continue
elif i == j:
if diagonal:
loc = "right"
plot_dist(var1, ax=ax[i, j], **marginal_kwargs)
else:
loc = "left"
if ax[j, i].get_figure() is not None:
ax[j, i].remove()
continue
else:
if "scatter" in kind:
ax[j, i].plot(var1, var2, **scatter_kwargs)
if "kde" in kind:
plot_kde(
var1,
var2,
ax=ax[j, i],
**kde_kwargs,
)
if "hexbin" in kind:
ax[j, i].grid(False)
hexbin = ax[j, i].hexbin(
var1, var2, gridsize=gridsize, **hexbin_kwargs
)
if divergences:
ax[j, i].plot(
var1[diverging_mask],
var2[diverging_mask],
**divergences_kwargs,
)
if kind == "hexbin" and colorbar:
hexbin_values.append(hexbin.norm.vmin)
hexbin_values.append(hexbin.norm.vmax)
divider = make_axes_locatable(ax[-1, -1])
cax = divider.append_axes(loc, size="7%", pad="5%")
cbar = fig.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], cax=cax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, var1)
pe_y = calculate_point_estimate(point_estimate, var2)
ax[j, i].axvline(pe_x, **point_estimate_kwargs)
ax[j, i].axhline(pe_y, **point_estimate_kwargs)
if diagonal:
ax[j - 1, i].axvline(pe_x, **point_estimate_kwargs)
pe_last = calculate_point_estimate(
point_estimate, infdata_group[-1]
)
ax[-1, -1].axvline(pe_last, **point_estimate_kwargs)
ax[j, i].scatter(
pe_x,
pe_y,
s=figsize[0] + 50,
zorder=4,
**point_estimate_marker_kwargs,
)
if j != numvars - 1:
ax[j, i].axes.get_xaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_xlabel(
"{}".format(flat_var_names[i]), fontsize=ax_labelsize, wrap=True
)
if i != 0:
ax[j, i].axes.get_yaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_ylabel(
"{}".format(flat_var_names[j]), fontsize=ax_labelsize, wrap=True
)
ax[j, i].tick_params(labelsize=xt_labelsize)
if backend_show(show):
plt.show()
if diagonal and numvars == 2:
return ax_return
return ax
|
def plot_pair(
ax,
infdata_group,
numvars,
figsize,
textsize,
kind,
fill_last, # pylint: disable=unused-argument
contour, # pylint: disable=unused-argument
plot_kwargs, # pylint: disable=unused-argument
scatter_kwargs,
kde_kwargs,
hexbin_kwargs,
gridsize,
colorbar,
divergences,
diverging_mask,
divergences_kwargs,
flat_var_names,
backend_kwargs,
marginal_kwargs,
show,
diagonal,
point_estimate,
point_estimate_kwargs,
point_estimate_marker_kwargs,
):
"""Matplotlib pairplot."""
if backend_kwargs is None:
backend_kwargs = {}
backend_kwargs = {
**backend_kwarg_defaults(),
**backend_kwargs,
}
backend_kwargs.pop("constrained_layout")
if hexbin_kwargs is None:
hexbin_kwargs = {}
hexbin_kwargs.setdefault("mincnt", 1)
if kind != "kde":
kde_kwargs.setdefault("contourf_kwargs", {"alpha": 0})
kde_kwargs.setdefault("contour_kwargs", {})
kde_kwargs["contour_kwargs"].setdefault("colors", "k")
# pylint: disable=too-many-nested-blocks
if numvars == 2:
(figsize, ax_labelsize, _, xt_labelsize, linewidth, _) = _scale_fig_size(
figsize, textsize, numvars - 1, numvars - 1
)
marginal_kwargs.setdefault("plot_kwargs", {})
marginal_kwargs["plot_kwargs"].setdefault("linewidth", linewidth)
# Flatten data
x = infdata_group[0].flatten()
y = infdata_group[1].flatten()
if ax is None:
if diagonal:
# Instantiate figure and grid
fig, _ = plt.subplots(0, 0, figsize=figsize, **backend_kwargs)
grid = plt.GridSpec(4, 4, hspace=0.1, wspace=0.1, figure=fig)
# Set up main plot
ax = fig.add_subplot(grid[1:, :-1])
# Set up top KDE
ax_hist_x = fig.add_subplot(grid[0, :-1], sharex=ax)
# Set up right KDE
ax_hist_y = fig.add_subplot(grid[1:, -1], sharey=ax)
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
ax_hist_x.set_xlim(ax.get_xlim())
ax_hist_y.set_ylim(ax.get_ylim())
# Personalize axes
ax_hist_x.tick_params(labelleft=False, labelbottom=False)
ax_hist_y.tick_params(labelleft=False, labelbottom=False)
else:
fig, ax = plt.subplots(
numvars - 1, numvars - 1, figsize=figsize, **backend_kwargs
)
else:
if diagonal:
assert ax.shape == (numvars, numvars)
ax[0, 1].remove()
ax_hist_x = ax[0, 0]
ax_hist_y = ax[1, 1]
ax = ax[1, 0]
for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)):
plot_dist(
val,
textsize=xt_labelsize,
rotated=rotate,
ax=ax_,
**marginal_kwargs,
)
else:
ax = np.atleast_2d(ax)[0, 0]
if "scatter" in kind:
ax.plot(infdata_group[0], infdata_group[1], **scatter_kwargs)
if "kde" in kind:
plot_kde(infdata_group[0], infdata_group[1], ax=ax, **kde_kwargs)
if "hexbin" in kind:
hexbin = ax.hexbin(
infdata_group[0],
infdata_group[1],
gridsize=gridsize,
**hexbin_kwargs,
)
ax.grid(False)
if kind == "hexbin" and colorbar:
cbar = ax.figure.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], ax=ax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if divergences:
ax.plot(
infdata_group[0][diverging_mask],
infdata_group[1][diverging_mask],
**divergences_kwargs,
)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, x)
pe_y = calculate_point_estimate(point_estimate, y)
if diagonal:
ax_hist_x.axvline(pe_x, **point_estimate_kwargs)
ax_hist_y.axhline(pe_y, **point_estimate_kwargs)
ax.axvline(pe_x, **point_estimate_kwargs)
ax.axhline(pe_y, **point_estimate_kwargs)
ax.scatter(
pe_x,
pe_y,
marker="s",
s=figsize[0] + 50,
**point_estimate_kwargs,
zorder=4,
)
ax.set_xlabel("{}".format(flat_var_names[0]), fontsize=ax_labelsize, wrap=True)
ax.set_ylabel("{}".format(flat_var_names[1]), fontsize=ax_labelsize, wrap=True)
ax.tick_params(labelsize=xt_labelsize)
else:
max_plots = (
numvars**2
if rcParams["plot.max_subplots"] is None
else rcParams["plot.max_subplots"]
)
vars_to_plot = np.sum(np.arange(numvars).cumsum() < max_plots)
if vars_to_plot < numvars:
warnings.warn(
"rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number "
"of resulting pair plots with these variables, generating only a "
"{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot),
UserWarning,
)
numvars = vars_to_plot
(figsize, ax_labelsize, _, xt_labelsize, _, _) = _scale_fig_size(
figsize, textsize, numvars - 2, numvars - 2
)
if ax is None:
fig, ax = plt.subplots(numvars, numvars, figsize=figsize, **backend_kwargs)
hexbin_values = []
for i in range(0, numvars):
var1 = infdata_group[i]
for j in range(0, numvars):
var2 = infdata_group[j]
if i > j:
ax[j, i].remove()
continue
elif i == j:
if diagonal:
loc = "right"
plot_dist(var1, ax=ax[i, j], **marginal_kwargs)
else:
loc = "left"
ax[j, i].remove()
continue
else:
if "scatter" in kind:
ax[j, i].plot(var1, var2, **scatter_kwargs)
if "kde" in kind:
plot_kde(
var1,
var2,
ax=ax[j, i],
**kde_kwargs,
)
if "hexbin" in kind:
ax[j, i].grid(False)
hexbin = ax[j, i].hexbin(
var1, var2, gridsize=gridsize, **hexbin_kwargs
)
if divergences:
ax[j, i].plot(
var1[diverging_mask],
var2[diverging_mask],
**divergences_kwargs,
)
if kind == "hexbin" and colorbar:
hexbin_values.append(hexbin.norm.vmin)
hexbin_values.append(hexbin.norm.vmax)
divider = make_axes_locatable(ax[-1, -1])
cax = divider.append_axes(loc, size="7%", pad="5%")
cbar = fig.colorbar(
hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], cax=cax
)
cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize)
if point_estimate:
pe_x = calculate_point_estimate(point_estimate, var1)
pe_y = calculate_point_estimate(point_estimate, var2)
ax[j, i].axvline(pe_x, **point_estimate_kwargs)
ax[j, i].axhline(pe_y, **point_estimate_kwargs)
if diagonal:
ax[j - 1, i].axvline(pe_x, **point_estimate_kwargs)
pe_last = calculate_point_estimate(
point_estimate, infdata_group[-1]
)
ax[-1, -1].axvline(pe_last, **point_estimate_kwargs)
ax[j, i].scatter(
pe_x,
pe_y,
s=figsize[0] + 50,
zorder=4,
**point_estimate_marker_kwargs,
)
if j != numvars - 1:
ax[j, i].axes.get_xaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_xlabel(
"{}".format(flat_var_names[i]), fontsize=ax_labelsize, wrap=True
)
if i != 0:
ax[j, i].axes.get_yaxis().set_major_formatter(NullFormatter())
else:
ax[j, i].set_ylabel(
"{}".format(flat_var_names[j]), fontsize=ax_labelsize, wrap=True
)
ax[j, i].tick_params(labelsize=xt_labelsize)
if backend_show(show):
plt.show()
return ax
|
https://github.com/arviz-devs/arviz/issues/1130
|
Traceback (most recent call last):
File "test_model_pyro_torch_jit.py", line 311, in <module>
coords = {"Mc_dim_1": [0, 1], "Mc_dim_0": [0]}, ax=axes)
File "/Users/landerson/.local/lib/python3.7/site-packages/arviz-0.7.0-py3.7.egg/arviz/plots/pairplot.py", line 298, in plot_pair
ax = plot(**pairplot_kwargs)
File "/Users/landerson/.local/lib/python3.7/site-packages/arviz-0.7.0-py3.7.egg/arviz/plots/backends/matplotlib/pairplot.py", line 174, in plot_pair
ax[j, i].remove()
File "/opt/anaconda3/lib/python3.7/site-packages/matplotlib/artist.py", line 137, in remove
self._remove_method(self)
File "/opt/anaconda3/lib/python3.7/site-packages/matplotlib/figure.py", line 1615, in _remove_ax
self.delaxes(ax)
File "/opt/anaconda3/lib/python3.7/site-packages/matplotlib/figure.py", line 1031, in delaxes
self._axstack.remove(ax)
File "/opt/anaconda3/lib/python3.7/site-packages/matplotlib/figure.py", line 106, in remove
super().remove(self._entry_from_axes(a))
File "/opt/anaconda3/lib/python3.7/site-packages/matplotlib/figure.py", line 101, in _entry_from_axes
ind, k = {a: (ind, k) for k, (ind, a) in self._elements}[e]
KeyError: <matplotlib.axes._subplots.AxesSubplot object at 0x14f6a3890>
|
KeyError
|
def run(self):
"""
Blocking and long running tasks for application startup should be
called from here.
"""
venv.ensure_and_create()
self.finished.emit() # Always called last.
|
def run(self):
"""
Blocking and long running tasks for application startup should be
called from here.
"""
venv.ensure()
self.finished.emit() # Always called last.
|
https://github.com/mu-editor/mu/issues/1291
|
2021-02-11 08:24:50,511 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:24:50,516 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:24:50,517 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:24:50,517 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:24:50,776 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:24:50,776 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,776 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,777 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:24:50,778 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:06,987 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:25:06,987 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:25:06,987 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:25:06,987 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:25:07,184 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:25:07,184 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,184 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,185 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:25:07,186 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:15,345 - root:172(run) INFO:
-----------------
|
mu.virtual_environment.VirtualEnvironmentError
|
def __init__(self, dirpath=None):
self.process = Process()
self._is_windows = sys.platform == "win32"
self._bin_extension = ".exe" if self._is_windows else ""
self.settings = settings.VirtualEnvironmentSettings()
self.settings.init()
dirpath_to_use = dirpath or self.settings.get("dirpath") or self._generate_dirpath()
logger.info("Using dirpath: %s", dirpath_to_use)
self.relocate(dirpath_to_use)
|
def __init__(self, dirpath=None):
self.process = Process()
self._is_windows = sys.platform == "win32"
self._bin_extension = ".exe" if self._is_windows else ""
self.settings = settings.VirtualEnvironmentSettings()
self.settings.init()
self.relocate(dirpath or self.settings["dirpath"])
|
https://github.com/mu-editor/mu/issues/1291
|
2021-02-11 08:24:50,511 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:24:50,516 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:24:50,517 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:24:50,517 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:24:50,776 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:24:50,776 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,776 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,777 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:24:50,778 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:06,987 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:25:06,987 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:25:06,987 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:25:06,987 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:25:07,184 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:25:07,184 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,184 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,185 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:25:07,186 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:15,345 - root:172(run) INFO:
-----------------
|
mu.virtual_environment.VirtualEnvironmentError
|
def relocate(self, dirpath):
"""Relocate sets up variables for, eg, the expected location and name of
the Python and Pip binaries, but doesn't access the file system. That's
done by code in or called from `create`
"""
self.path = str(dirpath)
self.name = os.path.basename(self.path)
self._bin_directory = os.path.join(
self.path, "scripts" if self._is_windows else "bin"
)
#
# Pip and the interpreter will be set up when the virtualenv is created
#
self.interpreter = os.path.join(self._bin_directory, "python" + self._bin_extension)
self.pip = Pip(os.path.join(self._bin_directory, "pip" + self._bin_extension))
logger.debug("Virtual environment set up %s at %s", self.name, self.path)
self.settings["dirpath"] = self.path
|
def relocate(self, dirpath):
self.path = str(dirpath)
self.name = os.path.basename(self.path)
self._bin_directory = os.path.join(
self.path, "scripts" if self._is_windows else "bin"
)
#
# Pip and the interpreter will be set up when the virtualenv is created
#
self.interpreter = os.path.join(self._bin_directory, "python" + self._bin_extension)
self.pip = Pip(os.path.join(self._bin_directory, "pip" + self._bin_extension))
logger.debug("Virtual environment set up %s at %s", self.name, self.path)
self.settings["dirpath"] = self.path
|
https://github.com/mu-editor/mu/issues/1291
|
2021-02-11 08:24:50,511 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:24:50,516 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:24:50,517 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:24:50,517 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:24:50,776 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:24:50,776 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,776 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,777 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:24:50,778 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:06,987 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:25:06,987 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:25:06,987 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:25:06,987 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:25:07,184 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:25:07,184 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,184 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,185 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:25:07,186 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:15,345 - root:172(run) INFO:
-----------------
|
mu.virtual_environment.VirtualEnvironmentError
|
def ensure(self):
"""Ensure that virtual environment exists and is in a good state"""
self.ensure_path()
self.ensure_interpreter()
self.ensure_interpreter_version()
self.ensure_pip()
self.ensure_key_modules()
|
def ensure(self):
"""Ensure that a virtual environment exists, creating it if needed"""
if not os.path.exists(self.path):
logger.debug("%s does not exist; creating", self.path)
self.create()
elif not os.path.isdir(self.path):
message = "%s exists but is not a directory" % self.path
logger.error(message)
raise VirtualEnvironmentError(message)
elif not self._directory_is_venv():
message = "Directory %s exists but is not a venv" % self.path
logger.error(message)
raise VirtualEnvironmentError(message)
else:
logger.debug("Found existing virtual environment at %s", self.path)
self.ensure_interpreter()
self.ensure_pip()
|
https://github.com/mu-editor/mu/issues/1291
|
2021-02-11 08:24:50,511 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:24:50,516 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:24:50,517 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:24:50,517 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:24:50,776 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:24:50,776 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,776 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,777 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:24:50,778 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:06,987 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:25:06,987 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:25:06,987 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:25:06,987 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:25:07,184 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:25:07,184 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,184 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,185 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:25:07,186 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:15,345 - root:172(run) INFO:
-----------------
|
mu.virtual_environment.VirtualEnvironmentError
|
def ensure_interpreter(self):
"""Ensure there is an interpreter of the expected name at the expected
location, given the platform and naming conventions
NB if the interpreter is present as a symlink to a system interpreter (likely
for a venv) but the link is broken, then os.path.isfile will fail as though
the file wasn't there. Which is what we want in these circumstances
"""
if os.path.isfile(self.interpreter):
logger.info("Interpreter found at %s", self.interpreter)
else:
message = "Interpreter not found where expected at %s" % self.interpreter
logger.error(message)
raise VirtualEnvironmentError(message)
|
def ensure_interpreter(self):
if os.path.isfile(self.interpreter):
logger.info("Interpreter found at %s", self.interpreter)
else:
message = "Interpreter not found where expected at %s" % self.interpreter
logger.error(message)
raise VirtualEnvironmentError(message)
|
https://github.com/mu-editor/mu/issues/1291
|
2021-02-11 08:24:50,511 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:24:50,516 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:24:50,517 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:24:50,517 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:24:50,776 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:24:50,776 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,776 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,777 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:24:50,778 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:06,987 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:25:06,987 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:25:06,987 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:25:06,987 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:25:07,184 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:25:07,184 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,184 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,185 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:25:07,186 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:15,345 - root:172(run) INFO:
-----------------
|
mu.virtual_environment.VirtualEnvironmentError
|
def install_jupyter_kernel(self):
kernel_name = '"Python/Mu ({})"'.format(self.name)
logger.info("Installing Jupyter Kernel %s", kernel_name)
return self.run_python(
"-m",
"ipykernel",
"install",
"--user",
"--name",
self.name,
"--display-name",
kernel_name,
)
|
def install_jupyter_kernel(self):
logger.info("Installing Jupyter Kernel")
return self.run_python(
"-m",
"ipykernel",
"install",
"--user",
"--name",
self.name,
"--display-name",
'"Python/Mu ({})"'.format(self.name),
)
|
https://github.com/mu-editor/mu/issues/1291
|
2021-02-11 08:24:50,511 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:24:50,516 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:24:50,517 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:24:50,517 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:24:50,776 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:24:50,776 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,776 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:24:50,777 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:24:50,778 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:06,987 - root:172(run) INFO:
-----------------
Starting Mu 1.1.0.beta.1
2021-02-11 08:25:06,987 - root:173(run) INFO: uname_result(system='Darwin', node='Carlos-MBP-8.local', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Jan 12 22:04:47 PST 2021; root:xnu-4903.278.56~1/RELEASE_X86_64', machine='x86_64', processor='i386')
2021-02-11 08:25:06,987 - root:174(run) INFO: Python path: ['/', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python37.zip', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/lib-dynload', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/setuptools-49.6.0-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/pip-20.2.2-py3.7.egg', '/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/IPython/extensions']
2021-02-11 08:25:06,987 - root:175(run) INFO: Language code: en_GB
2021-02-11 08:25:07,184 - mu.virtual_environment:371(ensure) DEBUG: Found existing virtual environment at /Users/microbit-carlos/Library/Application Support/mu/mu_venv
2021-02-11 08:25:07,184 - mu.virtual_environment:383(ensure_interpreter) ERROR: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,184 - root:106(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/app.py", line 98, in run
venv.ensure()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 373, in ensure
self.ensure_interpreter()
File "/Users/microbit-carlos/Downloads/Mu Editor.app/Contents/Resources/Python/lib/python3.7/site-packages/mu/virtual_environment.py", line 384, in ensure_interpreter
raise VirtualEnvironmentError(message)
mu.virtual_environment.VirtualEnvironmentError: Interpreter not found where expected at /Users/microbit-carlos/Library/Application Support/mu/mu_venv/bin/python
2021-02-11 08:25:07,185 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/session.json
2021-02-11 08:25:07,186 - mu.settings:169(save) DEBUG: Saving to /Users/microbit-carlos/Library/Application Support/mu/venv.json
2021-02-11 08:25:15,345 - root:172(run) INFO:
-----------------
|
mu.virtual_environment.VirtualEnvironmentError
|
def get_dialog_directory(self, default=None):
"""
Return the directory folder which a load/save dialog box should
open into. In order of precedence this function will return:
0) If not None, the value of default.
1) The last location used by a load/save dialog.
2) The directory containing the current file.
3) The mode's reported workspace directory.
"""
if default is not None:
folder = default
elif self.current_path and os.path.isdir(self.current_path):
folder = self.current_path
else:
current_file_path = ""
try:
workspace_path = self.modes[self.mode].workspace_dir()
except Exception as e:
# Avoid crashing if workspace_dir raises, use default path
# instead
workspace_path = self.modes["python"].workspace_dir()
logger.error(
(
"Could not open {} mode workspace directory"
'due to exception "{}". Using:'
"\n\n{}\n\n...to store your code instead"
).format(self.mode, e, workspace_path)
)
tab = self._view.current_tab
if tab and tab.path:
current_file_path = os.path.dirname(os.path.abspath(tab.path))
folder = current_file_path if current_file_path else workspace_path
logger.info("Using path for file dialog: {}".format(folder))
return folder
|
def get_dialog_directory(self, default=None):
"""
Return the directory folder which a load/save dialog box should
open into. In order of precedence this function will return:
0) If not None, the value of default.
1) The last location used by a load/save dialog.
2) The directory containing the current file.
3) The mode's reported workspace directory.
"""
if default is not None:
folder = default
elif self.current_path and os.path.isdir(self.current_path):
folder = self.current_path
else:
current_file_path = ""
workspace_path = self.modes[self.mode].workspace_dir()
tab = self._view.current_tab
if tab and tab.path:
current_file_path = os.path.dirname(os.path.abspath(tab.path))
folder = current_file_path if current_file_path else workspace_path
logger.info("Using path for file dialog: {}".format(folder))
return folder
|
https://github.com/mu-editor/mu/issues/1237
|
(base) me@ubuntu:~$ mu-editor
Logging to /home/me/snap/mu-editor/common/.cache/mu/log/mu.log
Gtk-Message: Failed to load module "gail"
Gtk-Message: Failed to load module "atk-bridge"
Gtk-Message: Failed to load module "canberra-gtk-module"
Traceback (most recent call last):
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/logic.py", line 1075, in select_mode
self.change_mode(new_mode)
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/logic.py", line 1112, in change_mode
self.modes[mode].workspace_dir()))
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/modes/adafruit.py", line 98, in workspace_dir
mount_output = check_output(mount_command).splitlines()
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 626, in check_output
**kwargs).stdout
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 693, in run
with Popen(*popenargs, **kwargs) as process:
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 947, in __init__
restore_signals, start_new_session)
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 1551, in _execute_child
raise child_exception_type(errno_num, err_msg)
PermissionError: [Errno 13] Permission denied
|
PermissionError
|
def change_mode(self, mode):
"""
Given the name of a mode, will make the necessary changes to put the
editor into the new mode.
"""
# Remove the old mode's REPL / filesystem / plotter if required.
old_mode = self.modes[self.mode]
if hasattr(old_mode, "remove_repl"):
old_mode.remove_repl()
if hasattr(old_mode, "remove_fs"):
old_mode.remove_fs()
if hasattr(old_mode, "remove_plotter"):
if old_mode.plotter:
old_mode.remove_plotter()
# Deactivate old mode
self.modes[self.mode].deactivate()
# Re-assign to new mode.
self.mode = mode
# Activate new mode
self.modes[mode].activate()
# Update buttons.
self._view.change_mode(self.modes[mode])
button_bar = self._view.button_bar
button_bar.connect("modes", self.select_mode, "Ctrl+Shift+M")
button_bar.connect("new", self.new, "Ctrl+N")
button_bar.connect("load", self.load, "Ctrl+O")
button_bar.connect("save", self.save, "Ctrl+S")
for action in self.modes[mode].actions():
button_bar.connect(action["name"], action["handler"], action["shortcut"])
button_bar.connect("zoom-in", self.zoom_in, "Ctrl++")
button_bar.connect("zoom-out", self.zoom_out, "Ctrl+-")
button_bar.connect("theme", self.toggle_theme, "F1")
button_bar.connect("check", self.check_code, "F2")
if sys.version_info[:2] >= (3, 6):
button_bar.connect("tidy", self.tidy_code, "F10")
button_bar.connect("help", self.show_help, "Ctrl+H")
button_bar.connect("quit", self.quit, "Ctrl+Q")
self._view.status_bar.set_mode(self.modes[mode].name)
# Update references to default file locations.
try:
workspace_dir = self.modes[mode].workspace_dir()
logger.info("Workspace directory: {}".format(workspace_dir))
except Exception as e:
# Avoid crashing if workspace_dir raises, use default path instead
workspace_dir = self.modes["python"].workspace_dir()
logger.error(
(
"Could not open {} mode workspace directory, "
'due to exception "{}".'
"Using:\n\n{}\n\n...to store your code instead"
).format(mode, repr(e), workspace_dir)
)
# Reset remembered current path for load/save dialogs.
self.current_path = ""
# Ensure auto-save timeouts are set.
if self.modes[mode].save_timeout > 0:
# Start the timer
self._view.set_timer(self.modes[mode].save_timeout, self.autosave)
else:
# Stop the timer
self._view.stop_timer()
# Update breakpoint states.
if not (self.modes[mode].is_debugger or self.modes[mode].has_debugger):
for tab in self._view.widgets:
tab.breakpoint_handles = set()
tab.reset_annotations()
self.show_status_message(_("Changed to {} mode.").format(self.modes[mode].name))
|
def change_mode(self, mode):
"""
Given the name of a mode, will make the necessary changes to put the
editor into the new mode.
"""
# Remove the old mode's REPL / filesystem / plotter if required.
old_mode = self.modes[self.mode]
if hasattr(old_mode, "remove_repl"):
old_mode.remove_repl()
if hasattr(old_mode, "remove_fs"):
old_mode.remove_fs()
if hasattr(old_mode, "remove_plotter"):
if old_mode.plotter:
old_mode.remove_plotter()
# Deactivate old mode
self.modes[self.mode].deactivate()
# Re-assign to new mode.
self.mode = mode
# Activate new mode
self.modes[mode].activate()
# Update buttons.
self._view.change_mode(self.modes[mode])
button_bar = self._view.button_bar
button_bar.connect("modes", self.select_mode, "Ctrl+Shift+M")
button_bar.connect("new", self.new, "Ctrl+N")
button_bar.connect("load", self.load, "Ctrl+O")
button_bar.connect("save", self.save, "Ctrl+S")
for action in self.modes[mode].actions():
button_bar.connect(action["name"], action["handler"], action["shortcut"])
button_bar.connect("zoom-in", self.zoom_in, "Ctrl++")
button_bar.connect("zoom-out", self.zoom_out, "Ctrl+-")
button_bar.connect("theme", self.toggle_theme, "F1")
button_bar.connect("check", self.check_code, "F2")
if sys.version_info[:2] >= (3, 6):
button_bar.connect("tidy", self.tidy_code, "F10")
button_bar.connect("help", self.show_help, "Ctrl+H")
button_bar.connect("quit", self.quit, "Ctrl+Q")
self._view.status_bar.set_mode(self.modes[mode].name)
# Update references to default file locations.
logger.info("Workspace directory: {}".format(self.modes[mode].workspace_dir()))
# Reset remembered current path for load/save dialogs.
self.current_path = ""
# Ensure auto-save timeouts are set.
if self.modes[mode].save_timeout > 0:
# Start the timer
self._view.set_timer(self.modes[mode].save_timeout, self.autosave)
else:
# Stop the timer
self._view.stop_timer()
# Update breakpoint states.
if not (self.modes[mode].is_debugger or self.modes[mode].has_debugger):
for tab in self._view.widgets:
tab.breakpoint_handles = set()
tab.reset_annotations()
self.show_status_message(_("Changed to {} mode.").format(self.modes[mode].name))
|
https://github.com/mu-editor/mu/issues/1237
|
(base) me@ubuntu:~$ mu-editor
Logging to /home/me/snap/mu-editor/common/.cache/mu/log/mu.log
Gtk-Message: Failed to load module "gail"
Gtk-Message: Failed to load module "atk-bridge"
Gtk-Message: Failed to load module "canberra-gtk-module"
Traceback (most recent call last):
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/logic.py", line 1075, in select_mode
self.change_mode(new_mode)
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/logic.py", line 1112, in change_mode
self.modes[mode].workspace_dir()))
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/modes/adafruit.py", line 98, in workspace_dir
mount_output = check_output(mount_command).splitlines()
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 626, in check_output
**kwargs).stdout
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 693, in run
with Popen(*popenargs, **kwargs) as process:
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 947, in __init__
restore_signals, start_new_session)
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 1551, in _execute_child
raise child_exception_type(errno_num, err_msg)
PermissionError: [Errno 13] Permission denied
|
PermissionError
|
def workspace_dir(self):
"""
Return the default location on the filesystem for opening and closing
files.
"""
device_dir = None
# Attempts to find the path on the filesystem that represents the
# plugged in CIRCUITPY board.
if os.name == "posix":
# We're on Linux or OSX
for mount_command in ["mount", "/sbin/mount"]:
try:
mount_output = check_output(mount_command).splitlines()
mounted_volumes = [x.split()[2] for x in mount_output]
for volume in mounted_volumes:
tail = os.path.split(volume)[-1]
if tail.startswith(b"CIRCUITPY") or tail.startswith(b"PYBFLASH"):
device_dir = volume.decode("utf-8")
break
except FileNotFoundError:
pass
except PermissionError as e:
logger.error(
"Received '{}' running command: {}".format(
repr(e),
mount_command,
)
)
m = _("Permission error running mount command")
info = _(
'The mount command ("{}") returned an error: '
"{}. Mu will continue as if a device isn't "
"plugged in."
).format(mount_command, repr(e))
self.view.show_message(m, info)
# Avoid crashing Mu, the workspace dir will be set to default
except Exception as e:
logger.error(
"Received '{}' running command: {}".format(
repr(e),
mount_command,
)
)
elif os.name == "nt":
# We're on Windows.
def get_volume_name(disk_name):
"""
Each disk or external device connected to windows has an
attribute called "volume name". This function returns the
volume name for the given disk/device.
Code from http://stackoverflow.com/a/12056414
"""
vol_name_buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.kernel32.GetVolumeInformationW(
ctypes.c_wchar_p(disk_name),
vol_name_buf,
ctypes.sizeof(vol_name_buf),
None,
None,
None,
None,
0,
)
return vol_name_buf.value
#
# In certain circumstances, volumes are allocated to USB
# storage devices which cause a Windows popup to raise if their
# volume contains no media. Wrapping the check in SetErrorMode
# with SEM_FAILCRITICALERRORS (1) prevents this popup.
#
old_mode = ctypes.windll.kernel32.SetErrorMode(1)
try:
for disk in "ABCDEFGHIJKLMNOPQRSTUVWXYZ":
path = "{}:\\".format(disk)
if os.path.exists(path) and get_volume_name(path) == "CIRCUITPY":
return path
finally:
ctypes.windll.kernel32.SetErrorMode(old_mode)
else:
# No support for unknown operating systems.
raise NotImplementedError('OS "{}" not supported.'.format(os.name))
if device_dir:
# Found it!
self.connected = True
return device_dir
else:
# Not plugged in? Just return Mu's regular workspace directory
# after warning the user.
wd = super().workspace_dir()
if self.connected:
m = _("Could not find an attached CircuitPython device.")
info = _(
"Python files for CircuitPython devices"
" are stored on the device. Therefore, to edit"
" these files you need to have the device plugged in."
" Until you plug in a device, Mu will use the"
" directory found here:\n\n"
" {}\n\n...to store your code."
)
self.view.show_message(m, info.format(wd))
self.connected = False
return wd
|
def workspace_dir(self):
"""
Return the default location on the filesystem for opening and closing
files.
"""
device_dir = None
# Attempts to find the path on the filesystem that represents the
# plugged in CIRCUITPY board.
if os.name == "posix":
# We're on Linux or OSX
for mount_command in ["mount", "/sbin/mount"]:
try:
mount_output = check_output(mount_command).splitlines()
mounted_volumes = [x.split()[2] for x in mount_output]
for volume in mounted_volumes:
tail = os.path.split(volume)[-1]
if tail.startswith(b"CIRCUITPY") or tail.startswith(b"PYBFLASH"):
device_dir = volume.decode("utf-8")
break
except FileNotFoundError:
next
elif os.name == "nt":
# We're on Windows.
def get_volume_name(disk_name):
"""
Each disk or external device connected to windows has an
attribute called "volume name". This function returns the
volume name for the given disk/device.
Code from http://stackoverflow.com/a/12056414
"""
vol_name_buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.kernel32.GetVolumeInformationW(
ctypes.c_wchar_p(disk_name),
vol_name_buf,
ctypes.sizeof(vol_name_buf),
None,
None,
None,
None,
0,
)
return vol_name_buf.value
#
# In certain circumstances, volumes are allocated to USB
# storage devices which cause a Windows popup to raise if their
# volume contains no media. Wrapping the check in SetErrorMode
# with SEM_FAILCRITICALERRORS (1) prevents this popup.
#
old_mode = ctypes.windll.kernel32.SetErrorMode(1)
try:
for disk in "ABCDEFGHIJKLMNOPQRSTUVWXYZ":
path = "{}:\\".format(disk)
if os.path.exists(path) and get_volume_name(path) == "CIRCUITPY":
return path
finally:
ctypes.windll.kernel32.SetErrorMode(old_mode)
else:
# No support for unknown operating systems.
raise NotImplementedError('OS "{}" not supported.'.format(os.name))
if device_dir:
# Found it!
self.connected = True
return device_dir
else:
# Not plugged in? Just return Mu's regular workspace directory
# after warning the user.
wd = super().workspace_dir()
if self.connected:
m = _("Could not find an attached CircuitPython device.")
info = _(
"Python files for CircuitPython devices"
" are stored on the device. Therefore, to edit"
" these files you need to have the device plugged in."
" Until you plug in a device, Mu will use the"
" directory found here:\n\n"
" {}\n\n...to store your code."
)
self.view.show_message(m, info.format(wd))
self.connected = False
return wd
|
https://github.com/mu-editor/mu/issues/1237
|
(base) me@ubuntu:~$ mu-editor
Logging to /home/me/snap/mu-editor/common/.cache/mu/log/mu.log
Gtk-Message: Failed to load module "gail"
Gtk-Message: Failed to load module "atk-bridge"
Gtk-Message: Failed to load module "canberra-gtk-module"
Traceback (most recent call last):
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/logic.py", line 1075, in select_mode
self.change_mode(new_mode)
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/logic.py", line 1112, in change_mode
self.modes[mode].workspace_dir()))
File "/snap/mu-editor/4/lib/python3.5/site-packages/mu/modes/adafruit.py", line 98, in workspace_dir
mount_output = check_output(mount_command).splitlines()
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 626, in check_output
**kwargs).stdout
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 693, in run
with Popen(*popenargs, **kwargs) as process:
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 947, in __init__
restore_signals, start_new_session)
File "/snap/mu-editor/4/usr/lib/python3.5/subprocess.py", line 1551, in _execute_child
raise child_exception_type(errno_num, err_msg)
PermissionError: [Errno 13] Permission denied
|
PermissionError
|
def __init__(self, connection, theme="day", parent=None):
super().__init__(parent)
self.connection = connection
self.setFont(Font().load())
self.setAcceptRichText(False)
self.setReadOnly(False)
self.setUndoRedoEnabled(False)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.context_menu)
# The following variable maintains the position where we know
# the device cursor is placed. It is initialized to the beginning
# of the QTextEdit (i.e. equal to the Qt cursor position)
self.device_cursor_position = self.textCursor().position()
self.setObjectName("replpane")
self.set_theme(theme)
self.unprocessed_input = b"" # used by process_bytes
self.decoder = codecs.getincrementaldecoder("utf8")("replace")
self.vt100_regex = re.compile(
r"\x1B\[(?P<count>[\d]*)(;?[\d]*)*(?P<action>[A-Za-z])"
)
|
def __init__(self, connection, theme="day", parent=None):
super().__init__(parent)
self.connection = connection
self.setFont(Font().load())
self.setAcceptRichText(False)
self.setReadOnly(False)
self.setUndoRedoEnabled(False)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.context_menu)
# The following variable maintains the position where we know
# the device cursor is placed. It is initialized to the beginning
# of the QTextEdit (i.e. equal to the Qt cursor position)
self.device_cursor_position = self.textCursor().position()
self.setObjectName("replpane")
self.set_theme(theme)
self.unprocessed_input = b"" # used by process_bytes
self.decoder = codecs.getincrementaldecoder("utf8")()
self.vt100_regex = re.compile(
r"\x1B\[(?P<count>[\d]*)(;?[\d]*)*(?P<action>[A-Za-z])"
)
|
https://github.com/mu-editor/mu/issues/1124
|
Starting Mu 1.1.0.alpha.2
2020-10-01 08:53:24,342 - root:123(run) INFO: uname_result(system='Darwin', node='dybber', release='18.7.0', version='Darwin Kernel Version 18.7.0: Tue Aug 20 16:57:14 PDT 2019; root:xnu-4903.271.2~2/RELEASE_X86_64', machine='x86_64', processor='i386')
2020-10-01 08:53:24,342 - root:124(run) INFO: Python path: ['/Users/dpr964/Development/micropython/mu', '/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python38.zip', '/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8', '/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/lib-dynload', '/Users/dpr964/.virtualenvs/mu-editor/lib/python3.8/site-packages', '/Users/dpr964/Development/micropython/mu', '/Users/dpr964/Library/Application Support/mu/site-packages', '/Users/dpr964/.virtualenvs/mu-editor/lib/python3.8/site-packages/IPython/extensions']
2020-10-01 08:53:24,342 - root:125(run) INFO: Language code: en_US
2020-10-01 08:53:24,990 - mu.logic:845(__init__) INFO: Setting up editor.
2020-10-01 08:53:24,992 - mu.logic:867(__init__) INFO: Settings path: /Users/dpr964/Library/Application Support/mu/settings.json
2020-10-01 08:53:24,992 - mu.logic:868(__init__) INFO: Session path: /Users/dpr964/Library/Application Support/mu/session.json
2020-10-01 08:53:24,992 - mu.logic:869(__init__) INFO: Log directory: /Users/dpr964/Library/Logs/mu
2020-10-01 08:53:24,992 - mu.logic:870(__init__) INFO: Data directory: /Users/dpr964/Library/Application Support/mu
2020-10-01 08:53:25,012 - mu.logic:884(setup) INFO: Available modes: python, circuitpython, microbit, esp, web, debugger
2020-10-01 08:53:25,771 - mu.logic:1611(change_mode) INFO: Workspace directory: /Users/dpr964/mu_code
2020-10-01 08:53:25,773 - mu.logic:969(restore_session) INFO: Restoring session from: /Users/dpr964/Library/Application Support/mu/session.json
2020-10-01 08:53:25,773 - mu.logic:970(restore_session) DEBUG: {'theme': 'day', 'mode': 'esp', 'paths': [], 'envars': [], 'minify': False, 'microbit_runtime': '', 'zoom_level': 2, 'window': {'x': 256, 'y': 144, 'w': 2048, 'h': 1152}}
2020-10-01 08:53:25,779 - mu.logic:992(restore_session) INFO: Loaded files.
2020-10-01 08:53:25,779 - mu.logic:995(restore_session) INFO: User defined environment variables: []
2020-10-01 08:53:25,779 - mu.logic:1001(restore_session) INFO: Minify scripts on micro:bit? False
2020-10-01 08:53:25,800 - mu.logic:1611(change_mode) INFO: Workspace directory: /Users/dpr964/mu_code
2020-10-01 08:53:25,953 - mu.logic:1038(restore_session) INFO: Starting with blank file.
2020-10-01 08:53:26,139 - mu.logic:824(check_usb) INFO: esp device connected on port: /dev/cu.usbserial-A9523A07B6(VID: 0x0403, PID: 0x6001, manufacturer: 'M5STACK Inc.')
2020-10-01 08:53:27,553 - mu.modes.base:123(open) INFO: Connecting to REPL on port: /dev/cu.usbserial-A9523A07B6
2020-10-01 08:53:27,566 - mu.modes.base:141(open) INFO: Connected to REPL on port: /dev/cu.usbserial-A9523A07B6
2020-10-01 08:53:27,578 - mu.modes.base:507(add_repl) INFO: Started REPL on port: /dev/cu.usbserial-A9523A07B6
2020-10-01 08:53:27,578 - mu.modes.base:478(toggle_repl) INFO: Toggle REPL on.
2020-10-01 08:53:27,599 - root:105(excepthook) ERROR: Unrecoverable error
Traceback (most recent call last):
File "/Users/dpr964/Development/micropython/mu/mu/interface/panes.py", line 364, in process_tty_data
data = self.decoder.decode(data)
File "/usr/local/opt/python@3.8/bin/../Frameworks/Python.framework/Versions/3.8/lib/python3.8/codecs.py", line 322, in decode
(result, consumed) = self._buffer_decode(data, self.errors, final)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xd8 in position 4: invalid continuation byte
2020-10-01 08:53:38,324 - root:122(run) INFO:
|
UnicodeDecodeError
|
def _get_link(self, soup):
# Gets:
# <input type="hidden" id="id" value="MTEyMzg1">
# <input type="hidden" id="title" value="Yakusoku+no+Neverland">
# <input type="hidden" id="typesub" value="SUB">
# Used to create a download url.
soup_id = soup.select("input#id")[0]["value"]
soup_title = soup.select("input#title")[0]["value"]
soup_typesub = soup.select("input#typesub")[0].get("value", "SUB")
sources_json = helpers.get(
f"https://vidstreaming.io/ajax.php",
params={
"id": soup_id,
"typesub": soup_typesub,
"title": soup_title,
},
referer=self.url,
).json()
logger.debug("Sources json: {}".format(str(sources_json)))
"""
Maps config vidstreaming sources to json results.
When adding config in the future make sure "vidstream"
is in the name in order to pass the check above.
"""
sources_keys = {"vidstream": "source", "vidstream_bk": "source_bk"}
"""
Elaborate if statements to get sources_json["source"][0]["file"]
based on order in config.
"""
servers = Config._read_config()["siteconfig"]["vidstream"]["servers"]
for i in servers:
if i in sources_keys:
if sources_keys[i] in sources_json:
if "file" in sources_json[sources_keys[i]][0]:
return {
"stream_url": sources_json[sources_keys[i]][0]["file"],
"referer": self.url,
}
return {"stream_url": ""}
|
def _get_link(self, soup):
"""
Matches something like
f("MTE2MDIw&title=Yakusoku+no+Neverland");
"""
sources_regex = r'>\s*?f\("(.*?)"\);'
sources_url = re.search(sources_regex, str(soup)).group(1)
sources_json = helpers.get(
f"https://vidstreaming.io/ajax.php?id={sources_url}", referer=self.url
).json()
logger.debug("Sources json: {}".format(str(sources_json)))
"""
Maps config vidstreaming sources to json results.
When adding config in the future make sure "vidstream"
is in the name in order to pass the check above.
"""
sources_keys = {"vidstream": "source", "vidstream_bk": "source_bk"}
"""
Elaborate if statements to get sources_json["source"][0]["file"]
based on order in config.
"""
servers = Config._read_config()["siteconfig"]["vidstream"]["servers"]
print(sources_json["source"][0]["file"])
for i in servers:
if i in sources_keys:
if sources_keys[i] in sources_json:
if "file" in sources_json[sources_keys[i]][0]:
return {
"stream_url": sources_json[sources_keys[i]][0]["file"],
"referer": self.url,
}
return {"stream_url": ""}
|
https://github.com/anime-dl/anime-downloader/issues/484
|
2020-08-22 13:17:39 arch anime_downloader.session[11021] DEBUG uncached request
2020-08-22 13:17:39 arch anime_downloader.sites.helpers.request[11021] DEBUG https://vidstreaming.io/load.php?id=NDkyNzA=&title=Ansatsu+Kyoushitsu&typesub=SUB&sub=eyJlbiI6bnVsbCwiZXMiOm51bGx9&cover=aW1hZ2VzL3VwbG9hZC82NDU5NS5qcGc=
2020-08-22 13:17:39 arch anime_downloader.sites.helpers.request[11021] DEBUG /tmp/animedlbudirp9b/tmppc8povqh
2020-08-22 13:17:39 arch anime_downloader.extractors.vidstream[11021] DEBUG Linkserver: []
Traceback (most recent call last):
File "/home/bl/.local/bin/anime", line 8, in <module>
sys.exit(main())
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/home/bl/.local/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/home/bl/.local/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/bl/.local/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/bl/.local/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/bl/.local/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/bl/.local/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 120, in command
for episode in animes:
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 291, in try_data
self.source().stream_url
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/extractors/base_extractor.py", line 29, in stream_url
self.get_data()
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/extractors/base_extractor.py", line 41, in get_data
data = self._get_data()
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/extractors/vidstream.py", line 43, in _get_data
return self._get_link(soup)
File "/home/bl/.local/lib/python3.8/site-packages/anime_downloader/extractors/vidstream.py", line 63, in _get_link
sources_url = re.search(sources_regex,str(soup)).group(1)
AttributeError: 'NoneType' object has no attribute 'group'
|
AttributeError
|
def search(cls, query):
soup = helpers.soupify(
helpers.get("https://dreamanime.fun/search", params={"term": query})
)
result_data = soup.select("a#epilink")
search_results = [
SearchResult(title=result.text, url=result.get("href"))
for result in result_data
]
return search_results
|
def search(cls, query):
results = helpers.get("https://dreamanime.fun/search", params={"term": query}).text
soup = helpers.soupify(results)
result_data = soup.find_all("a", {"id": "epilink"})
search_results = [
SearchResult(title=result.text, url=result.get("href"))
for result in result_data
]
return search_results
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def _scrape_episodes(self):
version = self.config.get("version", "subbed")
soup = helpers.soupify(helpers.get(self.url))
episodes = []
_all = soup.select("div.episode-wrap")
for i in _all:
ep_type = i.find("div", {"class": re.compile("ep-type type-.* dscd")}).text
if ep_type == "Sub":
episodes.append(i.find("a").get("data-src"))
elif ep_type == "Dub":
episodes.append(i.find("a").get("href"))
if len(episodes) == 0:
logger.warning("No episodes found")
return episodes[::-1]
|
def _scrape_episodes(self):
version = self.config.get("version", "subbed")
soup = helpers.soupify(helpers.get(self.url))
episodes = []
_all = soup.find_all("div", {"class": "episode-wrap"})
for i in _all:
ep_type = i.find("div", {"class": re.compile("ep-type type-.* dscd")}).text
if ep_type == "Sub":
episodes.append(i.find("a").get("data-src"))
elif ep_type == "Dub":
episodes.append(i.find("a").get("href"))
if len(episodes) == 0:
logger.warning("No episodes found")
return episodes[::-1]
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def _get_sources(self):
server = self.config.get("server", "trollvid")
resp = helpers.get(self.url).text
hosts = json.loads(re.search("var\s+episode\s+=\s+({.*})", resp).group(1))["videos"]
_type = hosts[0]["type"]
try:
host = list(
filter(
lambda video: video["host"] == server and video["type"] == _type, hosts
)
)[0]
except IndexError:
host = hosts[0]
if host["host"] == "mp4upload" and len(hosts) > 1:
host = hosts[1]
name = host["host"]
_id = host["id"]
link = self.getLink(name, _id)
return [(name, link)]
|
def _get_sources(self):
server = self.config.get("server", "trollvid")
soup = helpers.soupify(helpers.get(self.url))
hosts = json.loads(
soup.find("div", {"class": "spatry"}).previous_sibling.previous_sibling.text[
21:-2
]
)["videos"]
_type = hosts[0]["type"]
try:
host = list(
filter(
lambda video: video["host"] == server and video["type"] == _type, hosts
)
)[0]
except IndexError:
host = hosts[0]
if host["host"] == "mp4upload" and len(hosts) > 1:
host = hosts[1]
name = host["host"]
_id = host["id"]
link = self.getLink(name, _id)
return [(name, link)]
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def search(cls, query):
soup = helpers.soupify(
helpers.get("https://www4.ryuanime.com/search", params={"term": query})
)
result_data = soup.select("ul.list-inline")[0].select("a")
search_results = [
SearchResult(title=result.text, url=result.get("href"))
for result in result_data
]
return search_results
|
def search(cls, query):
results = helpers.get(
"https://www4.ryuanime.com/search", params={"term": query}
).text
soup = helpers.soupify(results)
result_data = soup.find("ul", {"class": "list-inline"}).find_all("a")
search_results = [
SearchResult(title=result.text, url=result.get("href"))
for result in result_data
]
return search_results
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def _scrape_episodes(self):
version = self.config.get("version", "subbed")
soup = helpers.soupify(helpers.get(self.url))
ep_list = [
x for x in soup.select("div.col-sm-6") if x.find("h5").text == version.title()
][0].find_all("a")
episodes = [x.get("href") for x in ep_list]
if len(episodes) == 0:
logger.warning("No episodes found")
return episodes[::-1]
|
def _scrape_episodes(self):
version = self.config.get("version", "subbed")
soup = helpers.soupify(helpers.get(self.url))
ep_list = [
x
for x in soup.find_all("div", {"class": "col-sm-6"})
if x.find("h5").text == version.title()
][0].find_all("a")
episodes = [x.get("href") for x in ep_list]
if len(episodes) == 0:
logger.warning("No episodes found")
return episodes[::-1]
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def _scrape_metadata(self):
soup = helpers.soupify(helpers.get(self.url))
self.title = soup.select("div.card-header")[0].find("h1").text
|
def _scrape_metadata(self):
soup = helpers.soupify(helpers.get(self.url))
self.title = soup.find("div", {"class": "card-header"}).find("h1").text
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def _get_sources(self):
server = self.config.get("server", "trollvid")
soup = helpers.soupify(helpers.get(self.url))
hosts = json.loads(
re.search(
"\[.*?\]", soup.select("div.col-sm-9")[0].select("script")[0].text
).group()
)
_type = hosts[0]["type"]
try:
host = list(
filter(
lambda video: video["host"] == server and video["type"] == _type, hosts
)
)[0]
except IndexError:
host = hosts[0]
# I will try to avoid mp4upload since it mostly doesn't work
if host["host"] == "mp4upload" and len(hosts) > 1:
host = hosts[1]
name = host["host"]
_id = host["id"]
link = self.getLink(name, _id)
return [(name, link)]
|
def _get_sources(self):
server = self.config.get("server", "trollvid")
soup = helpers.soupify(helpers.get(self.url))
hosts = json.loads(
soup.find("div", {"class": "col-sm-9"}).find("script").text[30:-6]
)
_type = hosts[0]["type"]
try:
host = list(
filter(
lambda video: video["host"] == server and video["type"] == _type, hosts
)
)[0]
except IndexError:
host = hosts[0]
# I will try to avoid mp4upload since it mostly doesn't work
if host["host"] == "mp4upload" and len(hosts) > 1:
host = hosts[1]
name = host["host"]
_id = host["id"]
link = self.getLink(name, _id)
return [(name, link)]
|
https://github.com/anime-dl/anime-downloader/issues/385
|
matt@matt:~$ anime -ll DEBUG dl 'penguin highway' --provider ryuanime
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] INFO anime-downloader 4.3.0
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Platform: macOS-10.14.6-x86_64-i386-64bit
2020-05-31 19:09:54 matt.local anime_downloader.util[41614] DEBUG Python 3.8.2
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/search
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'params': {'term': 'penguin highway'}}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /search?term=penguin+highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:54 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Set-Cookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; expires=Tue, 30-Jun-20 18:09:54 GMT; path=/; domain=.ryuanime.com; HttpOnly; SameSite=Lax; Secure
header: Set-Cookie: PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj; path=/
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d861aca00006a0b5586e200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a60ade546a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:54 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/search?term=penguin+highway
2020-05-31 19:09:54 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmppx3ctpjn
+--------+-----------------+--------+
| 1 | Penguin Highway | |
|--------+-----------------+--------|
| SlNo | Title | Meta |
+--------+-----------------+--------+
Enter the anime no: [1]: 1
2020-05-31 19:09:55 matt.local anime_downloader.util[41614] INFO Selected Penguin Highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.anime[41614] INFO Extracting episode info from page
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:55 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/3939-penguin-highway HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86201900006a0b55880200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a613593e6a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmplpjqs5fs
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG cached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/3939-penguin-highway
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmp2rcnd8h1
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG EPISODE IDS: length: 1, ids: ['https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub']
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Found anime: Penguin Highway
2020-05-31 19:09:56 matt.local anime_downloader.commands.dl[41614] INFO Downloading to /Users/matt/Downloads/1234_JD
2020-05-31 19:09:56 matt.local anime_downloader.sites.anime[41614] DEBUG Extracting stream info of id: https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG GET https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG {'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2'}
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG -----
send: b'GET /anime/watch/189328-penguin-highway-movie-english-sub HTTP/1.1\r\nHost: www4.ryuanime.com\r\nuser-agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\nCookie: __cfduid=d4d5081f03f192e51c4b768dc75a28b161590948594; PHPSESSID=r1mgv31e58tbi5e6vu211c3eoj\r\n\r\n'
reply: 'HTTP/1.1 200 OK\r\n'
header: Date: Sun, 31 May 2020 18:09:56 GMT
header: Content-Type: text/html; charset=UTF-8
header: Transfer-Encoding: chunked
header: Connection: keep-alive
header: Expires: Thu, 19 Nov 1981 08:52:00 GMT
header: Cache-Control: no-store, no-cache, must-revalidate
header: Pragma: no-cache
header: CF-Cache-Status: DYNAMIC
header: cf-request-id: 030d86219e00006a0b55887200000001
header: Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct"
header: Server: cloudflare
header: CF-RAY: 59c2a615ca036a0b-LHR
header: Content-Encoding: gzip
2020-05-31 19:09:56 matt.local anime_downloader.session[41614] DEBUG uncached request
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG https://www4.ryuanime.com/anime/watch/189328-penguin-highway-movie-english-sub
2020-05-31 19:09:56 matt.local anime_downloader.sites.helpers.request[41614] DEBUG /var/folders/sy/jcz9n9dn2t98677rzpmyvkr00000gn/T/animedlg3sh2stl/tmptjy94hcy
Traceback (most recent call last):
File "/usr/local/bin/anime", line 11, in <module>
load_entry_point('anime-downloader==4.3.0', 'console_scripts', 'anime')()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/cli.py", line 53, in main
cli()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/commands/dl.py", line 112, in command
for episode in animes:
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 198, in __iter__
yield episode_class(ep_id[1], parent=self, ep_no=ep_id[0])
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 293, in __init__
try_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 289, in try_data
self.get_data()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/anime.py", line 349, in get_data
self._sources = self._get_sources()
File "/usr/local/Cellar/anime-downloader/HEAD-16c6268/libexec/lib/python3.8/site-packages/anime_downloader/sites/ryuanime.py", line 64, in _get_sources
hosts = json.loads(soup.find("div", {"class":"col-sm-9"}).find("script").text[30:-6])
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
matt@matt:~$
|
json.decoder.JSONDecodeError
|
def _get_ongoing_dict_key(self, result):
if not isinstance(result, BaseResult):
raise ValueError(
"Any result using _get_ongoing_dict_key must subclass from "
"BaseResult. Provided result is of type: %s" % type(result)
)
key_parts = []
for result_property in [result.transfer_type, result.src, result.dest]:
if result_property is not None:
key_parts.append(ensure_text_type(result_property))
return ":".join(key_parts)
|
def _get_ongoing_dict_key(self, result):
if not isinstance(result, BaseResult):
raise ValueError(
"Any result using _get_ongoing_dict_key must subclass from "
"BaseResult. Provided result is of type: %s" % type(result)
)
return ":".join(str(el) for el in [result.transfer_type, result.src, result.dest])
|
https://github.com/aws/aws-cli/issues/2738
|
UnicodeEncodeError: 'ascii' codec can't encode character u'\u0303' in position 11: ordinal not in range(128)
2017-07-29 00:49:40,775 - Thread-1 - awscli.customizations.s3.results - DEBUG - Error processing result QueuedResult(transfer_type='upload'
Traceback (most recent call last):
File "/Users/jasonsturges/Library/Python/2.7/lib/python/site-packages/awscli/customizations/s3/results.py", line 604, in _process_result
result_handler(result)
File "/Users/jasonsturges/Library/Python/2.7/lib/python/site-packages/awscli/customizations/s3/results.py", line 241, in __call__
result=result)
File "/Users/jasonsturges/Library/Python/2.7/lib/python/site-packages/awscli/customizations/s3/results.py", line 267, in _record_queued_result
self._get_ongoing_dict_key(result)] = total_transfer_size
File "/Users/jasonsturges/Library/Python/2.7/lib/python/site-packages/awscli/customizations/s3/results.py", line 250, in _get_ongoing_dict_key
str(el) for el in [result.transfer_type, result.src, result.dest])
File "/Users/jasonsturges/Library/Python/2.7/lib/python/site-packages/awscli/customizations/s3/results.py", line 250, in <genexpr>
str(el) for el in [result.transfer_type, result.src, result.dest])
|
UnicodeEncodeError
|
def validate_arguments(self, args):
"""
Validates command line arguments using the retrieved information.
"""
if args.hostname:
instances = self.opsworks.describe_instances(StackId=self._stack["StackId"])[
"Instances"
]
if any(args.hostname.lower() == instance["Hostname"] for instance in instances):
raise ValueError(
"Invalid hostname: '%s'. Hostnames must be unique within "
"a stack." % args.hostname
)
if args.infrastructure_class == "ec2" and args.local:
# make sure the regions match
region = json.loads(ensure_text_type(urlopen(IDENTITY_URL).read()))["region"]
if region != self._stack["Region"]:
raise ValueError("The stack's and the instance's region must match.")
|
def validate_arguments(self, args):
"""
Validates command line arguments using the retrieved information.
"""
if args.hostname:
instances = self.opsworks.describe_instances(StackId=self._stack["StackId"])[
"Instances"
]
if any(args.hostname.lower() == instance["Hostname"] for instance in instances):
raise ValueError(
"Invalid hostname: '%s'. Hostnames must be unique within "
"a stack." % args.hostname
)
if args.infrastructure_class == "ec2" and args.local:
# make sure the regions match
region = json.loads(urlopen(IDENTITY_URL).read())["region"]
if region != self._stack["Region"]:
raise ValueError("The stack's and the instance's region must match.")
|
https://github.com/aws/aws-cli/issues/2247
|
aws --debug opsworks register --infrastructure-class ec2 --override-hostname 'somehostname' --region us-east-1 --stack-id 'some-stack-id' --local --use-instance-profile
2016-10-22 18:05:56,267 - MainThread - awscli.clidriver - DEBUG - CLI version: aws-cli/1.11.8 Python/3.5.2 Linux/4.4.0-45-generic botocore/1.4.65
2016-10-22 18:05:56,267 - MainThread - awscli.clidriver - DEBUG - Arguments entered to CLI: ['--debug', 'opsworks', 'register', '--infrastructure-class', 'ec2', '--override-hostname', 'somehostname', '--region', 'us-east-1', '--stack-id', 'some-stack-id', '--local', '--use-instance-profile']
2016-10-22 18:05:56,268 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function add_scalar_parsers at 0x7f10955af8c8>
2016-10-22 18:05:56,268 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function inject_assume_role_provider_cache at 0x7f109443b1e0>
2016-10-22 18:05:56,268 - MainThread - botocore.credentials - DEBUG - Skipping environment variable credential check because profile name was explicitly set.
2016-10-22 18:05:56,269 - MainThread - botocore.loaders - DEBUG - Loading JSON file: /usr/local/lib/python3.5/dist-packages/botocore/data/opsworks/2013-02-18/service-2.json
2016-10-22 18:05:56,274 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.opsworks: calling handler <function register_retries_for_service at 0x7f109481d840>
2016-10-22 18:05:56,275 - MainThread - botocore.handlers - DEBUG - Registering retry handlers for service: opsworks
2016-10-22 18:05:56,276 - MainThread - botocore.hooks - DEBUG - Event building-command-table.opsworks: calling handler <function inject_commands at 0x7f1094040ea0>
2016-10-22 18:05:56,277 - MainThread - botocore.hooks - DEBUG - Event building-command-table.opsworks: calling handler <function add_waiters at 0x7f10955b5c80>
2016-10-22 18:05:56,278 - MainThread - botocore.loaders - DEBUG - Loading JSON file: /usr/local/lib/python3.5/dist-packages/botocore/data/opsworks/2013-02-18/waiters-2.json
2016-10-22 18:05:56,279 - MainThread - botocore.hooks - DEBUG - Event building-command-table.register: calling handler <function add_waiters at 0x7f10955b5c80>
2016-10-22 18:05:56,280 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.stack-id: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,280 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.custom.register: calling handler <awscli.argprocess.ParamShorthandParser object at 0x7f109447acc0>
2016-10-22 18:05:56,280 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.anonymous: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,280 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.anonymous: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,280 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.anonymous: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.target: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.infrastructure-class: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.custom.register: calling handler <awscli.argprocess.ParamShorthandParser object at 0x7f109447acc0>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.local: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.custom.register: calling handler <awscli.argprocess.ParamShorthandParser object at 0x7f109447acc0>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.anonymous: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.use-instance-profile: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.custom.register: calling handler <awscli.argprocess.ParamShorthandParser object at 0x7f109447acc0>
2016-10-22 18:05:56,281 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.anonymous: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,282 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.custom.register.anonymous: calling handler <function uri_param at 0x7f109446ac80>
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: env
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: assume-role
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: shared-credentials-file
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: config-file
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: ec2-credentials-file
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: boto-config
2016-10-22 18:05:56,282 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: container-role
2016-10-22 18:05:56,283 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: iam-role
2016-10-22 18:05:56,286 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - INFO - Starting new HTTP connection (1): 169.254.169.254
2016-10-22 18:05:56,287 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "GET /latest/meta-data/iam/security-credentials/ HTTP/1.1" 200 17
2016-10-22 18:05:56,289 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - INFO - Starting new HTTP connection (1): 169.254.169.254
2016-10-22 18:05:56,290 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "GET /latest/meta-data/iam/security-credentials/openvpn-acme-test HTTP/1.1" 200 846
2016-10-22 18:05:56,290 - MainThread - botocore.credentials - INFO - Found credentials from IAM Role: openvpn-acme-test
2016-10-22 18:05:56,291 - MainThread - botocore.loaders - DEBUG - Loading JSON file: /usr/local/lib/python3.5/dist-packages/botocore/data/endpoints.json
2016-10-22 18:05:56,293 - MainThread - botocore.loaders - DEBUG - Loading JSON file: /usr/local/lib/python3.5/dist-packages/botocore/data/iam/2010-05-08/service-2.json
2016-10-22 18:05:56,300 - MainThread - botocore.client - DEBUG - Registering retry handlers for service: iam
2016-10-22 18:05:56,304 - MainThread - botocore.hooks - DEBUG - Event creating-client-class.iam: calling handler <function add_generate_presigned_url at 0x7f1094cc9b70>
2016-10-22 18:05:56,304 - MainThread - botocore.args - DEBUG - The s3 config key is not a dictionary type, ignoring its value of: None
2016-10-22 18:05:56,307 - MainThread - botocore.endpoint - DEBUG - Setting iam timeout as (60, 60)
2016-10-22 18:05:56,307 - MainThread - botocore.client - DEBUG - Registering retry handlers for service: opsworks
2016-10-22 18:05:56,309 - MainThread - botocore.hooks - DEBUG - Event creating-client-class.opsworks: calling handler <function add_generate_presigned_url at 0x7f1094cc9b70>
2016-10-22 18:05:56,309 - MainThread - botocore.args - DEBUG - The s3 config key is not a dictionary type, ignoring its value of: None
2016-10-22 18:05:56,311 - MainThread - botocore.endpoint - DEBUG - Setting opsworks timeout as (60, 60)
2016-10-22 18:05:56,311 - MainThread - awscli.customizations.opsworks - DEBUG - Retrieving stack and provisioning parameters
2016-10-22 18:05:56,312 - MainThread - botocore.endpoint - DEBUG - Making request for OperationModel(name=DescribeStacks) (verify_ssl=True) with params: {'url': 'https://opsworks.us-east-1.amazonaws.com/', 'method': 'POST', 'url_path': '/', 'query_string': '', 'context': {'client_region': 'us-east-1', 'has_streaming_input': False, 'client_config': <botocore.config.Config object at 0x7f1093c325c0>}, 'headers': {'User-Agent': 'aws-cli/1.11.8 Python/3.5.2 Linux/4.4.0-45-generic botocore/1.4.65', 'Content-Type': 'application/x-amz-json-1.1', 'X-Amz-Target': 'OpsWorks_20130218.DescribeStacks'}, 'body': b'{"StackIds": ["some-stack-id"]}'}
2016-10-22 18:05:56,312 - MainThread - botocore.hooks - DEBUG - Event request-created.opsworks.DescribeStacks: calling handler <bound method RequestSigner.handler of <botocore.signers.RequestSigner object at 0x7f1093c32550>>
2016-10-22 18:05:56,312 - MainThread - botocore.auth - DEBUG - Calculating signature using v4 auth.
2016-10-22 18:05:56,312 - MainThread - botocore.auth - DEBUG - CanonicalRequest:
POST
/
content-type:application/x-amz-json-1.1
host:opsworks.us-east-1.amazonaws.com
x-amz-date:20161022T180556Z
x-amz-security-token: some-token
x-amz-target:OpsWorks_20130218.DescribeStacks
content-type;host;x-amz-date;x-amz-security-token;x-amz-target
2bb6d9079ddc2fa0bb8fbf907d068de54083ba642ec1716e25ae9144e448fd33
2016-10-22 18:05:56,313 - MainThread - botocore.auth - DEBUG - StringToSign:
AWS4-HMAC-SHA256
20161022T180556Z
20161022/us-east-1/opsworks/aws4_request
bf2d221183683f7af42044ddfd0807ee49bf5fa822f65162ce75f8efb45c32a0
2016-10-22 18:05:56,313 - MainThread - botocore.auth - DEBUG - Signature:
6484214817d02cc9ca02450aea527c1c14eb00aafa476c177dc2edd7920f38da
2016-10-22 18:05:56,313 - MainThread - botocore.endpoint - DEBUG - Sending http request: <PreparedRequest [POST]>
2016-10-22 18:05:56,314 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - INFO - Starting new HTTPS connection (1): opsworks.us-east-1.amazonaws.com
2016-10-22 18:05:56,429 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "POST / HTTP/1.1" 200 1106
2016-10-22 18:05:56,430 - MainThread - botocore.parsers - DEBUG - Response headers: {'content-type': 'application/x-amz-json-1.1', 'date': 'Sat, 22 Oct 2016 18:05:56 GMT', 'x-amzn-requestid': '2dd31f64-9882-11e6-8835-01c3aa973fc7', 'content-length': '1106'}
2016-10-22 18:05:56,430 - MainThread - botocore.parsers - DEBUG - Response body:
b'{"Stacks":[{"AgentVersion":"LATEST","Arn":"arn:aws:opsworks:us-east-1:some-aws-id:stack/some-stack-id/","Attributes":{"Color":null},"ChefConfiguration":{"BerkshelfVersion":"3.2.0","ManageBerkshelf":false},"ConfigurationManager":{"Name":"Chef","Version":"12"},"CreatedAt":"2016-10-22T17:08:38+00:00","CustomCookbooksSource":{"Type":"s3","Url":"http://cookbooks2-openvpn-acme-test.s3-website-us-east-1.amazonaws.com/chef/stacks/networking/cookbooks.zip"},"CustomJson":"{}","DefaultAvailabilityZone":"us-east-1a","DefaultInstanceProfileArn":"arn:aws:iam::some-aws-id:instance-profile/ec2-infrastructure-openvpn-acme-test","DefaultOs":"Ubuntu 16.04 LTS","DefaultRootDeviceType":"instance-store","DefaultSshKeyName":"openvpn-acme-test","DefaultSubnetId":"subnet-141d9e5d","HostnameTheme":"Layer_Dependent","Name":"infrastructure-openvpn-acme-test","Region":"us-east-1","ServiceRoleArn":"arn:aws:iam::some-aws-id:role/infrastructure-openvpn-acme-test","StackId":"some-stack-id","UseCustomCookbooks":true,"UseOpsworksSecurityGroups":false,"VpcId":"vpc-8fbeb0e8"}]}'
2016-10-22 18:05:56,431 - MainThread - botocore.hooks - DEBUG - Event needs-retry.opsworks.DescribeStacks: calling handler <botocore.retryhandler.RetryHandler object at 0x7f1093da9470>
2016-10-22 18:05:56,431 - MainThread - botocore.retryhandler - DEBUG - No retry needed.
2016-10-22 18:05:56,431 - MainThread - botocore.endpoint - DEBUG - Making request for OperationModel(name=DescribeStackProvisioningParameters) (verify_ssl=True) with params: {'url': 'https://opsworks.us-east-1.amazonaws.com/', 'method': 'POST', 'url_path': '/', 'query_string': '', 'context': {'client_region': 'us-east-1', 'has_streaming_input': False, 'client_config': <botocore.config.Config object at 0x7f1093c325c0>}, 'headers': {'User-Agent': 'aws-cli/1.11.8 Python/3.5.2 Linux/4.4.0-45-generic botocore/1.4.65', 'Content-Type': 'application/x-amz-json-1.1', 'X-Amz-Target': 'OpsWorks_20130218.DescribeStackProvisioningParameters'}, 'body': b'{"StackId": "some-stack-id"}'}
2016-10-22 18:05:56,431 - MainThread - botocore.hooks - DEBUG - Event request-created.opsworks.DescribeStackProvisioningParameters: calling handler <bound method RequestSigner.handler of <botocore.signers.RequestSigner object at 0x7f1093c32550>>
2016-10-22 18:05:56,432 - MainThread - botocore.auth - DEBUG - Calculating signature using v4 auth.
2016-10-22 18:05:56,432 - MainThread - botocore.auth - DEBUG - CanonicalRequest:
POST
/
content-type:application/x-amz-json-1.1
host:opsworks.us-east-1.amazonaws.com
x-amz-date:20161022T180556Z
x-amz-security-token:some-token
x-amz-target:OpsWorks_20130218.DescribeStackProvisioningParameters
content-type;host;x-amz-date;x-amz-security-token;x-amz-target
ccf31206ea969d241cc87b22035e2e3ad196d6c996ec070531b946fc65753d88
2016-10-22 18:05:56,432 - MainThread - botocore.auth - DEBUG - StringToSign:
AWS4-HMAC-SHA256
20161022T180556Z
20161022/us-east-1/opsworks/aws4_request
d9e2299fc5c70b002be0a7069931417b432f5e95a50c6f6b5f077af3c802dd01
2016-10-22 18:05:56,432 - MainThread - botocore.auth - DEBUG - Signature:
97edbd6285455316bbf83e77d0504c9af16d0ca8bac776accdaf91d7f29a8599
2016-10-22 18:05:56,433 - MainThread - botocore.endpoint - DEBUG - Sending http request: <PreparedRequest [POST]>
2016-10-22 18:05:56,550 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "POST / HTTP/1.1" 200 1287
2016-10-22 18:05:56,551 - MainThread - botocore.parsers - DEBUG - Response headers: {'content-type': 'application/x-amz-json-1.1', 'date': 'Sat, 22 Oct 2016 18:05:56 GMT', 'x-amzn-requestid': '2de2af8d-9882-11e6-954b-3362066917b5', 'content-length': '1287'}
2016-10-22 18:05:56,551 - MainThread - botocore.parsers - DEBUG - Response body:
b'{"AgentInstallerUrl":"https://opsworks-instance-agent-us-east-1.s3.amazonaws.com/4018-20160825092619/opsworks-agent-installer.tgz","Parameters":{"agent_installer_base_url":"https://opsworks-instance-agent-us-east-1.s3.amazonaws.com","agent_installer_tgz":"opsworks-agent-installer.tgz","assets_download_bucket":"opsworks-instance-assets-us-east-1.s3.amazonaws.com","charlie_public_key":"-----BEGIN PUBLIC KEY-----\\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAni7eKdm34oaCvGiw96Fk\\nlyLX+aPfInYzilkk+AY3pXF6nijpQ2cm3ZeM2EoqZFTv3a/meosNBAs3Q3Sy1e4G\\n7Ibn/xwMof+iSBvimx3PGKFzNP0BhY9yS6AMEMxtmqksHb0glwmFeJcomdhxZV1F\\nziWTtL6ZEyvCg0I7rxGm1ceQmD25eK90VcZVh4LJtNfnwcZRM4eC+KK9Qllxw5hW\\nvB4Z52JMMZbEG9MYCLydWSY9rnVkAyQ0ngJUaJ3q7JsbkBV/J5BcrGgcbioR1k+h\\nINRoHwBQU9WnT/x8W+N6vwJb4o6v2hBR1H2GSDLwyZ7wC8EVH+XafWYpU1g/nSEe\\naQIDAQAB\\n-----END PUBLIC KEY-----","instance_service_endpoint":"opsworks-instance-service.us-east-1.amazonaws.com","instance_service_port":"443","instance_service_region":"us-east-1","instance_service_ssl_verify_peer":"true","instance_service_use_ssl":"true","ops_works_endpoint":"opsworks.us-east-1.amazonaws.com","ops_works_port":"443","ops_works_region":"us-east-1","ops_works_ssl_verify_peer":"true","ops_works_use_ssl":"true","verbose":"false","wait_between_runs":"30"}}'
2016-10-22 18:05:56,552 - MainThread - botocore.hooks - DEBUG - Event needs-retry.opsworks.DescribeStackProvisioningParameters: calling handler <botocore.retryhandler.RetryHandler object at 0x7f1093da9470>
2016-10-22 18:05:56,552 - MainThread - botocore.retryhandler - DEBUG - No retry needed.
2016-10-22 18:05:56,552 - MainThread - botocore.endpoint - DEBUG - Making request for OperationModel(name=DescribeInstances) (verify_ssl=True) with params: {'url': 'https://opsworks.us-east-1.amazonaws.com/', 'method': 'POST', 'url_path': '/', 'query_string': '', 'context': {'client_region': 'us-east-1', 'has_streaming_input': False, 'client_config': <botocore.config.Config object at 0x7f1093c325c0>}, 'headers': {'User-Agent': 'aws-cli/1.11.8 Python/3.5.2 Linux/4.4.0-45-generic botocore/1.4.65', 'Content-Type': 'application/x-amz-json-1.1', 'X-Amz-Target': 'OpsWorks_20130218.DescribeInstances'}, 'body': b'{"StackId": "some-stack-id"}'}
2016-10-22 18:05:56,552 - MainThread - botocore.hooks - DEBUG - Event request-created.opsworks.DescribeInstances: calling handler <bound method RequestSigner.handler of <botocore.signers.RequestSigner object at 0x7f1093c32550>>
2016-10-22 18:05:56,553 - MainThread - botocore.auth - DEBUG - Calculating signature using v4 auth.
2016-10-22 18:05:56,553 - MainThread - botocore.auth - DEBUG - CanonicalRequest:
POST
/
content-type:application/x-amz-json-1.1
host:opsworks.us-east-1.amazonaws.com
x-amz-date:20161022T180556Z
x-amz-security-token:some-token
x-amz-target:OpsWorks_20130218.DescribeInstances
content-type;host;x-amz-date;x-amz-security-token;x-amz-target
ccf31206ea969d241cc87b22035e2e3ad196d6c996ec070531b946fc65753d88
2016-10-22 18:05:56,553 - MainThread - botocore.auth - DEBUG - StringToSign:
AWS4-HMAC-SHA256
20161022T180556Z
20161022/us-east-1/opsworks/aws4_request
a30847fe72cbf9f6a813d492fbf1e6fb30a36d9a76bd1ba43e38db982b03c429
2016-10-22 18:05:56,553 - MainThread - botocore.auth - DEBUG - Signature:
b658ca9d578704199b5d528aa3bda0bbee78f3d82e3cce5b06c940ff8b2f6834
2016-10-22 18:05:56,553 - MainThread - botocore.endpoint - DEBUG - Sending http request: <PreparedRequest [POST]>
2016-10-22 18:05:56,665 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "POST / HTTP/1.1" 200 16
2016-10-22 18:05:56,666 - MainThread - botocore.parsers - DEBUG - Response headers: {'content-type': 'application/x-amz-json-1.1', 'date': 'Sat, 22 Oct 2016 18:05:55 GMT', 'x-amzn-requestid': '2df52610-9882-11e6-9c94-bbbf2061570e', 'content-length': '16'}
2016-10-22 18:05:56,666 - MainThread - botocore.parsers - DEBUG - Response body:
b'{"Instances":[]}'
2016-10-22 18:05:56,666 - MainThread - botocore.hooks - DEBUG - Event needs-retry.opsworks.DescribeInstances: calling handler <botocore.retryhandler.RetryHandler object at 0x7f1093da9470>
2016-10-22 18:05:56,666 - MainThread - botocore.retryhandler - DEBUG - No retry needed.
2016-10-22 18:05:56,669 - MainThread - awscli.clidriver - DEBUG - Exception caught in main()
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/awscli/clidriver.py", line 186, in main
return command_table[parsed_args.command](remaining, parsed_args)
File "/usr/local/lib/python3.5/dist-packages/awscli/clidriver.py", line 381, in __call__
return command_table[parsed_args.operation](remaining, parsed_globals)
File "/usr/local/lib/python3.5/dist-packages/awscli/customizations/commands.py", line 187, in __call__
return self._run_main(parsed_args, parsed_globals)
File "/usr/local/lib/python3.5/dist-packages/awscli/customizations/opsworks.py", line 144, in _run_main
self.validate_arguments(args)
File "/usr/local/lib/python3.5/dist-packages/awscli/customizations/opsworks.py", line 281, in validate_arguments
region = json.loads(urlopen(IDENTITY_URL).read())['region']
File "/usr/lib/python3.5/json/__init__.py", line 312, in loads
s.__class__.__name__))
TypeError: the JSON object must be str, not 'bytes'
2016-10-22 18:05:56,671 - MainThread - awscli.clidriver - DEBUG - Exiting with rc 255
the JSON object must be str, not 'bytes'
|
TypeError
|
def __init__(self):
# `autoreset` allows us to not have to sent reset sequences for every
# string. `strip` lets us preserve color when redirecting.
colorama.init(autoreset=True, strip=False)
|
def __init__(self):
# autoreset allows us to not have to sent
# reset sequences for every string.
colorama.init(autoreset=True)
|
https://github.com/aws/aws-cli/issues/2043
|
root@jeff-desktop:~# pip install awscli
Collecting awscli
Downloading awscli-1.10.43-py2.py3-none-any.whl (969kB)
100% |████████████████████████████████| 972kB 1.9MB/s
Exception:
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/pip/req/req_install.py", line 1006, in check_if_exists
self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
File "/usr/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl/pkg_resources/__init__.py", line 535, in get_distribution
dist = get_provider(dist)
File "/usr/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl/pkg_resources/__init__.py", line 415, in get_provider
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
File "/usr/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl/pkg_resources/__init__.py", line 695, in find
raise VersionConflict(dist, req)
pkg_resources.VersionConflict: (colorama 0.3.7 (/usr/lib/python3/dist-packages), Requirement.parse('colorama<=0.3.3,>=0.2.5'))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/pip/basecommand.py", line 209, in main
status = self.run(options, args)
File "/usr/lib/python3/dist-packages/pip/commands/install.py", line 328, in run
wb.build(autobuilding=True)
File "/usr/lib/python3/dist-packages/pip/wheel.py", line 748, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/lib/python3/dist-packages/pip/req/req_set.py", line 360, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/lib/python3/dist-packages/pip/req/req_set.py", line 448, in _prepare_file
req_to_install, finder)
File "/usr/lib/python3/dist-packages/pip/req/req_set.py", line 387, in _check_skip_installed
req_to_install.check_if_exists()
File "/usr/lib/python3/dist-packages/pip/req/req_install.py", line 1011, in check_if_exists
self.req.project_name
AttributeError: 'Requirement' object has no attribute 'project_name'
You are using pip version 8.1.1, however version 8.1.2 is available.
You should consider upgrading via the 'pip install --upgrade pip' command.
root@jeff-desktop:~#
|
AttributeError
|
def _send_output_to_pager(self, output):
cmdline = self.get_pager_cmdline()
if not self._exists_on_path(cmdline[0]):
LOG.debug("Pager '%s' not found in PATH, printing raw help." % cmdline[0])
self.output_stream.write(output.decode("utf-8") + "\n")
self.output_stream.flush()
return
LOG.debug("Running command: %s", cmdline)
with ignore_ctrl_c():
# We can't rely on the KeyboardInterrupt from
# the CLIDriver being caught because when we
# send the output to a pager it will use various
# control characters that need to be cleaned
# up gracefully. Otherwise if we simply catch
# the Ctrl-C and exit, it will likely leave the
# users terminals in a bad state and they'll need
# to manually run ``reset`` to fix this issue.
# Ignoring Ctrl-C solves this issue. It's also
# the default behavior of less (you can't ctrl-c
# out of a manpage).
p = self._popen(cmdline, stdin=PIPE)
p.communicate(input=output)
|
def _send_output_to_pager(self, output):
cmdline = self.get_pager_cmdline()
LOG.debug("Running command: %s", cmdline)
with ignore_ctrl_c():
# We can't rely on the KeyboardInterrupt from
# the CLIDriver being caught because when we
# send the output to a pager it will use various
# control characters that need to be cleaned
# up gracefully. Otherwise if we simply catch
# the Ctrl-C and exit, it will likely leave the
# users terminals in a bad state and they'll need
# to manually run ``reset`` to fix this issue.
# Ignoring Ctrl-C solves this issue. It's also
# the default behavior of less (you can't ctrl-c
# out of a manpage).
p = self._popen(cmdline, stdin=PIPE)
p.communicate(input=output)
|
https://github.com/aws/aws-cli/issues/1957
|
root@redacted:~# aws --debug help
2016-05-05 16:32:19,477 - MainThread - awscli.clidriver - DEBUG - CLI version: aws-cli/1.10.25 Python/2.7.9 Linux/4.4.5-15.26.amzn1.x86_64 botocore/1.4.16
2016-05-05 16:32:19,477 - MainThread - awscli.clidriver - DEBUG - Arguments entered to CLI: ['--debug', 'help']
2016-05-05 16:32:19,478 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function add_scalar_parsers at 0x7f9faee85e60>
2016-05-05 16:32:19,478 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function inject_assume_role_provider_cache at 0x7f9faee681b8>
2016-05-05 16:32:19,478 - MainThread - botocore.credentials - DEBUG - Skipping environment variable credential check because profile name was explicitly set.
2016-05-05 16:32:19,479 - MainThread - botocore.hooks - DEBUG - Event doc-breadcrumbs.aws: calling handler <bound method ProviderDocumentEventHandler.doc_breadcrumbs of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,479 - MainThread - botocore.hooks - DEBUG - Event doc-title.aws: calling handler <bound method ProviderDocumentEventHandler.doc_title of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,479 - MainThread - botocore.hooks - DEBUG - Event doc-description.aws: calling handler <bound method ProviderDocumentEventHandler.doc_description of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,479 - MainThread - botocore.hooks - DEBUG - Event doc-description.aws: calling handler <function add_paging_description at 0x7f9faf01a5f0>
2016-05-05 16:32:19,480 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-start.aws: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_start of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,480 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.debug: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,480 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.endpoint-url: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,480 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.no-verify-ssl: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,480 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.no-paginate: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,481 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.output: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,481 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.query: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,481 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.profile: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,481 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.region: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,481 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.version: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,482 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.color: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,482 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.no-sign-request: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,482 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.ca-bundle: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,482 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.cli-read-timeout: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,482 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-option.aws.cli-connect-timeout: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,483 - MainThread - botocore.hooks - DEBUG - Event doc-synopsis-end.aws: calling handler <bound method ProviderDocumentEventHandler.doc_synopsis_end of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,483 - MainThread - botocore.hooks - DEBUG - Event doc-options-start.aws: calling handler <bound method ProviderDocumentEventHandler.doc_options_start of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,483 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.debug: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,483 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.endpoint-url: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,484 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.no-verify-ssl: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,484 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.no-paginate: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,484 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.output: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,485 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.query: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,485 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.profile: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,485 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.region: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,486 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.version: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,486 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.color: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,486 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.no-sign-request: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,487 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.ca-bundle: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,487 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.cli-read-timeout: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,487 - MainThread - botocore.hooks - DEBUG - Event doc-option.aws.cli-connect-timeout: calling handler <bound method ProviderDocumentEventHandler.doc_option of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,488 - MainThread - botocore.hooks - DEBUG - Event doc-subitems-start.aws: calling handler <bound method ProviderDocumentEventHandler.doc_subitems_start of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,488 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.acm: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,488 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.apigateway: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,488 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.autoscaling: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,489 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudformation: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,489 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudfront: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,489 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudhsm: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,489 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudsearch: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,489 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudsearchdomain: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,490 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudtrail: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,490 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cloudwatch: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,490 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.codecommit: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,490 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.codepipeline: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,490 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cognito-identity: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,491 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cognito-idp: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,491 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.cognito-sync: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,491 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.configservice: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,491 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.configure: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,491 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.datapipeline: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,491 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.deploy: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,492 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.devicefarm: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,492 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.directconnect: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,492 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.dms: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,492 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.ds: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,493 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.dynamodb: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,493 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.dynamodbstreams: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,493 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.ec2: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,493 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.ecr: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,493 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.ecs: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,494 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.efs: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,494 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.elasticache: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,494 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.elasticbeanstalk: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,494 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.elastictranscoder: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,494 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.elb: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,495 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.emr: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,495 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.es: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,495 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.events: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,495 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.firehose: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,495 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.gamelift: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,496 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.glacier: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,496 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.help: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,496 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.iam: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,496 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.importexport: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,496 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.inspector: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,497 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.iot: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,497 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.iot-data: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,497 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.kinesis: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,497 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.kms: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,497 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.lambda: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,497 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.logs: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,498 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.machinelearning: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,498 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.marketplacecommerceanalytics: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,498 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.meteringmarketplace: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,498 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.opsworks: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,499 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.rds: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,499 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.redshift: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,499 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.route53: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,499 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.route53domains: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,499 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.s3: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,500 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.s3api: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,500 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.sdb: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,500 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.ses: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,500 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.sns: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,500 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.sqs: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,501 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.ssm: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,501 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.storagegateway: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,501 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.sts: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,501 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.support: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,501 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.swf: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,502 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.waf: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,502 - MainThread - botocore.hooks - DEBUG - Event doc-subitem.aws.workspaces: calling handler <bound method ProviderDocumentEventHandler.doc_subitem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,503 - MainThread - botocore.hooks - DEBUG - Event doc-relateditems-start.aws: calling handler <bound method ProviderDocumentEventHandler.doc_relateditems_start of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,503 - MainThread - botocore.hooks - DEBUG - Event doc-relateditem.aws.aws help topics: calling handler <bound method ProviderDocumentEventHandler.doc_relateditem of <awscli.clidocs.ProviderDocumentEventHandler object at 0x7f9fae9519d0>>
2016-05-05 16:32:19,566 - MainThread - awscli.help - DEBUG - Running command: ['groff', '-m', 'man', '-T', 'ascii']
2016-05-05 16:32:19,582 - MainThread - awscli.help - DEBUG - Running command: ['less', '-R']
2016-05-05 16:32:19,584 - MainThread - awscli.clidriver - DEBUG - Exception caught in main()
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 186, in main
return command_table[parsed_args.command](remaining, parsed_args)
File "/usr/local/lib/python2.7/dist-packages/awscli/help.py", line 264, in __call__
self.renderer.render(self.doc.getvalue())
File "/usr/local/lib/python2.7/dist-packages/awscli/help.py", line 82, in render
self._send_output_to_pager(converted_content)
File "/usr/local/lib/python2.7/dist-packages/awscli/help.py", line 130, in _send_output_to_pager
p = self._popen(cmdline, stdin=PIPE)
File "/usr/local/lib/python2.7/dist-packages/awscli/help.py", line 91, in _popen
return Popen(*args, **kwargs)
File "/usr/lib/python2.7/subprocess.py", line 710, in __init__
errread, errwrite)
File "/usr/lib/python2.7/subprocess.py", line 1335, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
2016-05-05 16:32:19,587 - MainThread - awscli.clidriver - DEBUG - Exiting with rc 255
[Errno 2] No such file or directory
|
OSError
|
def unify_paging_params(argument_table, operation_model, event_name, session, **kwargs):
paginator_config = get_paginator_config(
session, operation_model.service_model.service_name, operation_model.name
)
if paginator_config is None:
# We only apply these customizations to paginated responses.
return
logger.debug("Modifying paging parameters for operation: %s", operation_model.name)
_remove_existing_paging_arguments(argument_table, paginator_config)
parsed_args_event = event_name.replace(
"building-argument-table.", "operation-args-parsed."
)
shadowed_args = {}
add_paging_argument(
argument_table,
"starting-token",
PageArgument("starting-token", STARTING_TOKEN_HELP, parse_type="string"),
shadowed_args,
)
input_members = operation_model.input_shape.members
type_name = "integer"
if "limit_key" in paginator_config:
limit_key_shape = input_members[paginator_config["limit_key"]]
type_name = limit_key_shape.type_name
if type_name not in PageArgument.type_map:
raise TypeError(
(
"Unsupported pagination type {0} for operation {1}"
" and parameter {2}"
).format(type_name, operation_model.name, paginator_config["limit_key"])
)
add_paging_argument(
argument_table,
"page-size",
PageArgument("page-size", PAGE_SIZE_HELP, parse_type=type_name),
shadowed_args,
)
add_paging_argument(
argument_table,
"max-items",
PageArgument("max-items", MAX_ITEMS_HELP, parse_type=type_name),
shadowed_args,
)
session.register(
parsed_args_event,
partial(
check_should_enable_pagination,
list(_get_all_cli_input_tokens(paginator_config)),
shadowed_args,
argument_table,
),
)
|
def unify_paging_params(argument_table, operation_model, event_name, session, **kwargs):
paginator_config = get_paginator_config(
session, operation_model.service_model.service_name, operation_model.name
)
if paginator_config is None:
# We only apply these customizations to paginated responses.
return
logger.debug("Modifying paging parameters for operation: %s", operation_model.name)
_remove_existing_paging_arguments(argument_table, paginator_config)
parsed_args_event = event_name.replace(
"building-argument-table.", "operation-args-parsed."
)
session.register(
parsed_args_event,
partial(
check_should_enable_pagination,
list(_get_all_cli_input_tokens(paginator_config)),
),
)
argument_table["starting-token"] = PageArgument(
"starting-token", STARTING_TOKEN_HELP, parse_type="string"
)
input_members = operation_model.input_shape.members
type_name = "integer"
if "limit_key" in paginator_config:
limit_key_shape = input_members[paginator_config["limit_key"]]
type_name = limit_key_shape.type_name
if type_name not in PageArgument.type_map:
raise TypeError(
(
"Unsupported pagination type {0} for operation {1}"
" and parameter {2}"
).format(type_name, operation_model.name, paginator_config["limit_key"])
)
argument_table["page-size"] = PageArgument(
"page-size", PAGE_SIZE_HELP, parse_type=type_name
)
argument_table["max-items"] = PageArgument(
"max-items", MAX_ITEMS_HELP, parse_type=type_name
)
|
https://github.com/aws/aws-cli/issues/1247
|
2015-03-26 06:10:27,908 - MainThread - awscli.clidriver - DEBUG - CLI version: aws-cli/1.7.16 Python/2.7.6 Linux/3.13.0-48-generic, botocore version: 0.97.0
2015-03-26 06:10:27,908 - MainThread - awscli.clidriver - DEBUG - Arguments entered to CLI: ['route53', 'list-resource-record-sets', '--hosted-zone-id', 'XYZABC', '--max-items', '1', '--start-record-name', 'hostname.example.com', '--start-record-type', 'CNAME', '--debug']
2015-03-26 06:10:27,908 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function add_scalar_parsers at 0x7f4cac5109b0>
2015-03-26 06:10:27,909 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function inject_assume_role_provider at 0x7f4cac7d0668>
2015-03-26 06:10:27,925 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.route53: calling handler <function register_retries_for_service at 0x7f4caccae050>
2015-03-26 06:10:27,928 - MainThread - botocore.handlers - DEBUG - Registering retry handlers for service: route53
2015-03-26 06:10:27,929 - MainThread - botocore.hooks - DEBUG - Event building-command-table.route53: calling handler <function add_waiters at 0x7f4cac75d410>
2015-03-26 06:10:27,931 - MainThread - awscli.clidriver - DEBUG - OrderedDict([(u'hosted-zone-id', <awscli.arguments.CLIArgument object at 0x7f4cac389650>), (u'start-record-name', <awscli.arguments.CLIArgument object at 0x7f4cac3896d0>), (u'start-record-type', <awscli.arguments.CLIArgument object at 0x7f4cac389710>), (u'start-record-identifier', <awscli.arguments.CLIArgument object at 0x7f4cac389410>), (u'max-items', <awscli.arguments.CLIArgument object at 0x7f4cac389750>)])
2015-03-26 06:10:27,931 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function add_streaming_output_arg at 0x7f4cac8b2410>
2015-03-26 06:10:27,931 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function add_cli_input_json at 0x7f4cac7cb8c0>
2015-03-26 06:10:27,931 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function unify_paging_params at 0x7f4cac8b8320>
2015-03-26 06:10:27,932 - MainThread - awscli.customizations.paginate - DEBUG - Modifying paging parameters for operation: ListResourceRecordSets
2015-03-26 06:10:27,932 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function add_generate_skeleton at 0x7f4cac7cbde8>
2015-03-26 06:10:27,932 - MainThread - botocore.hooks - DEBUG - Event before-building-argument-table-parser.route53.list-resource-record-sets: calling handler <bound method CliInputJSONArgument.override_required_args of <awscli.customizations.cliinputjson.CliInputJSONArgument object at 0x7f4cac389790>>
2015-03-26 06:10:27,932 - MainThread - botocore.hooks - DEBUG - Event before-building-argument-table-parser.route53.list-resource-record-sets: calling handler <bound method GenerateCliSkeletonArgument.override_required_args of <awscli.customizations.generatecliskeleton.GenerateCliSkeletonArgument object at 0x7f4cac389bd0>>
2015-03-26 06:10:27,933 - MainThread - botocore.hooks - DEBUG - Event operation-args-parsed.route53.list-resource-record-sets: calling handler <functools.partial object at 0x7f4cac3831b0>
2015-03-26 06:10:27,934 - MainThread - awscli.customizations.paginate - DEBUG - User has specified a manual pagination arg. Automatically setting --no-paginate.
2015-03-26 06:10:27,934 - MainThread - awscli.customizations.paginate - DEBUG - User has specified a manual pagination arg. Automatically setting --no-paginate.
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.hosted-zone-id: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.route53.list-resource-record-sets: calling handler <awscli.argprocess.ParamShorthand object at 0x7f4cac5193d0>
2015-03-26 06:10:27,934 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2015-03-26 06:10:27,934 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'XYZABC' for parameter "hosted_zone_id": u'XYZABC'
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.start-record-name: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.route53.list-resource-record-sets: calling handler <awscli.argprocess.ParamShorthand object at 0x7f4cac5193d0>
2015-03-26 06:10:27,934 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2015-03-26 06:10:27,934 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'hostname.example.com' for parameter "start_record_name": u'hostname.example.com'
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.start-record-type: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.route53.list-resource-record-sets: calling handler <awscli.argprocess.ParamShorthand object at 0x7f4cac5193d0>
2015-03-26 06:10:27,935 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2015-03-26 06:10:27,935 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'CNAME' for parameter "start_record_type": u'CNAME'
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.start-record-identifier: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.max-items: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.cli-input-json: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.starting-token: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.page-size: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.generate-cli-skeleton: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event calling-command.route53.list-resource-record-sets: calling handler <bound method GenerateCliSkeletonArgument.generate_json_skeleton of <awscli.customizations.generatecliskeleton.GenerateCliSkeletonArgument object at 0x7f4cac389bd0>>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event calling-command.route53.list-resource-record-sets: calling handler <bound method CliInputJSONArgument.add_to_call_parameters of <awscli.customizations.cliinputjson.CliInputJSONArgument object at 0x7f4cac389790>>
2015-03-26 06:10:27,936 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: env
2015-03-26 06:10:27,936 - MainThread - botocore.credentials - INFO - Found credentials in environment variables.
2015-03-26 06:10:27,953 - MainThread - botocore.client - DEBUG - Registering retry handlers for service: route53
2015-03-26 06:10:27,953 - MainThread - botocore.client - DEBUG - Registering retry handlers for service: route53
2015-03-26 06:10:27,957 - MainThread - botocore.hooks - DEBUG - Event before-parameter-build.route53.ListResourceRecordSets: calling handler <function fix_route53_ids at 0x7f4caccae578>
2015-03-26 06:10:27,957 - MainThread - botocore.handlers - DEBUG - HostedZoneId XYZABC -> XYZABC
2015-03-26 06:10:27,957 - MainThread - awscli.clidriver - DEBUG - Exception caught in main()
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 183, in main
return command_table[parsed_args.command](remaining, parsed_args)
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 367, in __call__
return command_table[parsed_args.operation](remaining, parsed_globals)
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 534, in __call__
call_parameters, parsed_globals)
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 657, in invoke
**parameters)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 187, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 228, in _make_api_call
api_params, operation_model)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 259, in _convert_to_request_dict
api_params, operation_model)
File "/usr/local/lib/python2.7/dist-packages/botocore/validate.py", line 271, in serialize_to_request
raise ParamValidationError(report=report.generate_report())
ParamValidationError: Parameter validation failed:
Unknown parameter in input: "max_items", must be one of: HostedZoneId, StartRecordName, StartRecordType, StartRecordIdentifier, MaxItems
2015-03-26 06:10:27,958 - MainThread - awscli.clidriver - DEBUG - Exiting with rc 255
Parameter validation failed:
Unknown parameter in input: "max_items", must be one of: HostedZoneId, StartRecordName, StartRecordType, StartRecordIdentifier, MaxItems
|
ParamValidationError
|
def check_should_enable_pagination(
input_tokens, shadowed_args, argument_table, parsed_args, parsed_globals, **kwargs
):
normalized_paging_args = ["start_token", "max_items"]
for token in input_tokens:
py_name = token.replace("-", "_")
if (
getattr(parsed_args, py_name) is not None
and py_name not in normalized_paging_args
):
# The user has specified a manual (undocumented) pagination arg.
# We need to automatically turn pagination off.
logger.debug(
"User has specified a manual pagination arg. "
"Automatically setting --no-paginate."
)
parsed_globals.paginate = False
# Because we've now disabled pagination, there's a chance that
# we were shadowing arguments. For example, we inject a
# --max-items argument in unify_paging_params(). If the
# the operation also provides its own MaxItems (which we
# expose as --max-items) then our custom pagination arg
# was shadowing the customers arg. When we turn pagination
# off we need to put back the original argument which is
# what we're doing here.
for key, value in shadowed_args.items():
argument_table[key] = value
|
def check_should_enable_pagination(input_tokens, parsed_args, parsed_globals, **kwargs):
normalized_paging_args = ["start_token", "max_items"]
for token in input_tokens:
py_name = token.replace("-", "_")
if (
getattr(parsed_args, py_name) is not None
and py_name not in normalized_paging_args
):
# The user has specified a manual (undocumented) pagination arg.
# We need to automatically turn pagination off.
logger.debug(
"User has specified a manual pagination arg. "
"Automatically setting --no-paginate."
)
parsed_globals.paginate = False
|
https://github.com/aws/aws-cli/issues/1247
|
2015-03-26 06:10:27,908 - MainThread - awscli.clidriver - DEBUG - CLI version: aws-cli/1.7.16 Python/2.7.6 Linux/3.13.0-48-generic, botocore version: 0.97.0
2015-03-26 06:10:27,908 - MainThread - awscli.clidriver - DEBUG - Arguments entered to CLI: ['route53', 'list-resource-record-sets', '--hosted-zone-id', 'XYZABC', '--max-items', '1', '--start-record-name', 'hostname.example.com', '--start-record-type', 'CNAME', '--debug']
2015-03-26 06:10:27,908 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function add_scalar_parsers at 0x7f4cac5109b0>
2015-03-26 06:10:27,909 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function inject_assume_role_provider at 0x7f4cac7d0668>
2015-03-26 06:10:27,925 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.route53: calling handler <function register_retries_for_service at 0x7f4caccae050>
2015-03-26 06:10:27,928 - MainThread - botocore.handlers - DEBUG - Registering retry handlers for service: route53
2015-03-26 06:10:27,929 - MainThread - botocore.hooks - DEBUG - Event building-command-table.route53: calling handler <function add_waiters at 0x7f4cac75d410>
2015-03-26 06:10:27,931 - MainThread - awscli.clidriver - DEBUG - OrderedDict([(u'hosted-zone-id', <awscli.arguments.CLIArgument object at 0x7f4cac389650>), (u'start-record-name', <awscli.arguments.CLIArgument object at 0x7f4cac3896d0>), (u'start-record-type', <awscli.arguments.CLIArgument object at 0x7f4cac389710>), (u'start-record-identifier', <awscli.arguments.CLIArgument object at 0x7f4cac389410>), (u'max-items', <awscli.arguments.CLIArgument object at 0x7f4cac389750>)])
2015-03-26 06:10:27,931 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function add_streaming_output_arg at 0x7f4cac8b2410>
2015-03-26 06:10:27,931 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function add_cli_input_json at 0x7f4cac7cb8c0>
2015-03-26 06:10:27,931 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function unify_paging_params at 0x7f4cac8b8320>
2015-03-26 06:10:27,932 - MainThread - awscli.customizations.paginate - DEBUG - Modifying paging parameters for operation: ListResourceRecordSets
2015-03-26 06:10:27,932 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.route53.list-resource-record-sets: calling handler <function add_generate_skeleton at 0x7f4cac7cbde8>
2015-03-26 06:10:27,932 - MainThread - botocore.hooks - DEBUG - Event before-building-argument-table-parser.route53.list-resource-record-sets: calling handler <bound method CliInputJSONArgument.override_required_args of <awscli.customizations.cliinputjson.CliInputJSONArgument object at 0x7f4cac389790>>
2015-03-26 06:10:27,932 - MainThread - botocore.hooks - DEBUG - Event before-building-argument-table-parser.route53.list-resource-record-sets: calling handler <bound method GenerateCliSkeletonArgument.override_required_args of <awscli.customizations.generatecliskeleton.GenerateCliSkeletonArgument object at 0x7f4cac389bd0>>
2015-03-26 06:10:27,933 - MainThread - botocore.hooks - DEBUG - Event operation-args-parsed.route53.list-resource-record-sets: calling handler <functools.partial object at 0x7f4cac3831b0>
2015-03-26 06:10:27,934 - MainThread - awscli.customizations.paginate - DEBUG - User has specified a manual pagination arg. Automatically setting --no-paginate.
2015-03-26 06:10:27,934 - MainThread - awscli.customizations.paginate - DEBUG - User has specified a manual pagination arg. Automatically setting --no-paginate.
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.hosted-zone-id: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.route53.list-resource-record-sets: calling handler <awscli.argprocess.ParamShorthand object at 0x7f4cac5193d0>
2015-03-26 06:10:27,934 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2015-03-26 06:10:27,934 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'XYZABC' for parameter "hosted_zone_id": u'XYZABC'
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.start-record-name: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.route53.list-resource-record-sets: calling handler <awscli.argprocess.ParamShorthand object at 0x7f4cac5193d0>
2015-03-26 06:10:27,934 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2015-03-26 06:10:27,934 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'hostname.example.com' for parameter "start_record_name": u'hostname.example.com'
2015-03-26 06:10:27,934 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.start-record-type: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.route53.list-resource-record-sets: calling handler <awscli.argprocess.ParamShorthand object at 0x7f4cac5193d0>
2015-03-26 06:10:27,935 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2015-03-26 06:10:27,935 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'CNAME' for parameter "start_record_type": u'CNAME'
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.start-record-identifier: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.max-items: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.cli-input-json: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.starting-token: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.page-size: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.route53.list-resource-record-sets.generate-cli-skeleton: calling handler <function uri_param at 0x7f4cac8edc80>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event calling-command.route53.list-resource-record-sets: calling handler <bound method GenerateCliSkeletonArgument.generate_json_skeleton of <awscli.customizations.generatecliskeleton.GenerateCliSkeletonArgument object at 0x7f4cac389bd0>>
2015-03-26 06:10:27,935 - MainThread - botocore.hooks - DEBUG - Event calling-command.route53.list-resource-record-sets: calling handler <bound method CliInputJSONArgument.add_to_call_parameters of <awscli.customizations.cliinputjson.CliInputJSONArgument object at 0x7f4cac389790>>
2015-03-26 06:10:27,936 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: env
2015-03-26 06:10:27,936 - MainThread - botocore.credentials - INFO - Found credentials in environment variables.
2015-03-26 06:10:27,953 - MainThread - botocore.client - DEBUG - Registering retry handlers for service: route53
2015-03-26 06:10:27,953 - MainThread - botocore.client - DEBUG - Registering retry handlers for service: route53
2015-03-26 06:10:27,957 - MainThread - botocore.hooks - DEBUG - Event before-parameter-build.route53.ListResourceRecordSets: calling handler <function fix_route53_ids at 0x7f4caccae578>
2015-03-26 06:10:27,957 - MainThread - botocore.handlers - DEBUG - HostedZoneId XYZABC -> XYZABC
2015-03-26 06:10:27,957 - MainThread - awscli.clidriver - DEBUG - Exception caught in main()
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 183, in main
return command_table[parsed_args.command](remaining, parsed_args)
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 367, in __call__
return command_table[parsed_args.operation](remaining, parsed_globals)
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 534, in __call__
call_parameters, parsed_globals)
File "/usr/local/lib/python2.7/dist-packages/awscli/clidriver.py", line 657, in invoke
**parameters)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 187, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 228, in _make_api_call
api_params, operation_model)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 259, in _convert_to_request_dict
api_params, operation_model)
File "/usr/local/lib/python2.7/dist-packages/botocore/validate.py", line 271, in serialize_to_request
raise ParamValidationError(report=report.generate_report())
ParamValidationError: Parameter validation failed:
Unknown parameter in input: "max_items", must be one of: HostedZoneId, StartRecordName, StartRecordType, StartRecordIdentifier, MaxItems
2015-03-26 06:10:27,958 - MainThread - awscli.clidriver - DEBUG - Exiting with rc 255
Parameter validation failed:
Unknown parameter in input: "max_items", must be one of: HostedZoneId, StartRecordName, StartRecordType, StartRecordIdentifier, MaxItems
|
ParamValidationError
|
def add_to_params(self, parameters, value):
if value:
try:
if value == "-1" or value == "all":
fromstr = "-1"
tostr = "-1"
elif "-" in value:
# We can get away with simple logic here because
# argparse will not allow values such as
# "-1-8", and these aren't actually valid
# values any from from/to ports.
fromstr, tostr = value.split("-", 1)
else:
fromstr, tostr = (value, value)
_build_ip_permissions(parameters, "FromPort", int(fromstr))
_build_ip_permissions(parameters, "ToPort", int(tostr))
except ValueError:
msg = "port parameter should be of the form <from[-to]> (e.g. 22 or 22-25)"
raise ValueError(msg)
|
def add_to_params(self, parameters, value):
if value:
try:
if value == "-1" or value == "all":
fromstr = "-1"
tostr = "-1"
elif "-" in value:
fromstr, tostr = value.split("-")
else:
fromstr, tostr = (value, value)
_build_ip_permissions(parameters, "FromPort", int(fromstr))
_build_ip_permissions(parameters, "ToPort", int(tostr))
except ValueError:
msg = "port parameter should be of the form <from[-to]> (e.g. 22 or 22-25)"
raise ValueError(msg)
|
https://github.com/aws/aws-cli/issues/1075
|
2014-12-30 15:27:33,378 - MainThread - awscli.clidriver - DEBUG - CLI version: aws-cli/1.6.10 Python/2.7.8 Linux/3.17.7-300.fc21.x86_64, botocore version: 0.80.0
2014-12-30 15:27:33,378 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function add_scalar_parsers at 0x7fa461ddded8>
2014-12-30 15:27:33,378 - MainThread - botocore.hooks - DEBUG - Event session-initialized: calling handler <function inject_assume_role_provider at 0x7fa4622362a8>
2014-12-30 15:27:33,378 - MainThread - botocore.service - DEBUG - Creating service object for: ec2
2014-12-30 15:27:33,451 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.ec2: calling handler <function register_retries_for_service at 0x7fa462a37320>
2014-12-30 15:27:33,458 - MainThread - botocore.handlers - DEBUG - Registering retry handlers for service: ec2
2014-12-30 15:27:33,458 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.ec2: calling handler <function signature_overrides at 0x7fa462a37488>
2014-12-30 15:27:33,458 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.ec2: calling handler <function register_retries_for_service at 0x7fa462a37320>
2014-12-30 15:27:33,459 - MainThread - botocore.handlers - DEBUG - Registering retry handlers for service: ec2
2014-12-30 15:27:33,459 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.ec2: calling handler <function signature_overrides at 0x7fa462a37488>
2014-12-30 15:27:33,459 - MainThread - botocore.service - DEBUG - Creating operation objects for: Service(ec2)
2014-12-30 15:27:33,474 - MainThread - botocore.hooks - DEBUG - Event building-command-table.ec2: calling handler <functools.partial object at 0x7fa461df5578>
2014-12-30 15:27:33,474 - MainThread - awscli.customizations.removals - DEBUG - Removing operation: import-instance
2014-12-30 15:27:33,474 - MainThread - awscli.customizations.removals - DEBUG - Removing operation: import-volume
2014-12-30 15:27:33,474 - MainThread - botocore.hooks - DEBUG - Event building-command-table.ec2: calling handler <function add_waiters at 0x7fa462236de8>
2014-12-30 15:27:33,477 - MainThread - awscli.clidriver - DEBUG - OrderedDict([(u'dry-run', <awscli.arguments.BooleanArgument object at 0x7fa461d4e310>), (u'no-dry-run', <awscli.arguments.BooleanArgument object at 0x7fa461d4e350>), (u'group-name', <awscli.arguments.CLIArgument object at 0x7fa461d4e390>), (u'group-id', <awscli.arguments.CLIArgument object at 0x7fa461d4e3d0>), (u'source-security-group-name', <awscli.arguments.CLIArgument object at 0x7fa461d4e410>), (u'source-security-group-owner-id', <awscli.arguments.CLIArgument object at 0x7fa461d4e450>), (u'ip-protocol', <awscli.arguments.CLIArgument object at 0x7fa461d4e490>), (u'from-port', <awscli.arguments.CLIArgument object at 0x7fa461d4e4d0>), (u'to-port', <awscli.arguments.CLIArgument object at 0x7fa461d4e510>), (u'cidr-ip', <awscli.arguments.CLIArgument object at 0x7fa461d4e550>), (u'ip-permissions', <awscli.arguments.ListArgument object at 0x7fa461d4e590>)])
2014-12-30 15:27:33,478 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function add_streaming_output_arg at 0x7fa4623719b0>
2014-12-30 15:27:33,478 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function _rename_arg at 0x7fa461df0b90>
2014-12-30 15:27:33,478 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function _rename_arg at 0x7fa461df0c80>
2014-12-30 15:27:33,478 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <functools.partial object at 0x7fa461df5fc8>
2014-12-30 15:27:33,479 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function _add_params at 0x7fa46232f1b8>
2014-12-30 15:27:33,479 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function add_cli_input_json at 0x7fa462227578>
2014-12-30 15:27:33,479 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function unify_paging_params at 0x7fa4623919b0>
2014-12-30 15:27:33,480 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.ec2.revoke-security-group-ingress: calling handler <function add_generate_skeleton at 0x7fa4622272a8>
2014-12-30 15:27:33,480 - MainThread - botocore.hooks - DEBUG - Event before-building-argument-table-parser.ec2.revoke-security-group-ingress: calling handler <bound method CliInputJSONArgument.override_required_args of <awscli.customizations.cliinputjson.CliInputJSONArgument object at 0x7fa461d4ea50>>
2014-12-30 15:27:33,480 - MainThread - botocore.hooks - DEBUG - Event before-building-argument-table-parser.ec2.revoke-security-group-ingress: calling handler <bound method GenerateCliSkeletonArgument.override_required_args of <awscli.customizations.generatecliskeleton.GenerateCliSkeletonArgument object at 0x7fa461d2ddd0>>
2014-12-30 15:27:33,481 - MainThread - botocore.hooks - DEBUG - Event operation-args-parsed.ec2.revoke-security-group-ingress: calling handler <functools.partial object at 0x7fa46181acb0>
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event operation-args-parsed.ec2.revoke-security-group-ingress: calling handler <function _check_args at 0x7fa46232f230>
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.dry-run: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.group-name: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.ec2.revoke-security-group-ingress: calling handler <awscli.argprocess.ParamShorthand object at 0x7fa461de87d0>
2014-12-30 15:27:33,482 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2014-12-30 15:27:33,482 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'testingicmp' for parameter "group_name": u'testingicmp'
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.group-id: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.source-security-group-name: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,482 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.source-security-group-owner-id: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.ip-protocol: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.from-port: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.ec2.revoke-security-group-ingress: calling handler <awscli.argprocess.ParamShorthand object at 0x7fa461de87d0>
2014-12-30 15:27:33,483 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2014-12-30 15:27:33,483 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'8' for parameter "from_port": 8
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.to-port: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.ec2.revoke-security-group-ingress: calling handler <awscli.argprocess.ParamShorthand object at 0x7fa461de87d0>
2014-12-30 15:27:33,483 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2014-12-30 15:27:33,483 - MainThread - awscli.arguments - DEBUG - Unpacked value of u'-1' for parameter "to_port": -1
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.cidr-ip: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,483 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.ip-permissions: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.protocol: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.port: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.cidr: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.source-group: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.group-owner: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.cli-input-json: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.ec2.revoke-security-group-ingress.generate-cli-skeleton: calling handler <function uri_param at 0x7fa4623c8d70>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event calling-command.ec2.revoke-security-group-ingress: calling handler <bound method GenerateCliSkeletonArgument.generate_json_skeleton of <awscli.customizations.generatecliskeleton.GenerateCliSkeletonArgument object at 0x7fa461d2ddd0>>
2014-12-30 15:27:33,484 - MainThread - botocore.hooks - DEBUG - Event calling-command.ec2.revoke-security-group-ingress: calling handler <bound method CliInputJSONArgument.add_to_call_parameters of <awscli.customizations.cliinputjson.CliInputJSONArgument object at 0x7fa461d4ea50>>
2014-12-30 15:27:33,484 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: env
2014-12-30 15:27:33,485 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: assume-role
2014-12-30 15:27:33,485 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: shared-credentials-file
2014-12-30 15:27:33,485 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: config-file
2014-12-30 15:27:33,486 - MainThread - botocore.credentials - INFO - Credentials found in config file: ~/.aws/config
2014-12-30 15:27:33,487 - MainThread - botocore.operation - DEBUG - Operation:RevokeSecurityGroupIngress called with kwargs: {u'ToPort': -1, u'GroupName': u'testingicmp', 'ip_permissions': [{'IpProtocol': u'icmp', 'IpRanges': [{'CidrIp': u'8.8.8.8/32'}]}], u'FromPort': 8}
2014-12-30 15:27:33,488 - MainThread - botocore.endpoint - DEBUG - Making request for <botocore.model.OperationModel object at 0x7fa461d8d650> (verify_ssl=True) with params: {'query_string': '', 'headers': {}, 'url_path': '/', 'body': {u'FromPort': 8, u'GroupName': u'testingicmp', u'IpPermissions.1.IpRanges.1.CidrIp': u'8.8.8.8/32', u'IpPermissions.1.IpProtocol': u'icmp', u'ToPort': -1, 'Version': u'2014-10-01', 'Action': u'RevokeSecurityGroupIngress'}, 'method': u'POST'}
2014-12-30 15:27:33,489 - MainThread - botocore.auth - DEBUG - Calculating signature using v4 auth.
2014-12-30 15:27:33,489 - MainThread - botocore.auth - DEBUG - CanonicalRequest:
POST
/
host:ec2.us-east-1.amazonaws.com
user-agent:aws-cli/1.6.10 Python/2.7.8 Linux/3.17.7-300.fc21.x86_64
x-amz-date:20141230T202733Z
host;user-agent;x-amz-date
5e834b9b723192c8a6ec022405624471eadcbb3909f7e8f2ff11b3795fe97a36
2014-12-30 15:27:33,489 - MainThread - botocore.auth - DEBUG - StringToSign:
AWS4-HMAC-SHA256
20141230T202733Z
20141230/us-east-1/ec2/aws4_request
8bdf3d1f589178a78bfa831893eaa194bf23a812f250b6988b9257e56b420764
2014-12-30 15:27:33,489 - MainThread - botocore.auth - DEBUG - Signature:
d79376a375bba9d7d25d2ed41853bd7c681b5bc3f69fc5a8899f6692adb47620
2014-12-30 15:27:33,494 - MainThread - botocore.endpoint - DEBUG - Sending http request: <PreparedRequest [POST]>
2014-12-30 15:27:33,495 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - INFO - Starting new HTTPS connection (1): ec2.us-east-1.amazonaws.com
2014-12-30 15:27:34,024 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "POST / HTTP/1.1" 400 None
2014-12-30 15:27:34,026 - MainThread - botocore.parsers - DEBUG - Response headers:
{'cneonction': 'close',
'date': 'Tue, 30 Dec 2014 20:27:33 GMT',
'server': 'AmazonEC2',
'transfer-encoding': 'chunked'}
2014-12-30 15:27:34,026 - MainThread - botocore.parsers - DEBUG - Response body:
<?xml version="1.0" encoding="UTF-8"?>
<Response><Errors><Error><Code>InvalidParameterCombination</Code><Message>The parameter 'ipPermissions' may not be used in combination with 'fromPort'</Message></Error></Errors><RequestID>3bf79ad0-f5be-4915-a091-487b3619b410</RequestID></Response>
2014-12-30 15:27:34,026 - MainThread - botocore.hooks - DEBUG - Event needs-retry.ec2.RevokeSecurityGroupIngress: calling handler <botocore.retryhandler.RetryHandler object at 0x7fa4618cf950>
2014-12-30 15:27:34,026 - MainThread - botocore.retryhandler - DEBUG - No retry needed.
2014-12-30 15:27:34,027 - MainThread - botocore.hooks - DEBUG - Event after-call.ec2.RevokeSecurityGroupIngress: calling handler <awscli.errorhandler.ErrorHandler object at 0x7fa461de8810>
2014-12-30 15:27:34,027 - MainThread - awscli.errorhandler - DEBUG - HTTP Response Code: 400
2014-12-30 15:27:34,027 - MainThread - awscli.clidriver - DEBUG - Exception caught in main()
Traceback (most recent call last):
File "/usr/local/aws/lib/python2.7/site-packages/awscli/clidriver.py", line 197, in main
return command_table[parsed_args.command](remaining, parsed_args)
File "/usr/local/aws/lib/python2.7/site-packages/awscli/clidriver.py", line 357, in __call__
return command_table[parsed_args.operation](remaining, parsed_globals)
File "/usr/local/aws/lib/python2.7/site-packages/awscli/clidriver.py", line 492, in __call__
self._operation_object, call_parameters, parsed_globals)
File "/usr/local/aws/lib/python2.7/site-packages/awscli/clidriver.py", line 597, in invoke
**parameters)
File "/usr/local/aws/lib/python2.7/site-packages/botocore/operation.py", line 98, in call
parsed=response[1])
File "/usr/local/aws/lib/python2.7/site-packages/botocore/session.py", line 735, in emit
return self._events.emit(event_name, **kwargs)
File "/usr/local/aws/lib/python2.7/site-packages/botocore/hooks.py", line 182, in emit
response = handler(**kwargs)
File "/usr/local/aws/lib/python2.7/site-packages/awscli/errorhandler.py", line 70, in __call__
http_status_code=http_response.status_code)
ClientError: A client error (InvalidParameterCombination) occurred when calling the RevokeSecurityGroupIngress operation: The parameter 'ipPermissions' may not be used in combination with 'fromPort'
2014-12-30 15:27:34,029 - MainThread - awscli.clidriver - DEBUG - Exiting with rc 255
A client error (InvalidParameterCombination) occurred when calling the RevokeSecurityGroupIngress operation: The parameter 'ipPermissions' may not be used in combination with 'fromPort'
|
ClientError
|
def save_file(self, parsed, **kwargs):
if self._response_key not in parsed:
# If the response key is not in parsed, then
# we've received an error message and we'll let the AWS CLI
# error handler print out an error message. We have no
# file to save in this situation.
return
body = parsed[self._response_key]
buffer_size = self._buffer_size
with open(self._output_file, "wb") as fp:
data = body.read(buffer_size)
while data:
fp.write(data)
data = body.read(buffer_size)
# We don't want to include the streaming param in
# the returned response.
del parsed[self._response_key]
|
def save_file(self, parsed, **kwargs):
body = parsed[self._response_key]
buffer_size = self._buffer_size
with open(self._output_file, "wb") as fp:
data = body.read(buffer_size)
while data:
fp.write(data)
data = body.read(buffer_size)
# We don't want to include the streaming param in
# the returned response.
del parsed[self._response_key]
|
https://github.com/aws/aws-cli/issues/1006
|
685b3588202f:lego_iam maitreyr$ aws s3api get-object --bucket maitreyr-kms-test --key ngc6960_FinalPugh900.jpg ./ngc.jpg --debug
2014-11-16 10:58:31,128 - MainThread - awscli.clidriver - DEBUG - CLI version: aws-cli/1.6.2 Python/2.7.6 Darwin/13.4.0, botocore version: 0.73.0
2014-11-16 10:58:31,128 - MainThread - botocore.service - DEBUG - Creating service object for: s3
2014-11-16 10:58:31,156 - MainThread - botocore.hooks - DEBUG - Event service-data-loaded.s3: calling handler <function signature_overrides at 0x108628398>
2014-11-16 10:58:31,156 - MainThread - botocore.hooks - DEBUG - Event service-created: calling handler <function register_retries_for_service at 0x108628230>
2014-11-16 10:58:31,159 - MainThread - botocore.handlers - DEBUG - Registering retry handlers for service: Service(s3)
2014-11-16 10:58:31,160 - MainThread - botocore.service - DEBUG - Creating operation objects for: Service(s3)
2014-11-16 10:58:31,168 - MainThread - botocore.hooks - DEBUG - Event building-command-table.s3api: calling handler <function inject_assume_role_provider at 0x1089759b0>
2014-11-16 10:58:31,168 - MainThread - botocore.hooks - DEBUG - Event building-command-table.s3api: calling handler <function add_waiters at 0x108980758>
2014-11-16 10:58:31,173 - MainThread - awscli.clidriver - DEBUG - OrderedDict([(u'bucket', <awscli.arguments.CLIArgument object at 0x108ab3550>), (u'if-match', <awscli.arguments.CLIArgument object at 0x108ab3590>), (u'if-modified-since', <awscli.arguments.CLIArgument object at 0x108ab35d0>), (u'if-none-match', <awscli.arguments.CLIArgument object at 0x108ab3610>), (u'if-unmodified-since', <awscli.arguments.CLIArgument object at 0x108ab3650>), (u'key', <awscli.arguments.CLIArgument object at 0x108ab3690>), (u'range', <awscli.arguments.CLIArgument object at 0x108ab36d0>), (u'response-cache-control', <awscli.arguments.CLIArgument object at 0x108ab3710>), (u'response-content-disposition', <awscli.arguments.CLIArgument object at 0x108ab3750>), (u'response-content-encoding', <awscli.arguments.CLIArgument object at 0x108ab37d0>), (u'response-content-language', <awscli.arguments.CLIArgument object at 0x108ab3810>), (u'response-content-type', <awscli.arguments.CLIArgument object at 0x108ab3850>), (u'response-expires', <awscli.arguments.CLIArgument object at 0x108ab3890>), (u'version-id', <awscli.arguments.CLIArgument object at 0x108ab38d0>), (u'sse-customer-algorithm', <awscli.arguments.CLIArgument object at 0x108ab3910>), (u'sse-customer-key', <awscli.arguments.CLIArgument object at 0x108ab3950>), (u'sse-customer-key-md5', <awscli.arguments.CLIArgument object at 0x108ab3990>), (u'ssekms-key-id', <awscli.arguments.CLIArgument object at 0x108ab39d0>)])
2014-11-16 10:58:31,173 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.s3api.get-object: calling handler <function add_streaming_output_arg at 0x108894320>
2014-11-16 10:58:31,175 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.s3api.get-object: calling handler <function add_cli_input_json at 0x10896db90>
2014-11-16 10:58:31,175 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.s3api.get-object: calling handler <function unify_paging_params at 0x1088971b8>
2014-11-16 10:58:31,175 - MainThread - botocore.hooks - DEBUG - Event building-argument-table.s3api.get-object: calling handler <function add_generate_skeleton at 0x108975140>
2014-11-16 10:58:31,177 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.bucket: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,177 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.s3.get-object: calling handler <awscli.argprocess.ParamShorthand object at 0x1089b1450>
2014-11-16 10:58:31,177 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2014-11-16 10:58:31,178 - MainThread - awscli.arguments - DEBUG - Unpacked value of "maitreyr-kms-test" for parameter "bucket": maitreyr-kms-test
2014-11-16 10:58:31,178 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.if-match: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,178 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.if-modified-since: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,178 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.if-none-match: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,178 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.if-unmodified-since: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,178 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.key: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,178 - MainThread - botocore.hooks - DEBUG - Event process-cli-arg.s3.get-object: calling handler <awscli.argprocess.ParamShorthand object at 0x1089b1450>
2014-11-16 10:58:31,178 - MainThread - awscli.argprocess - DEBUG - Detected structure: scalar
2014-11-16 10:58:31,178 - MainThread - awscli.arguments - DEBUG - Unpacked value of "ngc6960_FinalPugh900.jpg" for parameter "key": ngc6960_FinalPugh900.jpg
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.range: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.response-cache-control: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.response-content-disposition: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.response-content-encoding: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.response-content-language: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.response-content-type: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.response-expires: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.version-id: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.sse-customer-algorithm: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,179 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.sse-customer-key: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,180 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.sse-customer-key-md5: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,180 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.ssekms-key-id: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,180 - MainThread - botocore.hooks - DEBUG - Event load-cli-arg.s3.get-object.outfile: calling handler <function uri_param at 0x108868f50>
2014-11-16 10:58:31,180 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: env
2014-11-16 10:58:31,180 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: assume-role
2014-11-16 10:58:31,180 - MainThread - botocore.credentials - DEBUG - Looking for credentials via: shared-credentials-file
2014-11-16 10:58:31,181 - MainThread - botocore.credentials - INFO - Found credentials in shared credentials file: ~/.aws/credentials
2014-11-16 10:58:31,219 - MainThread - botocore.operation - DEBUG - Operation:GetObject called with kwargs: {u'Bucket': u'maitreyr-kms-test', u'Key': u'ngc6960_FinalPugh900.jpg'}
2014-11-16 10:58:31,221 - MainThread - botocore.hooks - DEBUG - Event before-call.s3.GetObject: calling handler <function sse_md5 at 0x108628050>
2014-11-16 10:58:31,222 - MainThread - botocore.hooks - DEBUG - Event before-call.s3.GetObject: calling handler <function add_expect_header at 0x108628410>
2014-11-16 10:58:31,222 - MainThread - botocore.endpoint - DEBUG - Making request for <botocore.model.OperationModel object at 0x108aac490> (verify_ssl=True) with params: {'query_string': {}, 'headers': {}, 'url_path': u'/maitreyr-kms-test/ngc6960_FinalPugh900.jpg', 'body': '', 'method': u'GET'}
2014-11-16 10:58:31,222 - MainThread - botocore.hooks - DEBUG - Event before-auth.s3: calling handler <function fix_s3_host at 0x108628140>
2014-11-16 10:58:31,222 - MainThread - botocore.handlers - DEBUG - Checking for DNS compatible bucket for: https://s3-us-west-2.amazonaws.com/maitreyr-kms-test/ngc6960_FinalPugh900.jpg
2014-11-16 10:58:31,222 - MainThread - botocore.handlers - DEBUG - URI updated to: https://maitreyr-kms-test.s3.amazonaws.com/ngc6960_FinalPugh900.jpg
2014-11-16 10:58:31,222 - MainThread - botocore.auth - DEBUG - Calculating signature using hmacv1 auth.
2014-11-16 10:58:31,223 - MainThread - botocore.auth - DEBUG - HTTP request method: GET
2014-11-16 10:58:31,223 - MainThread - botocore.auth - DEBUG - StringToSign:
GET
Sun, 16 Nov 2014 16:58:31 GMT
/maitreyr-kms-test/ngc6960_FinalPugh900.jpg
2014-11-16 10:58:31,230 - MainThread - botocore.endpoint - DEBUG - Sending http request: <PreparedRequest [GET]>
2014-11-16 10:58:31,231 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - INFO - Starting new HTTPS connection (1): maitreyr-kms-test.s3.amazonaws.com
2014-11-16 10:58:31,481 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "GET /ngc6960_FinalPugh900.jpg HTTP/1.1" 307 None
2014-11-16 10:58:31,483 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - INFO - Starting new HTTPS connection (1): maitreyr-kms-test.s3-us-west-2.amazonaws.com
2014-11-16 10:58:32,419 - MainThread - botocore.vendored.requests.packages.urllib3.connectionpool - DEBUG - "GET /ngc6960_FinalPugh900.jpg HTTP/1.1" 400 None
2014-11-16 10:58:32,423 - MainThread - botocore.parsers - DEBUG - Response headers:
{'connection': 'close',
'content-type': 'application/xml',
'date': 'Sun, 16 Nov 2014 16:58:17 GMT',
'server': 'AmazonS3',
'transfer-encoding': 'chunked',
'x-amz-id-2': 'VCLi4Tph0mzbatYuzb3vNY16/eY0hJn9KcN/oMlMbLYS/gMBQI1+YxvpeatReZ1PLzp8ahhN1HQ=',
'x-amz-request-id': 'AD42152AA3A28C6A'}
2014-11-16 10:58:32,423 - MainThread - botocore.parsers - DEBUG - Response body:
<?xml version="1.0" encoding="UTF-8"?>
<Error><Code>InvalidArgument</Code><Message>Requests specifying Server Side Encryption with AWS KMS managed keys require AWS Signature Version 4.</Message><ArgumentName>Authorization</ArgumentName><ArgumentValue>null</ArgumentValue><RequestId>AD42152AA3A28C6A</RequestId><HostId>VCLi4Tph0mzbatYuzb3vNY16/eY0hJn9KcN/oMlMbLYS/gMBQI1+YxvpeatReZ1PLzp8ahhN1HQ=</HostId></Error>
2014-11-16 10:58:32,424 - MainThread - botocore.hooks - DEBUG - Event needs-retry.s3.GetObject: calling handler <botocore.retryhandler.RetryHandler object at 0x108a1f850>
2014-11-16 10:58:32,424 - MainThread - botocore.retryhandler - DEBUG - No retry needed.
2014-11-16 10:58:32,424 - MainThread - botocore.hooks - DEBUG - Event after-call.s3.GetObject: calling handler <bound method StreamingOutputArgument.save_file of <awscli.customizations.streamingoutputarg.StreamingOutputArgument object at 0x108ab3a10>>
2014-11-16 10:58:32,424 - MainThread - awscli.clidriver - DEBUG - Exception caught in main()
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/awscli/clidriver.py", line 207, in main
return command_table[parsed_args.command](remaining, parsed_args)
File "/usr/local/lib/python2.7/site-packages/awscli/clidriver.py", line 359, in __call__
return command_table[parsed_args.operation](remaining, parsed_globals)
File "/usr/local/lib/python2.7/site-packages/awscli/clidriver.py", line 494, in __call__
self._operation_object, call_parameters, parsed_globals)
File "/usr/local/lib/python2.7/site-packages/awscli/clidriver.py", line 599, in invoke
**parameters)
File "/usr/local/lib/python2.7/site-packages/botocore/operation.py", line 98, in call
parsed=response[1])
File "/usr/local/lib/python2.7/site-packages/botocore/session.py", line 729, in emit
return self._events.emit(event_name, **kwargs)
File "/usr/local/lib/python2.7/site-packages/botocore/hooks.py", line 185, in emit
response = handler(**kwargs)
File "/usr/local/lib/python2.7/site-packages/awscli/customizations/streamingoutputarg.py", line 92, in save_file
body = parsed[self._response_key]
KeyError: u'Body'
2014-11-16 10:58:32,425 - MainThread - awscli.clidriver - DEBUG - Exiting with rc 255
u'Body'
|
KeyError
|
def _complete_command(self):
retval = []
if self.current_word == self.command_name:
if self.command_hc:
retval = self.command_hc.command_table.keys()
elif self.current_word.startswith("-"):
retval = self._find_possible_options()
else:
# See if they have entered a partial command name
if self.command_hc:
retval = [
n
for n in self.command_hc.command_table
if n.startswith(self.current_word)
]
return retval
|
def _complete_command(self):
retval = []
if self.current_word == self.command_name:
retval = self.command_hc.command_table.keys()
elif self.current_word.startswith("-"):
retval = self._find_possible_options()
else:
# See if they have entered a partial command name
retval = [
n for n in self.command_hc.command_table if n.startswith(self.current_word)
]
return retval
|
https://github.com/aws/aws-cli/issues/309
|
$ aws emr Traceback (most recent call last):
File "/usr/local/bin/aws_completer", line 22, in <module>
awscli.completer.complete(cline, cpoint)
File "/usr/local/lib/python2.7/dist-packages/awscli/completer.py", line 153, in complete
choices = Completer().complete(cmdline, point)
File "/usr/local/lib/python2.7/dist-packages/awscli/completer.py", line 142, in complete
self._process_command_line()
File "/usr/local/lib/python2.7/dist-packages/awscli/completer.py", line 126, in _process_command_line
if self.command_hc.command_table:
AttributeError: 'NoneType' object has no attribute 'command_table'
|
AttributeError
|
def _process_command_line(self):
# Process the command line and try to find:
# - command_name
# - subcommand_name
# - words
# - current_word
# - previous_word
# - non_options
# - options
self.command_name = None
self.subcommand_name = None
self.words = self.cmdline[0 : self.point].split()
self.current_word = self.words[-1]
if len(self.words) >= 2:
self.previous_word = self.words[-2]
else:
self.previous_word = None
self.non_options = [w for w in self.words if not w.startswith("-")]
self.options = [w for w in self.words if w.startswith("-")]
# Look for a command name in the non_options
for w in self.non_options:
if w in self.main_hc.command_table:
self.command_name = w
cmd_obj = self.main_hc.command_table[self.command_name]
self.command_hc = cmd_obj.create_help_command()
if self.command_hc and self.command_hc.command_table:
# Look for subcommand name
for w in self.non_options:
if w in self.command_hc.command_table:
self.subcommand_name = w
cmd_obj = self.command_hc.command_table[self.subcommand_name]
self.subcommand_hc = cmd_obj.create_help_command()
break
break
|
def _process_command_line(self):
# Process the command line and try to find:
# - command_name
# - subcommand_name
# - words
# - current_word
# - previous_word
# - non_options
# - options
self.command_name = None
self.subcommand_name = None
self.words = self.cmdline[0 : self.point].split()
self.current_word = self.words[-1]
if len(self.words) >= 2:
self.previous_word = self.words[-2]
else:
self.previous_word = None
self.non_options = [w for w in self.words if not w.startswith("-")]
self.options = [w for w in self.words if w.startswith("-")]
# Look for a command name in the non_options
for w in self.non_options:
if w in self.main_hc.command_table:
self.command_name = w
cmd_obj = self.main_hc.command_table[self.command_name]
self.command_hc = cmd_obj.create_help_command()
if self.command_hc.command_table:
# Look for subcommand name
for w in self.non_options:
if w in self.command_hc.command_table:
self.subcommand_name = w
cmd_obj = self.command_hc.command_table[self.subcommand_name]
self.subcommand_hc = cmd_obj.create_help_command()
break
break
|
https://github.com/aws/aws-cli/issues/309
|
$ aws emr Traceback (most recent call last):
File "/usr/local/bin/aws_completer", line 22, in <module>
awscli.completer.complete(cline, cpoint)
File "/usr/local/lib/python2.7/dist-packages/awscli/completer.py", line 153, in complete
choices = Completer().complete(cmdline, point)
File "/usr/local/lib/python2.7/dist-packages/awscli/completer.py", line 142, in complete
self._process_command_line()
File "/usr/local/lib/python2.7/dist-packages/awscli/completer.py", line 126, in _process_command_line
if self.command_hc.command_table:
AttributeError: 'NoneType' object has no attribute 'command_table'
|
AttributeError
|
def __init__(
self,
schema: BaseSchema,
root_value: typing.Any = None,
graphiql: bool = True,
keep_alive: bool = False,
keep_alive_interval: float = 1,
debug: bool = False,
) -> None:
self.schema = schema
self.graphiql = graphiql
self.root_value = root_value
self.keep_alive = keep_alive
self.keep_alive_interval = keep_alive_interval
self._keep_alive_task = None
self.debug = debug
|
def __init__(
self,
schema: GraphQLSchema,
root_value: typing.Any = None,
graphiql: bool = True,
keep_alive: bool = False,
keep_alive_interval: float = 1,
debug: bool = False,
) -> None:
self.schema = schema
self.graphiql = graphiql
self.root_value = root_value
self.keep_alive = keep_alive
self.keep_alive_interval = keep_alive_interval
self._keep_alive_task = None
self.debug = debug
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
async def start_subscription(self, data, operation_id: str, websocket: WebSocket):
query = data["query"]
variables = data.get("variables")
operation_name = data.get("operation_name")
if self.debug:
pretty_print_graphql_operation(operation_name, query, variables)
context = {"websocket": websocket}
data = await self.schema.subscribe(
query,
variable_values=variables,
operation_name=operation_name,
context_value=context,
)
try:
async for result in data:
payload = {"data": result.data}
if result.errors:
payload["errors"] = [format_graphql_error(err) for err in result.errors]
await self._send_message(websocket, GQL_DATA, payload, operation_id)
except Exception as error:
if not isinstance(error, GraphQLError):
error = GraphQLError(str(error), original_error=error)
await self._send_message(
websocket,
GQL_DATA,
{"data": None, "errors": [format_graphql_error(error)]},
operation_id,
)
await self._send_message(websocket, GQL_COMPLETE, None, operation_id)
if self._keep_alive_task:
self._keep_alive_task.cancel()
await websocket.close()
|
async def start_subscription(self, data, operation_id: str, websocket: WebSocket):
query = data["query"]
variables = data.get("variables")
operation_name = data.get("operation_name")
if self.debug:
pretty_print_graphql_operation(operation_name, query, variables)
context = {"websocket": websocket}
data = await subscribe(
self.schema,
query,
variable_values=variables,
operation_name=operation_name,
context_value=context,
)
try:
async for result in data:
payload = {"data": result.data}
if result.errors:
payload["errors"] = [format_graphql_error(err) for err in result.errors]
await self._send_message(websocket, GQL_DATA, payload, operation_id)
except Exception as error:
if not isinstance(error, GraphQLError):
error = GraphQLError(str(error), original_error=error)
await self._send_message(
websocket,
GQL_DATA,
{"data": None, "errors": [format_graphql_error(error)]},
operation_id,
)
await self._send_message(websocket, GQL_COMPLETE, None, operation_id)
if self._keep_alive_task:
self._keep_alive_task.cancel()
await websocket.close()
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
async def execute(self, query, variables=None, context=None, operation_name=None):
if self.debug:
pretty_print_graphql_operation(operation_name, query, variables)
return await self.schema.execute(
query,
root_value=self.root_value,
variable_values=variables,
operation_name=operation_name,
context_value=context,
)
|
async def execute(self, query, variables=None, context=None, operation_name=None):
if self.debug:
pretty_print_graphql_operation(operation_name, query, variables)
return await execute(
self.schema,
query,
root_value=self.root_value,
variable_values=variables,
operation_name=operation_name,
context_value=context,
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def run(): # pragma: no cover
pass
|
def run():
pass
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _process_scalar(
cls,
*,
name: str,
description: str,
serialize: Callable,
parse_value: Callable,
parse_literal: Callable,
):
name = name or to_camel_case(cls.__name__)
wrapper = ScalarWrapper(cls)
wrapper._scalar_definition = ScalarDefinition(
name=name,
description=description,
serialize=serialize,
parse_literal=parse_literal,
parse_value=parse_value,
)
SCALAR_REGISTRY[cls] = wrapper._scalar_definition
return wrapper
|
def _process_scalar(cls, *, name, description, serialize, parse_value, parse_literal):
if name is None:
name = cls.__name__
graphql_type = GraphQLScalarType(
name=name,
description=description,
serialize=serialize,
parse_value=parse_value,
parse_literal=parse_literal,
)
register_type(cls, graphql_type, store_type_information=False)
return cls
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def scalar(
cls=None,
*,
name: str = None,
description: str = None,
serialize: Callable = identity,
parse_value: Optional[Callable] = None,
parse_literal: Optional[Callable] = None,
):
"""Annotates a class or type as a GraphQL custom scalar.
Example usages:
>>> strawberry.scalar(
>>> datetime.date,
>>> serialize=lambda value: value.isoformat(),
>>> parse_value=datetime.parse_date
>>> )
>>> Base64Encoded = strawberry.scalar(
>>> NewType("Base64Encoded", bytes),
>>> serialize=base64.b64encode,
>>> parse_value=base64.b64decode
>>> )
>>> @strawberry.scalar(
>>> serialize=lambda value: ",".join(value.items),
>>> parse_value=lambda value: CustomList(value.split(","))
>>> )
>>> class CustomList:
>>> def __init__(self, items):
>>> self.items = items
"""
if parse_value is None:
parse_value = cls
def wrap(cls):
return _process_scalar(
cls,
name=name,
description=description,
serialize=serialize,
parse_value=parse_value,
parse_literal=parse_literal,
)
if cls is None:
return wrap
return wrap(cls)
|
def scalar(
cls=None,
*,
name=None,
description=None,
serialize=identity,
parse_value=None,
parse_literal=None,
):
"""Annotates a class or type as a GraphQL custom scalar.
Example usages:
>>> strawberry.scalar(
>>> datetime.date,
>>> serialize=lambda value: value.isoformat(),
>>> parse_value=datetime.parse_date
>>> )
>>> Base64Encoded = strawberry.scalar(
>>> NewType("Base64Encoded", bytes),
>>> serialize=base64.b64encode,
>>> parse_value=base64.b64decode
>>> )
>>> @strawberry.scalar(
>>> serialize=lambda value: ",".join(value.items),
>>> parse_value=lambda value: CustomList(value.split(","))
>>> )
>>> class CustomList:
>>> def __init__(self, items):
>>> self.items = items
"""
if parse_value is None:
parse_value = cls
def wrap(cls):
return _process_scalar(
cls,
name=name,
description=description,
serialize=serialize,
parse_value=parse_value,
parse_literal=parse_literal,
)
if cls is None:
return wrap
return wrap(cls)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def directive(*, locations: List[DirectiveLocation], description=None, name=None):
def _wrap(f):
directive_name = name or to_camel_case(f.__name__)
f.directive_definition = DirectiveDefinition(
name=directive_name,
locations=locations,
description=description,
resolver=f,
)
return f
return _wrap
|
def directive(
*, locations: typing.List[DirectiveLocation], description=None, name=None
):
def _wrap(func):
directive_name = name or to_camel_case(func.__name__)
func.directive = GraphQLDirective(
name=directive_name,
locations=locations,
args=_get_arguments(func),
description=description,
)
DIRECTIVE_REGISTRY[directive_name] = func
return func
return _wrap
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _wrap(f):
directive_name = name or to_camel_case(f.__name__)
f.directive_definition = DirectiveDefinition(
name=directive_name,
locations=locations,
description=description,
resolver=f,
)
return f
|
def _wrap(func):
directive_name = name or to_camel_case(func.__name__)
func.directive = GraphQLDirective(
name=directive_name,
locations=locations,
args=_get_arguments(func),
description=description,
)
DIRECTIVE_REGISTRY[directive_name] = func
return func
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def __init__(self, schema: BaseSchema, graphiql=True):
self.schema = schema
self.graphiql = graphiql
|
def __init__(self, schema=None, graphiql=True):
if not schema:
raise ValueError("You must pass in a schema to GraphQLView")
if not isinstance(schema, GraphQLSchema):
raise ValueError("You must pass in a valid schema to GraphQLView")
self.schema = schema
self.graphiql = graphiql
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def dispatch(self, request, *args, **kwargs):
if request.method.lower() not in ("get", "post"):
return HttpResponseNotAllowed(
["GET", "POST"], "GraphQL only supports GET and POST requests."
)
if "text/html" in request.META.get("HTTP_ACCEPT", ""):
if not self.graphiql:
raise Http404("GraphiQL has been disabled")
return self._render_graphiql(request)
data = json.loads(request.body)
try:
query = data["query"]
variables = data.get("variables")
operation_name = data.get("operationName")
except KeyError:
return HttpResponseBadRequest("No GraphQL query found in the request")
context = {"request": request}
result = self.schema.execute_sync(
query,
root_value=self.get_root_value(),
variable_values=variables,
context_value=context,
operation_name=operation_name,
)
response_data = {"data": result.data}
if result.errors:
response_data["errors"] = [format_graphql_error(err) for err in result.errors]
return JsonResponse(response_data)
|
def dispatch(self, request, *args, **kwargs):
if request.method.lower() not in ("get", "post"):
return HttpResponseNotAllowed(
["GET", "POST"], "GraphQL only supports GET and POST requests."
)
if "text/html" in request.META.get("HTTP_ACCEPT", ""):
if not self.graphiql:
raise Http404("GraphiQL has been disabled")
return self._render_graphiql(request)
data = json.loads(request.body)
try:
query = data["query"]
variables = data.get("variables")
operation_name = data.get("operationName")
except KeyError:
return HttpResponseBadRequest("No GraphQL query found in the request")
context = {"request": request}
result = graphql_sync(
self.schema,
query,
root_value=self.get_root_value(),
variable_values=variables,
context_value=context,
operation_name=operation_name,
)
response_data = {"data": result.data}
if result.errors:
response_data["errors"] = [format_graphql_error(err) for err in result.errors]
return JsonResponse(response_data)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _process_enum(cls, name=None, description=None):
if not isinstance(cls, EnumMeta):
raise NotAnEnum()
if not name:
name = cls.__name__
description = description
cls._enum_definition = EnumDefinition(
name=name,
values=[EnumValue(item.name, item.value) for item in cls],
description=description,
)
return cls
|
def _process_enum(cls, name=None, description=None):
if not isinstance(cls, EnumMeta):
raise NotAnEnum()
if not name:
name = cls.__name__
description = description or cls.__doc__
graphql_type = GraphQLEnumType(
name=name,
values=[(item.name, GraphQLEnumValue(item.value)) for item in cls],
description=description,
)
register_type(cls, graphql_type)
return cls
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def __init__(self, field_name: str, annotation):
message = (
f'The type "{annotation.__name__}" of the field "{field_name}" '
f"is generic, but no type has been passed"
)
super().__init__(message)
|
def __init__(self, field_name: str, annotation):
message = (
f'The type "{annotation}" of the field "{field_name}" '
f"is generic, but no type has been passed"
)
super().__init__(message)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def type(
cls: Type = None,
*,
name: str = None,
description: str = None,
keys: List[str] = None,
extend: bool = False,
):
return base_type(
cls,
name=name,
description=description,
federation=FederationTypeParams(keys=keys or [], extend=extend),
)
|
def type(cls=None, *args, **kwargs):
def wrap(cls):
keys = kwargs.pop("keys", [])
extend = kwargs.pop("extend", False)
wrapped = _process_type(cls, *args, **kwargs)
wrapped._federation_keys = keys
wrapped._federation_extend = extend
return wrapped
if cls is None:
return wrap
return wrap(cls)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def field(
f=None,
*,
name: Optional[str] = None,
provides: Optional[List[str]] = None,
requires: Optional[List[str]] = None,
external: bool = False,
is_subscription: bool = False,
description: Optional[str] = None,
resolver: Optional[Callable] = None,
permission_classes: Optional[List[Type[BasePermission]]] = None,
):
return base_field(
f,
name=name,
is_subscription=is_subscription,
description=description,
resolver=resolver,
permission_classes=permission_classes,
federation=FederationFieldParams(
provides=provides or [], requires=requires or [], external=external
),
)
|
def field(wrap=None, *args, **kwargs):
field = strawberry_federation_field(*args, **kwargs)
if wrap is None:
return field
return field(wrap)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _get_entity_type(type_map: TypeMap):
# https://www.apollographql.com/docs/apollo-server/federation/federation-spec/#resolve-requests-for-entities
# To implement the _Entity union, each type annotated with @key
# should be added to the _Entity union.
federation_key_types = [
type.implementation
for type in type_map.values()
if _has_federation_keys(type.definition)
]
# If no types are annotated with the key directive, then the _Entity
# union and Query._entities field should be removed from the schema.
if not federation_key_types:
return None
entity_type = GraphQLUnionType("_Entity", federation_key_types) # type: ignore
def _resolve_type(self, value, _type):
return type_map[self._type_definition.name].implementation
entity_type.resolve_type = _resolve_type
return entity_type
|
def _get_entity_type(self):
# https://www.apollographql.com/docs/apollo-server/federation/federation-spec/#resolve-requests-for-entities
# To implement the _Entity union, each type annotated with @key
# should be added to the _Entity union.
federation_key_types = [
graphql_type
for graphql_type in self.type_map.values()
if has_federation_keys(graphql_type)
]
# If no types are annotated with the key directive, then the _Entity
# union and Query._entities field should be removed from the schema.
if not federation_key_types:
return None
entity_type = GraphQLUnionType("_Entity", federation_key_types)
def _resolve_type(self, value, _type):
return self.graphql_type
entity_type.resolve_type = _resolve_type
return entity_type
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _resolve_type(self, value, _type):
return type_map[self._type_definition.name].implementation
|
def _resolve_type(self, value, _type):
return self.graphql_type
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.