after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def process(
    self, *, in_str: str, fname: Optional[str] = None, config=None
) -> Tuple[Optional[TemplatedFile], list]:
    """Process a string and return the new string.

    Note that the arguments are enforced as keywords
    because Templaters can have differences in their
    `process` method signature.
    A Templater that only supports reading from a file
    would need the following signature:
        process(*, fname, in_str=None, config=None)
    (arguments are swapped)

    Args:
        in_str (:obj:`str`): The input string.
        fname (:obj:`str`, optional): The filename of this string. This is
            mostly for loading config files at runtime.
        config (:obj:`FluffConfig`): A specific config to use for this
            templating operation. Only necessary for some templaters.

    Returns:
        Tuple of the :obj:`TemplatedFile` (or ``None`` on unrecoverable
        templating failure) and a list of templating violations.
    """
    if not config:
        raise ValueError(
            "For the jinja templater, the `process()` method requires a config object."
        )
    # Load the context
    live_context = self.get_context(fname=fname, config=config)
    # Apply dbt builtin functions if we're allowed.
    apply_dbt_builtins = config.get_section(
        (self.templater_selector, self.name, "apply_dbt_builtins")
    )
    if apply_dbt_builtins:
        # This feels a bit wrong defining these here, they should probably
        # be configurable somewhere sensible. But for now they're not.
        # TODO: Come up with a better solution.
        dbt_builtins = self._generate_dbt_builtins()
        for name in dbt_builtins:
            # Only apply if it hasn't already been set at this stage.
            if name not in live_context:
                live_context[name] = dbt_builtins[name]
    # Load config macros
    env = self._get_jinja_env()
    ctx = self._extract_macros_from_config(config=config, env=env, ctx=live_context)
    # Load macros from path (if applicable)
    macros_path = config.get_section(
        (self.templater_selector, self.name, "load_macros_from_path")
    )
    if macros_path:
        ctx.update(
            self._extract_macros_from_path(macros_path, env=env, ctx=live_context)
        )
    live_context.update(ctx)
    # Load the template, passing the global context.
    try:
        template = env.from_string(in_str, globals=live_context)
    except TemplateSyntaxError as err:
        # Something in the template didn't parse, return the original
        # and a violation around what happened.
        return (
            TemplatedFile(source_str=in_str, fname=fname),
            [
                SQLTemplaterError(
                    "Failure to parse jinja template: {0}.".format(err),
                    pos=FilePositionMarker(
                        None,
                        err.lineno,
                        None,
                        # Calculate the charpos for sorting: lines *before*
                        # the error line contribute their full length.
                        sum(
                            len(line)
                            for line in in_str.split("\n")[: err.lineno - 1]
                        ),
                    ),
                )
            ],
        )
    violations = []
    # Attempt to identify any undeclared variables. The majority
    # will be found during the _crawl_tree step rather than this
    # first Exception which serves only to catch catastrophic errors.
    try:
        syntax_tree = env.parse(in_str)
        undefined_variables = meta.find_undeclared_variables(syntax_tree)
    except Exception as err:
        # TODO: Add a url here so people can get more help.
        # Chain the original error for a useful traceback.
        raise SQLTemplaterError(
            "Failure in identifying Jinja variables: {0}.".format(err)
        ) from err
    # Get rid of any that *are* actually defined.
    for val in live_context:
        if val in undefined_variables:
            undefined_variables.remove(val)
    if undefined_variables:
        # Lets go through and find out where they are:
        for val in self._crawl_tree(syntax_tree, undefined_variables, in_str):
            violations.append(val)
    try:
        # NB: Passing no context. Everything is loaded when the template is loaded.
        out_str = template.render()
        # Slice the file once rendered.
        raw_sliced, sliced_file, out_str = self.slice_file(
            in_str, out_str, config=config
        )
        return (
            TemplatedFile(
                source_str=in_str,
                templated_str=out_str,
                fname=fname,
                sliced_file=sliced_file,
                raw_sliced=raw_sliced,
            ),
            violations,
        )
    except (TemplateError, TypeError) as err:
        templater_logger.info("Unrecoverable Jinja Error: %s", err)
        violations.append(
            SQLTemplaterError(
                (
                    "Unrecoverable failure in Jinja templating: {0}. Have you configured "
                    "your variables? https://docs.sqlfluff.com/en/latest/configuration.html"
                ).format(err)
            )
        )
        return None, violations
|
def process(
    self, *, in_str: str, fname: Optional[str] = None, config=None
) -> Tuple[Optional[TemplatedFile], list]:
    """Process a string and return the new string.

    Note that the arguments are enforced as keywords
    because Templaters can have differences in their
    `process` method signature.
    A Templater that only supports reading from a file
    would need the following signature:
        process(*, fname, in_str=None, config=None)
    (arguments are swapped)

    Args:
        in_str (:obj:`str`): The input string.
        fname (:obj:`str`, optional): The filename of this string. This is
            mostly for loading config files at runtime.
        config (:obj:`FluffConfig`): A specific config to use for this
            templating operation. Only necessary for some templaters.

    Returns:
        Tuple of the :obj:`TemplatedFile` (or ``None`` on unrecoverable
        templating failure) and a list of templating violations.
    """
    if not config:
        raise ValueError(
            "For the jinja templater, the `process()` method requires a config object."
        )
    # Load the context
    live_context = self.get_context(fname=fname, config=config)
    # Apply dbt builtin functions if we're allowed.
    apply_dbt_builtins = config.get_section(
        (self.templater_selector, self.name, "apply_dbt_builtins")
    )
    if apply_dbt_builtins:
        # This feels a bit wrong defining these here, they should probably
        # be configurable somewhere sensible. But for now they're not.
        # TODO: Come up with a better solution.
        dbt_builtins = self._generate_dbt_builtins()
        for name in dbt_builtins:
            # Only apply if it hasn't already been set at this stage.
            if name not in live_context:
                live_context[name] = dbt_builtins[name]
    # Load config macros
    env = self._get_jinja_env()
    ctx = self._extract_macros_from_config(config=config, env=env, ctx=live_context)
    # Load macros from path (if applicable)
    macros_path = config.get_section(
        (self.templater_selector, self.name, "load_macros_from_path")
    )
    if macros_path:
        ctx.update(
            self._extract_macros_from_path(macros_path, env=env, ctx=live_context)
        )
    live_context.update(ctx)
    # Load the template, passing the global context.
    try:
        template = env.from_string(in_str, globals=live_context)
    except TemplateSyntaxError as err:
        # Something in the template didn't parse, return the original
        # and a violation around what happened.
        return (
            TemplatedFile(source_str=in_str, fname=fname),
            [
                SQLTemplaterError(
                    "Failure to parse jinja template: {0}.".format(err),
                    pos=FilePositionMarker(
                        None,
                        err.lineno,
                        None,
                        # Calculate the charpos for sorting: lines *before*
                        # the error line contribute their full length.
                        sum(
                            len(line)
                            for line in in_str.split("\n")[: err.lineno - 1]
                        ),
                    ),
                )
            ],
        )
    violations = []
    # Attempt to identify any undeclared variables. The majority
    # will be found during the _crawl_tree step rather than this
    # first Exception which serves only to catch catastrophic errors.
    try:
        syntax_tree = env.parse(in_str)
        undefined_variables = meta.find_undeclared_variables(syntax_tree)
    except Exception as err:
        # TODO: Add a url here so people can get more help.
        # Chain the original error for a useful traceback.
        raise SQLTemplaterError(
            "Failure in identifying Jinja variables: {0}.".format(err)
        ) from err
    # Get rid of any that *are* actually defined.
    for val in live_context:
        if val in undefined_variables:
            undefined_variables.remove(val)
    if undefined_variables:
        # Lets go through and find out where they are:
        for val in self._crawl_tree(syntax_tree, undefined_variables, in_str):
            violations.append(val)
    try:
        # NB: Passing no context. Everything is loaded when the template is loaded.
        out_str = template.render()
        # Slice the file once rendered.
        raw_sliced, sliced_file = self.slice_file(in_str, out_str)
        return (
            TemplatedFile(
                source_str=in_str,
                templated_str=out_str,
                fname=fname,
                sliced_file=sliced_file,
                raw_sliced=raw_sliced,
            ),
            violations,
        )
    except (TemplateError, TypeError) as err:
        templater_logger.info("Unrecoverable Jinja Error: %s", err)
        violations.append(
            SQLTemplaterError(
                (
                    "Unrecoverable failure in Jinja templating: {0}. Have you configured "
                    "your variables? https://docs.sqlfluff.com/en/latest/configuration.html"
                ).format(err)
            )
        )
        return None, violations
|
https://github.com/sqlfluff/sqlfluff/issues/600
|
Traceback (most recent call last):
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff', 'console_scripts', 'sqlfluff')()
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/cli/commands.py", line 323, in lint
ignore_files=not disregard_sqlfluffignores,
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 1161, in lint_paths
ignore_files=ignore_files,
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 1139, in lint_path
target_file.read(), fname=fname, fix=fix, config=config
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 939, in lint_string
parsed = self.parse_string(in_str=in_str, fname=fname, config=config)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 723, in parse_string
tokens, lex_vs = lexer.lex(templated_file)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/parser/lexer.py", line 343, in lex
return self.enrich_segments(segment_buff, raw), violations
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/parser/lexer.py", line 373, in enrich_segments
templated_slice
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/templaters/base.py", line 283, in templated_slice_to_source_slice
elif start_slices[0][0] == "literal":
IndexError: list index out of range
|
IndexError
|
def process(
    self, *, in_str: str, fname: Optional[str] = None, config=None
) -> Tuple[Optional[TemplatedFile], list]:
    """Process a string and return a TemplatedFile.

    Note that the arguments are enforced as keywords
    because Templaters can have differences in their
    `process` method signature.
    A Templater that only supports reading from a file
    would need the following signature:
        process(*, fname, in_str=None, config=None)
    (arguments are swapped)

    Args:
        in_str (:obj:`str`): The input string.
        fname (:obj:`str`, optional): The filename of this string. This is
            mostly for loading config files at runtime.
        config (:obj:`FluffConfig`): A specific config to use for this
            templating operation. Only necessary for some templaters.

    Raises:
        SQLTemplaterError: If a placeholder in the string has no matching
            key in the templating context.
    """
    live_context = self.get_context(fname=fname, config=config)
    try:
        # Python templating is plain str.format against the context.
        new_str = in_str.format(**live_context)
    except KeyError as err:
        # TODO: Add a url here so people can get more help.
        # Chain the original error for a useful traceback.
        raise SQLTemplaterError(
            "Failure in Python templating: {0}. Have you configured your variables?".format(
                err
            )
        ) from err
    raw_sliced, sliced_file, new_str = self.slice_file(in_str, new_str, config=config)
    return (
        TemplatedFile(
            source_str=in_str,
            templated_str=new_str,
            fname=fname,
            sliced_file=sliced_file,
            raw_sliced=raw_sliced,
        ),
        [],
    )
|
def process(
    self, *, in_str: str, fname: Optional[str] = None, config=None
) -> Tuple[Optional[TemplatedFile], list]:
    """Process a string and return a TemplatedFile.

    Note that the arguments are enforced as keywords
    because Templaters can have differences in their
    `process` method signature.
    A Templater that only supports reading from a file
    would need the following signature:
        process(*, fname, in_str=None, config=None)
    (arguments are swapped)

    Args:
        in_str (:obj:`str`): The input string.
        fname (:obj:`str`, optional): The filename of this string. This is
            mostly for loading config files at runtime.
        config (:obj:`FluffConfig`): A specific config to use for this
            templating operation. Only necessary for some templaters.

    Raises:
        SQLTemplaterError: If a placeholder in the string has no matching
            key in the templating context.
    """
    live_context = self.get_context(fname=fname, config=config)
    try:
        # Python templating is plain str.format against the context.
        new_str = in_str.format(**live_context)
    except KeyError as err:
        # TODO: Add a url here so people can get more help.
        # Chain the original error for a useful traceback.
        raise SQLTemplaterError(
            "Failure in Python templating: {0}. Have you configured your variables?".format(
                err
            )
        ) from err
    raw_sliced, sliced_file = self.slice_file(in_str, new_str)
    return (
        TemplatedFile(
            source_str=in_str,
            templated_str=new_str,
            fname=fname,
            sliced_file=sliced_file,
            raw_sliced=raw_sliced,
        ),
        [],
    )
|
https://github.com/sqlfluff/sqlfluff/issues/600
|
Traceback (most recent call last):
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff', 'console_scripts', 'sqlfluff')()
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/cli/commands.py", line 323, in lint
ignore_files=not disregard_sqlfluffignores,
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 1161, in lint_paths
ignore_files=ignore_files,
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 1139, in lint_path
target_file.read(), fname=fname, fix=fix, config=config
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 939, in lint_string
parsed = self.parse_string(in_str=in_str, fname=fname, config=config)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 723, in parse_string
tokens, lex_vs = lexer.lex(templated_file)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/parser/lexer.py", line 343, in lex
return self.enrich_segments(segment_buff, raw), violations
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/parser/lexer.py", line 373, in enrich_segments
templated_slice
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/templaters/base.py", line 283, in templated_slice_to_source_slice
elif start_slices[0][0] == "literal":
IndexError: list index out of range
|
IndexError
|
def slice_file(
    cls, raw_str: str, templated_str: str, config=None
) -> Tuple[List[RawFileSlice], List[TemplatedFileSlice], str]:
    """Slice the file to determine regions where we can fix.

    Runs up to two slicing passes: if unwrapping a wrapped query
    changes the templated string, slicing is redone against the
    updated string.
    """
    templater_logger.info("Slicing File Template")
    templater_logger.debug("    Raw String: %r", raw_str)
    templater_logger.debug("    Templated String: %r", templated_str)
    # Slice the raw source, then pull out the literal sections which act
    # as anchors between the raw and templated strings.
    raw_sliced = list(cls._slice_template(raw_str))
    literals = [
        elem.raw for elem in raw_sliced if elem.slice_type == "literal"
    ]
    templater_logger.debug("    Literals: %s", literals)
    for pass_idx in range(2):
        templater_logger.debug("  # Slice Loop %s", pass_idx)
        # Where does each literal occur in each string?
        raw_occs = cls._substring_occurances(raw_str, literals)
        templ_occs = cls._substring_occurances(templated_str, literals)
        templater_logger.debug(
            "    Occurances: Raw: %s, Templated: %s",
            raw_occs,
            templ_occs,
        )
        # First anchor on the invariant literals...
        anchored = list(
            cls._split_invariants(
                raw_sliced,
                literals,
                raw_occs,
                templ_occs,
                templated_str,
            )
        )
        templater_logger.debug("    Split Sliced: %s", anchored)
        # ...then resolve unique occurrences and coalesce the rest.
        file_slices = list(
            cls._split_uniques_coalesce_rest(
                anchored, raw_occs, templ_occs, templated_str
            )
        )
        templater_logger.debug("    Fully Sliced: %s", file_slices)
        # Whether to unwrap a wrapped query is configurable; default on.
        unwrap_wrapped = (
            True
            if config is None
            else config.get("unwrap_wrapped_queries", section="templater", default=True)
        )
        file_slices, updated_templated = cls._check_for_wrapped(
            file_slices, templated_str, unwrap_wrapped=unwrap_wrapped
        )
        if updated_templated == templated_str:
            # Stable: the slicing didn't change the string, so we're done.
            break
        # The string changed: go around again with the updated version.
        templated_str = updated_templated
    return raw_sliced, file_slices, updated_templated
|
def slice_file(
    cls, raw_str: str, templated_str: str
) -> Tuple[List[RawFileSlice], List[TemplatedFileSlice]]:
    """Slice the file to determine regions where we can fix."""
    templater_logger.info("Slicing File Template")
    templater_logger.debug("    Raw String: %r", raw_str)
    templater_logger.debug("    Templated String: %r", templated_str)
    # Slice the raw source, then pull out the literal sections which act
    # as anchors between the raw and templated strings.
    raw_sliced = list(cls._slice_template(raw_str))
    literals = [
        elem.raw for elem in raw_sliced if elem.slice_type == "literal"
    ]
    templater_logger.debug("    Literals: %s", literals)
    # Where does each literal occur in each string?
    raw_occs = cls._substring_occurances(raw_str, literals)
    templ_occs = cls._substring_occurances(templated_str, literals)
    templater_logger.debug(
        "    Occurances: Raw: %s, Templated: %s",
        raw_occs,
        templ_occs,
    )
    # First anchor on the invariant literals...
    anchored = list(
        cls._split_invariants(
            raw_sliced,
            literals,
            raw_occs,
            templ_occs,
            templated_str,
        )
    )
    templater_logger.debug("    Split Sliced: %s", anchored)
    # ...then resolve unique occurrences and coalesce the rest.
    file_slices = list(
        cls._split_uniques_coalesce_rest(
            anchored, raw_occs, templ_occs, templated_str
        )
    )
    templater_logger.debug("    Fully Sliced: %s", file_slices)
    return raw_sliced, file_slices
|
https://github.com/sqlfluff/sqlfluff/issues/600
|
Traceback (most recent call last):
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff', 'console_scripts', 'sqlfluff')()
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/niallwoodward/dev/pull_requests/dbt_sqlfluff_venv/lib/python3.6/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/cli/commands.py", line 323, in lint
ignore_files=not disregard_sqlfluffignores,
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 1161, in lint_paths
ignore_files=ignore_files,
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 1139, in lint_path
target_file.read(), fname=fname, fix=fix, config=config
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 939, in lint_string
parsed = self.parse_string(in_str=in_str, fname=fname, config=config)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/linter.py", line 723, in parse_string
tokens, lex_vs = lexer.lex(templated_file)
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/parser/lexer.py", line 343, in lex
return self.enrich_segments(segment_buff, raw), violations
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/parser/lexer.py", line 373, in enrich_segments
templated_slice
File "/Users/niallwoodward/dev/pull_requests/sqlfluff/src/sqlfluff/core/templaters/base.py", line 283, in templated_slice_to_source_slice
elif start_slices[0][0] == "literal":
IndexError: list index out of range
|
IndexError
|
def _lint_references_and_aliases(self, aliases, references, using_cols, parent_select):
    """Check whether any aliases are duplicates.

    NB: Subclasses of this error should override this function.
    """
    # Scan every pairing of aliases for a name collision. The first
    # clash short-circuits: once aliases collide, further analysis of
    # this statement would be meaningless.
    for first, second in itertools.combinations(aliases, 2):
        if first[0] and first[0] == second[0]:
            result = LintResult(
                # Reference the element, not the string.
                anchor=second[1],
                description=(
                    "Duplicate table alias {0!r}. Table aliases should be unique."
                ).format(second[0]),
            )
            return [result]
    return None
|
def _lint_references_and_aliases(self, aliases, references, using_cols, parent_select):
    """Check whether any aliases are duplicates.

    NB: Subclasses of this error should override this function.
    """
    # Are any of the aliases the same?
    for a1, a2 in itertools.combinations(aliases, 2):
        # Compare the strings
        if a1[0] == a2[0] and a1[0]:
            # If there are any, then the rest of the code
            # won't make sense so just return here.
            return [
                LintResult(
                    # Reference the element, not the string.
                    anchor=a2[1],
                    description=(
                        "Duplicate table alias {0!r}. Table aliases should be unique."
                    ).format(
                        # BUGFIX: aliases are (name, element) tuples, so use
                        # a2[0] — a2.raw raised AttributeError (issue #377).
                        a2[0]
                    ),
                )
            ]
    return None
|
https://github.com/sqlfluff/sqlfluff/issues/377
|
Traceback (most recent call last):
File "/Users/florentpezet/.pyenv/versions/sqlfluff-3.7.5/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff', 'console_scripts', 'sqlfluff')()
File "/Users/florentpezet/.pyenv/versions/sqlfluff-3.7.5/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/Users/florentpezet/.pyenv/versions/sqlfluff-3.7.5/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/florentpezet/.pyenv/versions/sqlfluff-3.7.5/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/florentpezet/.pyenv/versions/sqlfluff-3.7.5/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/florentpezet/.pyenv/versions/sqlfluff-3.7.5/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/cli/commands.py", line 162, in lint
result = lnt.lint_paths(paths, verbosity=verbose, ignore_non_existent_files=False)
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/linter.py", line 891, in lint_paths
ignore_non_existent_files=ignore_non_existent_files))
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/linter.py", line 877, in lint_path
fix=fix, config=config))
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/linter.py", line 727, in lint_string
lerrs, _, _, _ = crawler.crawl(parsed, dialect=config.get('dialect_obj'))
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/rules/base.py", line 264, in crawl
dialect=dialect)
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/rules/base.py", line 264, in crawl
dialect=dialect)
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/rules/base.py", line 217, in crawl
raw_stack=raw_stack, memory=memory, dialect=dialect)
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/rules/std.py", line 2194, in _eval
return self._lint_references_and_aliases(aliases, reference_buffer, using_cols, parent_select)
File "/Users/florentpezet/Documents/sqlfluff/src/sqlfluff/rules/std.py", line 2119, in _lint_references_and_aliases
"aliases should be unique.").format(a2.raw)
AttributeError: 'tuple' object has no attribute 'raw'
|
AttributeError
|
def stats(self):
    """Return a stats dictionary of this result."""
    # Accumulate per-path stats into one running total.
    totals = dict(files=0, clean=0, unclean=0, violations=0)
    for path in self.paths:
        totals = self.sum_dicts(path.stats(), totals)
    file_count = totals["files"]
    if file_count > 0:
        totals["avg per file"] = totals["violations"] * 1.0 / file_count
        totals["unclean rate"] = totals["unclean"] * 1.0 / file_count
    else:
        # Nothing was linted: define the rates as zero.
        totals["avg per file"] = 0
        totals["unclean rate"] = 0
    totals["clean files"] = totals["clean"]
    totals["unclean files"] = totals["unclean"]
    failed = totals["violations"] > 0
    totals["exit code"] = 65 if failed else 0
    totals["status"] = "FAIL" if failed else "PASS"
    return totals
|
def stats(self):
    """Return a stats dictionary of this result.

    Guards against the case where no files were linted (e.g. all paths
    were ignored or empty), which previously raised ZeroDivisionError
    when computing the per-file rates (issue #224).
    """
    all_stats = dict(files=0, clean=0, unclean=0, violations=0)
    for path in self.paths:
        all_stats = self.sum_dicts(path.stats(), all_stats)
    if all_stats["files"] > 0:
        all_stats["avg per file"] = all_stats["violations"] * 1.0 / all_stats["files"]
        all_stats["unclean rate"] = all_stats["unclean"] * 1.0 / all_stats["files"]
    else:
        # No files: rates are defined as zero rather than dividing by zero.
        all_stats["avg per file"] = 0
        all_stats["unclean rate"] = 0
    all_stats["clean files"] = all_stats["clean"]
    all_stats["unclean files"] = all_stats["unclean"]
    all_stats["exit code"] = 65 if all_stats["violations"] > 0 else 0
    all_stats["status"] = "FAIL" if all_stats["violations"] > 0 else "PASS"
    return all_stats
|
https://github.com/sqlfluff/sqlfluff/issues/224
|
Traceback (most recent call last):
File "/usr/local/pyenv/versions/lib-3.7.3/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.3.1', 'console_scripts', 'sqlfluff')()
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 150, in lint
lnt.log(format_linting_result_footer(result, verbose=verbose))
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/sqlfluff/cli/formatters.py", line 146, in format_linting_result_footer
text_buffer.write(format_linting_stats(result, verbose=verbose))
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/sqlfluff/cli/formatters.py", line 106, in format_linting_stats
all_stats = result.stats()
File "/usr/local/pyenv/versions/3.7.3/envs/lib-3.7.3/lib/python3.7/site-packages/sqlfluff/linter.py", line 381, in stats
all_stats['avg per file'] = all_stats['violations'] * 1.0 / all_stats['files']
ZeroDivisionError: float division by zero
|
ZeroDivisionError
|
def fix(force, paths, **kwargs):
    """Fix SQL files.
    PATH is the path to a sql file or directory to lint. This can be either a
    file ('path/to/file.sql'), a path ('directory/of/sql/files'), a single ('-')
    character to indicate reading from *stdin* or a dot/blank ('.'/' ') which will
    be interpreted like passing the current working directory as a path argument.
    """
    # Build the config and linter from the CLI kwargs, and echo the
    # resolved config when there is anything to show.
    c = get_config(**kwargs)
    lnt = get_linter(c)
    verbose = c.get("verbose")
    config_string = format_config(lnt, verbose=verbose)
    if len(config_string) > 0:
        lnt.log(config_string)
    # Check that if fix is specified, that we have picked only a subset of rules
    if lnt.config.get("rule_whitelist") is None:
        lnt.log(
            (
                "The fix option is only available in combination"
                " with --rules. This is for your own safety!"
            )
        )
        sys.exit(1)
    # handle stdin case. should output formatted sql to stdout and nothing else.
    if ("-",) == paths:
        stdin = sys.stdin.read()
        result = lnt.lint_string_wrapped(
            stdin, fname="stdin", verbosity=verbose, fix=True
        )
        # Only the fixed SQL goes to stdout so output can be piped.
        stdout = result.paths[0].files[0].fix_string()
        click.echo(stdout, nl=False)
        sys.exit()
    # Lint the paths (not with the fix argument at this stage), outputting as we go.
    lnt.log("==== finding violations ====")
    try:
        result = lnt.lint_paths(paths, verbosity=verbose)
    except IOError:
        click.echo(
            colorize(
                "The path(s) {0!r} could not be accessed. Check it/they exist(s).".format(
                    paths
                ),
                "red",
            )
        )
        sys.exit(1)
    # NB: We filter to linting violations here, because they're
    # the only ones which can be potentially fixed.
    if result.num_violations(types=SQLLintError) > 0:
        click.echo("==== fixing violations ====")
        click.echo(
            "{0} linting violations found".format(
                result.num_violations(types=SQLLintError)
            )
        )
        if force:
            # --force skips the interactive confirmation below.
            click.echo("FORCE MODE: Attempting fixes...")
            success = do_fixes(lnt, paths, types=SQLLintError)
            if not success:
                sys.exit(1)
        else:
            click.echo(
                "Are you sure you wish to attempt to fix these? [Y/n] ", nl=False
            )
            c = click.getchar().lower()
            click.echo("...")
            if c == "y":
                click.echo("Attempting fixes...")
                # NOTE(review): unlike the force branch, no types filter is
                # passed to do_fixes here — confirm this is intentional.
                success = do_fixes(lnt, paths)
                if not success:
                    sys.exit(1)
            elif c == "n":
                click.echo("Aborting...")
            else:
                # Any other keypress is treated as a "no".
                click.echo("Invalid input :(")
                click.echo("Aborting...")
    else:
        click.echo("==== no linting violations found ====")
    sys.exit(0)
|
def fix(force, paths, **kwargs):
    """Fix SQL files.
    PATH is the path to a sql file or directory to lint. This can be either a
    file ('path/to/file.sql'), a path ('directory/of/sql/files'), a single ('-')
    character to indicate reading from *stdin* or a dot/blank ('.'/' ') which will
    be interpreted like passing the current working directory as a path argument.
    """
    # Build the config and linter from the CLI kwargs, and echo the
    # resolved config when there is anything to show.
    c = get_config(**kwargs)
    lnt = get_linter(c)
    verbose = c.get("verbose")
    config_string = format_config(lnt, verbose=verbose)
    if len(config_string) > 0:
        lnt.log(config_string)
    # Check that if fix is specified, that we have picked only a subset of rules
    if lnt.config.get("rule_whitelist") is None:
        lnt.log(
            (
                "The fix option is only available in combination"
                " with --rules. This is for your own safety!"
            )
        )
        sys.exit(1)
    # handle stdin case. should output formatted sql to stdout and nothing else.
    if ("-",) == paths:
        stdin = sys.stdin.read()
        result = lnt.lint_string_wrapped(
            stdin, fname="stdin", verbosity=verbose, fix=True
        )
        # Only the fixed SQL goes to stdout so output can be piped.
        stdout = result.paths[0].files[0].fix_string()
        click.echo(stdout, nl=False)
        sys.exit()
    # Lint the paths (not with the fix argument at this stage), outputting as we go.
    lnt.log("==== finding violations ====")
    try:
        result = lnt.lint_paths(paths, verbosity=verbose)
    except IOError:
        click.echo(
            colorize(
                "The path(s) {0!r} could not be accessed. Check it/they exist(s).".format(
                    paths
                ),
                "red",
            )
        )
        sys.exit(1)
    # NOTE(review): this counts *all* violations, including templating
    # errors which cannot be fixed — confirm whether filtering to lint
    # violations is wanted here.
    if result.num_violations() > 0:
        click.echo("==== fixing violations ====")
        click.echo("{0} violations found".format(result.num_violations()))
        if force:
            # --force skips the interactive confirmation below and
            # re-lints with fix=True before persisting changes.
            click.echo("FORCE MODE: Attempting fixes...")
            result = lnt.lint_paths(paths, fix=True)
            click.echo("Persisting Changes...")
            result.persist_changes()
            click.echo("Done. Please check your files to confirm.")
        else:
            click.echo(
                "Are you sure you wish to attempt to fix these? [Y/n] ", nl=False
            )
            c = click.getchar().lower()
            click.echo("...")
            if c == "y":
                click.echo("Attempting fixes...")
                result = lnt.lint_paths(paths, fix=True)
                click.echo("Persisting Changes...")
                result.persist_changes(verbosity=verbose)
                click.echo("Done. Please check your files to confirm.")
            elif c == "n":
                click.echo("Aborting...")
            else:
                # Any other keypress is treated as a "no".
                click.echo("Invalid input :(")
                click.echo("Aborting...")
    else:
        click.echo("==== no violations found ====")
    sys.exit(0)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def num_violations(self, rules=None, types=None):
"""Count the number of violations.
Optionally now with filters.
"""
violations = self.violations
if types:
try:
types = tuple(types)
except TypeError:
types = (types,)
violations = [v for v in violations if isinstance(v, types)]
if rules:
if isinstance(rules, str):
rules = (rules,)
else:
rules = tuple(rules)
violations = [v for v in violations if v.rule_code() in rules]
return len(violations)
|
def num_violations(self):
"""Count the number of violations."""
return len(self.violations)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def fix_string(self, verbosity=0):
"""Obtain the changes to a path as a string.
We use the file_mask to do a safe merge, avoiding any templated
sections. First we need to detect where there have been changes
between the fixed and templated versions. The file mask is of
the format: (raw_file, templated_file, fixed_file).
We use difflib.SequenceMatcher.get_opcodes
See: https://docs.python.org/3.7/library/difflib.html#difflib.SequenceMatcher.get_opcodes
It returns a list of tuples ('equal|replace', ia1, ia2, ib1, ib2).
"""
# Do we have enough information to actually fix the file?
if any(elem is None for elem in self.file_mask):
verbosity_logger(
"Insufficient information to fix file: {0}".format(self.file_mask),
verbosity=verbosity,
)
return None, False
verbosity_logger(
"Persisting file masks: {0}".format(self.file_mask), verbosity=verbosity
)
# Compare Templated with Raw
diff_templ = SequenceMatcher(
autojunk=None, a=self.file_mask[0], b=self.file_mask[1]
)
diff_templ_codes = diff_templ.get_opcodes()
verbosity_logger(
"Templater diff codes: {0}".format(diff_templ_codes), verbosity=verbosity
)
# Compare Fixed with Templated
diff_fix = SequenceMatcher(autojunk=None, a=self.file_mask[1], b=self.file_mask[2])
# diff_fix = SequenceMatcher(autojunk=None, a=self.file_mask[1][0], b=self.file_mask[2][0])
diff_fix_codes = diff_fix.get_opcodes()
verbosity_logger(
"Fixing diff codes: {0}".format(diff_fix_codes), verbosity=verbosity
)
# If diff_templ isn't the same then we should just keep the template. If there *was*
# a fix in that space, then we should raise an issue
# If it is the same, then we can apply fixes as expected.
write_buff = ""
fixed_block = None
templ_block = None
# index in raw, templ and fix
idx = (0, 0, 0)
loop_idx = 0
while True:
loop_idx += 1
verbosity_logger(
"{0:04d}: Write Loop: idx:{1}, buff:{2!r}".format(
loop_idx, idx, write_buff
),
verbosity=verbosity,
)
if templ_block is None:
if diff_templ_codes:
templ_block = diff_templ_codes.pop(0)
# We've exhausted the template. Have we exhausted the fixes?
elif fixed_block is None:
# Yes - excellent. DONE
break
else:
raise NotImplementedError(
"Fix Block left over! DOn't know how to handle this! aeflf8wh"
)
if fixed_block is None:
if diff_fix_codes:
fixed_block = diff_fix_codes.pop(0)
else:
raise NotImplementedError("Unexpectedly depleted the fixes. Panic!")
verbosity_logger(
"{0:04d}: Blocks: template:{1}, fix:{2}".format(
loop_idx, templ_block, fixed_block
),
verbosity=verbosity,
)
if templ_block[0] == "equal":
if fixed_block[0] == "equal":
# No templating, no fixes, go with middle and advance indexes
# Find out how far we can advance (we use the middle version because it's common)
if templ_block[4] == fixed_block[2]:
buff = self.file_mask[1][idx[1] : fixed_block[2]]
# consume both blocks
fixed_block = None
templ_block = None
elif templ_block[4] > fixed_block[2]:
buff = self.file_mask[1][idx[1] : fixed_block[2]]
# consume fixed block
fixed_block = None
elif templ_block[4] < fixed_block[2]:
buff = self.file_mask[1][idx[1] : templ_block[4]]
# consume templ block
templ_block = None
idx = (idx[0] + len(buff), idx[1] + len(buff), idx[2] + len(buff))
write_buff += buff
continue
elif fixed_block[0] == "replace":
# Consider how to apply fixes.
# Can we implement the fix while staying in the equal segment?
if fixed_block[2] <= templ_block[4]:
# Yes! Write from the fixed version.
write_buff += self.file_mask[2][idx[2] : fixed_block[4]]
idx = (
idx[0] + (fixed_block[2] - fixed_block[1]),
fixed_block[2],
fixed_block[4],
)
# Consume the fixed block because we've written the whole thing.
fixed_block = None
continue
else:
raise NotImplementedError("DEF")
elif fixed_block[0] == "delete":
# We're deleting items, nothing to write but we can consume some
# blocks and advance some indexes.
idx = (
idx[0] + (fixed_block[2] - fixed_block[1]),
fixed_block[2],
fixed_block[4],
)
fixed_block = None
elif fixed_block[0] == "insert":
# We're inserting items, Write from the fix block, but only that index moves.
write_buff += self.file_mask[2][idx[2] : fixed_block[4]]
idx = (idx[0], idx[1], fixed_block[4])
fixed_block = None
else:
raise ValueError(
(
"Unexpected opcode {0} for fix block! Please report this "
"issue on github with the query and rules you're trying to "
"fix."
).format(fixed_block[0])
)
elif templ_block[0] == "replace":
# We're in a templated section - we should write the templated version.
# we should consume the whole replce block and then deal with where
# we end up.
buff = self.file_mask[0][idx[0] : templ_block[2]]
new_templ_idx = templ_block[4]
while True:
if fixed_block[2] > new_templ_idx >= fixed_block[1]:
# this block contains the end point
break
else:
if fixed_block[0] != "equal":
print("WARNING: Skipping edit block: {0}".format(fixed_block))
fixed_block = None
# Are we exaclty on a join?
if new_templ_idx == fixed_block[1]:
# GREAT - this makes things easy because we have an equality point already
idx = (templ_block[2], new_templ_idx, fixed_block[3])
else:
if fixed_block[0] == "equal":
# If it's in an equal block, we can use the same offset from the end.
idx = (
templ_block[2],
new_templ_idx,
fixed_block[3] + (new_templ_idx - fixed_block[1]),
)
else:
# TODO: We're trying to move through an templated section, but end up
# in a fixed section. We've lost track of indexes.
# We might need to panic if this happens...
print("UMMMMMM!")
print(new_templ_idx)
print(fixed_block)
raise NotImplementedError("ABC")
write_buff += buff
# consume template block
templ_block = None
elif templ_block[0] == "delete":
# The comparison, things that the templater has deleted
# some characters. This is just a quirk of the differ.
# In reality this means we just write these characters
# and don't worry about advancing the other indexes.
buff = self.file_mask[0][idx[0] : templ_block[2]]
# consume templ block
templ_block = None
idx = (idx[0] + len(buff), idx[1], idx[2])
write_buff += buff
else:
raise ValueError(
(
"Unexpected opcode {0} for template block! Please report this "
"issue on github with the query and rules you're trying to "
"fix."
).format(templ_block[0])
)
# The success metric here is whether anything ACTUALLY changed.
return write_buff, write_buff != self.file_mask[0]
|
def fix_string(self, verbosity=0):
"""Obtain the changes to a path as a string.
We use the file_mask to do a safe merge, avoiding any templated
sections. First we need to detect where there have been changes
between the fixed and templated versions.
We use difflib.SequenceMatcher.get_opcodes
See: https://docs.python.org/3.7/library/difflib.html#difflib.SequenceMatcher.get_opcodes
It returns a list of tuples ('equal|replace', ia1, ia2, ib1, ib2).
"""
verbosity_logger(
"Persisting file masks: {0}".format(self.file_mask), verbosity=verbosity
)
# Compare Templated with Raw
diff_templ = SequenceMatcher(
autojunk=None, a=self.file_mask[0], b=self.file_mask[1]
)
diff_templ_codes = diff_templ.get_opcodes()
verbosity_logger(
"Templater diff codes: {0}".format(diff_templ_codes), verbosity=verbosity
)
# Compare Fixed with Templated
diff_fix = SequenceMatcher(autojunk=None, a=self.file_mask[1], b=self.file_mask[2])
# diff_fix = SequenceMatcher(autojunk=None, a=self.file_mask[1][0], b=self.file_mask[2][0])
diff_fix_codes = diff_fix.get_opcodes()
verbosity_logger(
"Fixing diff codes: {0}".format(diff_fix_codes), verbosity=verbosity
)
# If diff_templ isn't the same then we should just keep the template. If there *was*
# a fix in that space, then we should raise an issue
# If it is the same, then we can apply fixes as expected.
write_buff = ""
fixed_block = None
templ_block = None
# index in raw, templ and fix
idx = (0, 0, 0)
loop_idx = 0
while True:
loop_idx += 1
verbosity_logger(
"{0:04d}: Write Loop: idx:{1}, buff:{2!r}".format(
loop_idx, idx, write_buff
),
verbosity=verbosity,
)
if templ_block is None:
if diff_templ_codes:
templ_block = diff_templ_codes.pop(0)
# We've exhausted the template. Have we exhausted the fixes?
elif fixed_block is None:
# Yes - excellent. DONE
break
else:
raise NotImplementedError(
"Fix Block left over! DOn't know how to handle this! aeflf8wh"
)
if fixed_block is None:
if diff_fix_codes:
fixed_block = diff_fix_codes.pop(0)
else:
raise NotImplementedError("Unexpectedly depleted the fixes. Panic!")
verbosity_logger(
"{0:04d}: Blocks: template:{1}, fix:{2}".format(
loop_idx, templ_block, fixed_block
),
verbosity=verbosity,
)
if templ_block[0] == "equal":
if fixed_block[0] == "equal":
# No templating, no fixes, go with middle and advance indexes
# Find out how far we can advance (we use the middle version because it's common)
if templ_block[4] == fixed_block[2]:
buff = self.file_mask[1][idx[1] : fixed_block[2]]
# consume both blocks
fixed_block = None
templ_block = None
elif templ_block[4] > fixed_block[2]:
buff = self.file_mask[1][idx[1] : fixed_block[2]]
# consume fixed block
fixed_block = None
elif templ_block[4] < fixed_block[2]:
buff = self.file_mask[1][idx[1] : templ_block[4]]
# consume templ block
templ_block = None
idx = (idx[0] + len(buff), idx[1] + len(buff), idx[2] + len(buff))
write_buff += buff
continue
elif fixed_block[0] == "replace":
# Consider how to apply fixes.
# Can we implement the fix while staying in the equal segment?
if fixed_block[2] <= templ_block[4]:
# Yes! Write from the fixed version.
write_buff += self.file_mask[2][idx[2] : fixed_block[4]]
idx = (
idx[0] + (fixed_block[2] - fixed_block[1]),
fixed_block[2],
fixed_block[4],
)
# Consume the fixed block because we've written the whole thing.
fixed_block = None
continue
else:
raise NotImplementedError("DEF")
elif fixed_block[0] == "delete":
# We're deleting items, nothing to write but we can consume some
# blocks and advance some indexes.
idx = (
idx[0] + (fixed_block[2] - fixed_block[1]),
fixed_block[2],
fixed_block[4],
)
fixed_block = None
elif fixed_block[0] == "insert":
# We're inserting items, Write from the fix block, but only that index moves.
write_buff += self.file_mask[2][idx[2] : fixed_block[4]]
idx = (idx[0], idx[1], fixed_block[4])
fixed_block = None
else:
raise ValueError(
(
"Unexpected opcode {0} for fix block! Please report this "
"issue on github with the query and rules you're trying to "
"fix."
).format(fixed_block[0])
)
elif templ_block[0] == "replace":
# We're in a templated section - we should write the templated version.
# we should consume the whole replce block and then deal with where
# we end up.
buff = self.file_mask[0][idx[0] : templ_block[2]]
new_templ_idx = templ_block[4]
while True:
if fixed_block[2] > new_templ_idx >= fixed_block[1]:
# this block contains the end point
break
else:
if fixed_block[0] != "equal":
print("WARNING: Skipping edit block: {0}".format(fixed_block))
fixed_block = None
# Are we exaclty on a join?
if new_templ_idx == fixed_block[1]:
# GREAT - this makes things easy because we have an equality point already
idx = (templ_block[2], new_templ_idx, fixed_block[3])
else:
if fixed_block[0] == "equal":
# If it's in an equal block, we can use the same offset from the end.
idx = (
templ_block[2],
new_templ_idx,
fixed_block[3] + (new_templ_idx - fixed_block[1]),
)
else:
# TODO: We're trying to move through an templated section, but end up
# in a fixed section. We've lost track of indexes.
# We might need to panic if this happens...
print("UMMMMMM!")
print(new_templ_idx)
print(fixed_block)
raise NotImplementedError("ABC")
write_buff += buff
# consume template block
templ_block = None
elif templ_block[0] == "delete":
# The comparison, things that the templater has deleted
# some characters. This is just a quirk of the differ.
# In reality this means we just write these characters
# and don't worry about advancing the other indexes.
buff = self.file_mask[0][idx[0] : templ_block[2]]
# consume templ block
templ_block = None
idx = (idx[0] + len(buff), idx[1], idx[2])
write_buff += buff
else:
raise ValueError(
(
"Unexpected opcode {0} for template block! Please report this "
"issue on github with the query and rules you're trying to "
"fix."
).format(templ_block[0])
)
return write_buff
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def persist_tree(self, verbosity=0):
"""Persist changes to the given path.
We use the file_mask to do a safe merge, avoiding any templated
sections. First we need to detect where there have been changes
between the fixed and templated versions.
We use difflib.SequenceMatcher.get_opcodes
See: https://docs.python.org/3.7/library/difflib.html#difflib.SequenceMatcher.get_opcodes
It returns a list of tuples ('equal|replace', ia1, ia2, ib1, ib2).
"""
write_buff, success = self.fix_string(verbosity=verbosity)
if success:
# Actually write the file.
with open(self.path, "w") as f:
f.write(write_buff)
# TODO: Make return value of persist_changes() a more interesting result and then format it
# click.echo(format_linting_fixes(result, verbose=verbose), color=color)
return success
|
def persist_tree(self, verbosity=0):
"""Persist changes to the given path.
We use the file_mask to do a safe merge, avoiding any templated
sections. First we need to detect where there have been changes
between the fixed and templated versions.
We use difflib.SequenceMatcher.get_opcodes
See: https://docs.python.org/3.7/library/difflib.html#difflib.SequenceMatcher.get_opcodes
It returns a list of tuples ('equal|replace', ia1, ia2, ib1, ib2).
"""
write_buff = self.fix_string(verbosity=verbosity)
# Actually write the file.
with open(self.path, "w") as f:
f.write(write_buff)
# TODO: Make return value of persist_changes() a more interesting result and then format it
# click.echo(format_linting_fixes(result, verbose=verbose), color=color)
return True
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def num_violations(self, **kwargs):
"""Count the number of violations in the path."""
return sum(file.num_violations(**kwargs) for file in self.files)
|
def num_violations(self):
"""Count the number of violations in the path."""
return sum(file.num_violations() for file in self.files)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def persist_changes(self, verbosity=0, output_func=None, **kwargs):
"""Persist changes to files in the given path.
This also logs the output using the output_func if present.
"""
# Run all the fixes for all the files and return a dict
buffer = {}
for file in self.files:
if self.num_violations(**kwargs) > 0:
buffer[file.path] = file.persist_tree(verbosity=verbosity)
result = buffer[file.path]
else:
buffer[file.path] = True
result = "SKIP"
if output_func:
output_func(
format_filename(filename=file.path, success=result, verbose=verbosity)
)
return buffer
|
def persist_changes(self, verbosity=0):
"""Persist changes to files in the given path."""
# Run all the fixes for all the files and return a dict
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def num_violations(self, **kwargs):
"""Count the number of violations in thie result."""
return sum(path.num_violations(**kwargs) for path in self.paths)
|
def num_violations(self):
"""Count the number of violations in thie result."""
return sum(path.num_violations() for path in self.paths)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def persist_changes(self, verbosity=0, output_func=None, **kwargs):
"""Run all the fixes for all the files and return a dict."""
return self.combine_dicts(
*[
path.persist_changes(verbosity=verbosity, output_func=output_func, **kwargs)
for path in self.paths
]
)
|
def persist_changes(self, verbosity=0):
"""Run all the fixes for all the files and return a dict."""
return self.combine_dicts(
*[path.persist_changes(verbosity=verbosity) for path in self.paths]
)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def match(self, segments, parse_context):
"""Match a specific sequence of elements."""
if isinstance(segments, BaseSegment):
segments = tuple(segments)
matched_segments = MatchResult.from_empty()
unmatched_segments = segments
for idx, elem in enumerate(self._elements):
while True:
# Is it an indent or dedent?
if elem.is_meta:
# Work out how to find an appropriate pos_marker for
# the meta segment.
if matched_segments:
last_matched = matched_segments.matched_segments[-1]
meta_pos_marker = last_matched.get_end_pos_marker()
else:
meta_pos_marker = unmatched_segments[0].pos_marker
matched_segments += elem(pos_marker=meta_pos_marker)
break
if len(unmatched_segments) == 0:
# We've run our of sequence without matching everyting.
# Do only optional elements remain.
if all(e.is_optional() for e in self._elements[idx:]):
# then it's ok, and we can return what we've got so far.
# No need to deal with anything left over because we're at the end.
return matched_segments
else:
# we've got to the end of the sequence without matching all
# required elements.
return MatchResult.from_unmatched(segments)
else:
# We're not at the end, first detect whitespace and then try to match.
if self.code_only and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
# It's not whitespace, so carry on to matching
elem_match = elem._match(
unmatched_segments,
parse_context=parse_context.copy(incr="match_depth"),
)
if elem_match.has_match():
# We're expecting mostly partial matches here, but complete
# matches are possible.
matched_segments += elem_match.matched_segments
unmatched_segments = elem_match.unmatched_segments
# Each time we do this, we do a sense check to make sure we haven't
# dropped anything. (Because it's happened before!).
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
# Break out of the while loop and move to the next element.
break
else:
# If we can't match an element, we should ascertain whether it's
# required. If so then fine, move on, but otherwise we should crash
# out without a match. We have not matched the sequence.
if elem.is_optional():
# This will crash us out of the while loop and move us
# onto the next matching element
break
else:
return MatchResult.from_unmatched(segments)
# If we get to here, we've matched all of the elements (or skipped them)
# but still have some segments left (or perhaps have precisely zero left).
# In either case, we're golden. Return successfully, with any leftovers as
# the unmatched elements. UNLESS they're whitespace and we should be greedy.
if self.code_only:
while unmatched_segments and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
return MatchResult(matched_segments.matched_segments, unmatched_segments)
|
def match(self, segments, parse_context):
"""Match a specific sequence of elements."""
if isinstance(segments, BaseSegment):
segments = tuple(segments)
matched_segments = MatchResult.from_empty()
unmatched_segments = segments
for idx, elem in enumerate(self._elements):
while True:
# Is it an indent or dedent?
if elem.is_meta:
matched_segments += elem()
break
if len(unmatched_segments) == 0:
# We've run our of sequence without matching everyting.
# Do only optional elements remain.
if all(e.is_optional() for e in self._elements[idx:]):
# then it's ok, and we can return what we've got so far.
# No need to deal with anything left over because we're at the end.
return matched_segments
else:
# we've got to the end of the sequence without matching all
# required elements.
return MatchResult.from_unmatched(segments)
else:
# We're not at the end, first detect whitespace and then try to match.
if self.code_only and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
# It's not whitespace, so carry on to matching
elem_match = elem._match(
unmatched_segments,
parse_context=parse_context.copy(incr="match_depth"),
)
if elem_match.has_match():
# We're expecting mostly partial matches here, but complete
# matches are possible.
matched_segments += elem_match.matched_segments
unmatched_segments = elem_match.unmatched_segments
# Each time we do this, we do a sense check to make sure we haven't
# dropped anything. (Because it's happened before!).
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
# Break out of the while loop and move to the next element.
break
else:
# If we can't match an element, we should ascertain whether it's
# required. If so then fine, move on, but otherwise we should crash
# out without a match. We have not matched the sequence.
if elem.is_optional():
# This will crash us out of the while loop and move us
# onto the next matching element
break
else:
return MatchResult.from_unmatched(segments)
# If we get to here, we've matched all of the elements (or skipped them)
# but still have some segments left (or perhaps have precisely zero left).
# In either case, we're golden. Return successfully, with any leftovers as
# the unmatched elements. UNLESS they're whitespace and we should be greedy.
if self.code_only:
while unmatched_segments and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
return MatchResult(matched_segments.matched_segments, unmatched_segments)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def match(self, segments, parse_context):
"""Match if this is a bracketed sequence, with content that matches one of the elements.
1. work forwards to find the first bracket.
If we find something other that whitespace, then fail out.
2. Once we have the first bracket, we need to bracket count forward to find it's partner.
3. Assuming we find it's partner then we try and match what goes between them.
If we match, great. If not, then we return an empty match.
If we never find it's partner then we return an empty match but should probably
log a parsing warning, or error?
"""
seg_buff = segments
matched_segs = ()
# Look for the first bracket
start_match = self._code_only_sensitive_match(
seg_buff,
self.start_bracket,
parse_context=parse_context.copy(incr="match_depth"),
code_only=self.code_only,
)
if start_match:
seg_buff = start_match.unmatched_segments
else:
# Can't find the opening bracket. No Match.
return MatchResult.from_unmatched(segments)
# Look for the closing bracket
pre, end_match, _ = self._bracket_sensitive_look_ahead_match(
segments=seg_buff,
matchers=[self.end_bracket],
parse_context=parse_context,
code_only=self.code_only,
)
if not end_match:
raise SQLParseError(
"Couldn't find closing bracket for opening bracket.", segment=matched_segs
)
# Match the content now we've confirmed the brackets. We use the
# _longest helper function mostly just because it deals with multiple
# matchers.
content_match, _ = self._longest_code_only_sensitive_match(
pre,
self._elements,
parse_context=parse_context.copy(incr="match_depth"),
code_only=self.code_only,
)
# We require a complete match for the content (hopefully for obvious reasons)
if content_match.is_complete():
# We don't want to add metas if they're already there, so check.
# We also only add indents if there *is* content.
if content_match.matched_segments and content_match.matched_segments[0].is_meta:
pre_meta = ()
elif not content_match.matched_segments:
pre_meta = ()
else:
pre_meta = (
Indent(
pos_marker=content_match.matched_segments[0].get_start_pos_marker()
),
)
if end_match.matched_segments and end_match.matched_segments[0].is_meta:
post_meta = ()
elif not content_match.matched_segments:
post_meta = ()
else:
post_meta = (
Dedent(
pos_marker=content_match.matched_segments[-1].get_end_pos_marker()
),
)
return MatchResult(
start_match.matched_segments
+ pre_meta # Add a meta indent here
+ content_match.matched_segments
+ post_meta # Add a meta indent here
+ end_match.matched_segments,
end_match.unmatched_segments,
)
else:
# Now if we've not matched there's a final option. If the content is optional
# and we allow non-code, then if the content is all non-code then it could be
# empty brackets and still match.
# NB: We don't add indents here, because there's nothing to indent
if (
all(e.is_optional() for e in self._elements)
and self.code_only
and all(not e.is_code for e in pre)
):
# It worked!
return MatchResult(
start_match.matched_segments + pre + end_match.matched_segments,
end_match.unmatched_segments,
)
else:
return MatchResult.from_unmatched(segments)
|
def match(self, segments, parse_context):
"""Match if this is a bracketed sequence, with content that matches one of the elements.
1. work forwards to find the first bracket.
If we find something other that whitespace, then fail out.
2. Once we have the first bracket, we need to bracket count forward to find it's partner.
3. Assuming we find it's partner then we try and match what goes between them.
If we match, great. If not, then we return an empty match.
If we never find it's partner then we return an empty match but should probably
log a parsing warning, or error?
"""
seg_buff = segments
matched_segs = ()
# Look for the first bracket
start_match = self._code_only_sensitive_match(
seg_buff,
self.start_bracket,
parse_context=parse_context.copy(incr="match_depth"),
code_only=self.code_only,
)
if start_match:
seg_buff = start_match.unmatched_segments
else:
# Can't find the opening bracket. No Match.
return MatchResult.from_unmatched(segments)
# Look for the closing bracket
pre, end_match, _ = self._bracket_sensitive_look_ahead_match(
segments=seg_buff,
matchers=[self.end_bracket],
parse_context=parse_context,
code_only=self.code_only,
)
if not end_match:
raise SQLParseError(
"Couldn't find closing bracket for opening bracket.", segment=matched_segs
)
# Match the content now we've confirmed the brackets. We use the
# _longest helper function mostly just because it deals with multiple
# matchers.
content_match, _ = self._longest_code_only_sensitive_match(
pre,
self._elements,
parse_context=parse_context.copy(incr="match_depth"),
code_only=self.code_only,
)
# We require a complete match for the content (hopefully for obvious reasons)
if content_match.is_complete():
# We don't want to add metas if they're already there, so check
if content_match.matched_segments and content_match.matched_segments[0].is_meta:
pre_meta = ()
else:
pre_meta = (Indent(),)
if end_match.matched_segments and end_match.matched_segments[0].is_meta:
post_meta = ()
else:
post_meta = (Dedent(),)
return MatchResult(
start_match.matched_segments
+ pre_meta # Add a meta indent here
+ content_match.matched_segments
+ post_meta # Add a meta indent here
+ end_match.matched_segments,
end_match.unmatched_segments,
)
else:
# Now if we've not matched there's a final option. If the content is optional
# and we allow non-code, then if the content is all non-code then it could be
# empty brackets and still match.
# NB: We don't add indents here, because there's nothing to indent
if (
all(e.is_optional() for e in self._elements)
and self.code_only
and all(not e.is_code for e in pre)
):
# It worked!
return MatchResult(
start_match.matched_segments + pre + end_match.matched_segments,
end_match.unmatched_segments,
)
else:
return MatchResult.from_unmatched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def validate_segments(self, text="constructing", validate=True):
"""Validate the current set of segments.
Check the elements of the `segments` attribute are all
themselves segments, and that the positions match up.
`validate` confirms whether we should check contigiousness.
"""
# Placeholder variables for positions
start_pos = None
end_pos = None
prev_seg = None
for elem in self.segments:
if not isinstance(elem, BaseSegment):
raise TypeError(
"In {0} {1}, found an element of the segments tuple which"
" isn't a segment. Instead found element of type {2}.\nFound: {3}\nFull segments:{4}".format(
text, type(self), type(elem), elem, self.segments
)
)
# While applying fixes, we shouldn't validate here, because it will fail.
if validate:
# If we have a comparison point, validate that
if end_pos and elem.get_start_pos_marker() != end_pos:
raise TypeError(
"In {0} {1}, found an element of the segments tuple which"
" isn't contigious with previous: {2} > {3}".format(
text, type(self), prev_seg, elem
)
)
start_pos = elem.get_start_pos_marker()
end_pos = elem.get_end_pos_marker()
prev_seg = elem
if start_pos.advance_by(elem.raw) != end_pos:
raise TypeError(
"In {0} {1}, found an element of the segments tuple which"
" isn't self consistent: {2}".format(text, type(self), elem)
)
|
def validate_segments(self, text="constructing"):
"""Check the elements of the `segments` attribute are all themselves segments."""
for elem in self.segments:
if not isinstance(elem, BaseSegment):
raise TypeError(
"In {0} {1}, found an element of the segments tuple which"
" isn't a segment. Instead found element of type {2}.\nFound: {3}\nFull segments:{4}".format(
text, type(self), type(elem), elem, self.segments
)
)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def __init__(self, segments, pos_marker=None, validate=True):
if len(segments) == 0:
raise RuntimeError(
"Setting {0} with a zero length segment set. This shouldn't happen.".format(
self.__class__
)
)
if hasattr(segments, "matched_segments"):
# Safely extract segments from a match
self.segments = segments.matched_segments
elif isinstance(segments, tuple):
self.segments = segments
elif isinstance(segments, list):
self.segments = tuple(segments)
else:
raise TypeError(
"Unexpected type passed to BaseSegment: {0}".format(type(segments))
)
# Check elements of segments:
self.validate_segments(validate=validate)
if pos_marker:
self.pos_marker = pos_marker
else:
# If no pos given, it's the pos of the first segment
# Work out if we're dealing with a match result...
if hasattr(segments, "initial_match_pos_marker"):
self.pos_marker = segments.initial_match_pos_marker()
elif isinstance(segments, (tuple, list)):
self.pos_marker = segments[0].pos_marker
else:
raise TypeError(
"Unexpected type passed to BaseSegment: {0}".format(type(segments))
)
|
def __init__(self, segments, pos_marker=None):
if len(segments) == 0:
raise RuntimeError(
"Setting {0} with a zero length segment set. This shouldn't happen.".format(
self.__class__
)
)
if hasattr(segments, "matched_segments"):
# Safely extract segments from a match
self.segments = segments.matched_segments
elif isinstance(segments, tuple):
self.segments = segments
elif isinstance(segments, list):
self.segments = tuple(segments)
else:
raise TypeError(
"Unexpected type passed to BaseSegment: {0}".format(type(segments))
)
# Check elements of segments:
self.validate_segments()
if pos_marker:
self.pos_marker = pos_marker
else:
# If no pos given, it's the pos of the first segment
# Work out if we're dealing with a match result...
if hasattr(segments, "initial_match_pos_marker"):
self.pos_marker = segments.initial_match_pos_marker()
elif isinstance(segments, (tuple, list)):
self.pos_marker = segments[0].pos_marker
else:
raise TypeError(
"Unexpected type passed to BaseSegment: {0}".format(type(segments))
)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def apply_fixes(self, fixes):
"""Apply an iterable of fixes to this segment.
Used in applying fixes if we're fixing linting errors.
If anything changes, this should return a new version of the segment
rather than mutating the original.
Note: We need to have fixes to apply AND this must have children. In the case
of raw segments, they will be replaced or removed by their parent and
so this function should just return self.
"""
# Let's check what we've been given.
if fixes and isinstance(fixes[0], SQLLintError):
logging.error("Transforming `fixes` from errors into a list of fixes")
# We've got linting errors, let's aggregate them into a list of fixes
buff = []
for err in fixes:
buff += err.fixes
# Overwrite fixes
fixes = buff
if fixes and not self.is_raw():
# Get a reference to self to start with, but this will rapidly
# become a working copy.
r = self
# Make a working copy
seg_buffer = []
todo_buffer = list(self.segments)
while True:
if len(todo_buffer) == 0:
break
else:
seg = todo_buffer.pop(0)
# We don't apply fixes to meta segments
if seg.is_meta:
seg_buffer.append(seg)
continue
fix_buff = fixes.copy()
unused_fixes = []
while fix_buff:
f = fix_buff.pop()
if f.anchor == seg:
if f.edit_type == "delete":
# We're just getting rid of this segment.
seg = None
elif f.edit_type in ("edit", "create"):
# We're doing a replacement (it could be a single segment or an iterable)
if isinstance(f.edit, BaseSegment):
seg_buffer.append(f.edit)
else:
for s in f.edit:
seg_buffer.append(s)
if f.edit_type == "create":
# in the case of a creation, also add this segment on the end
seg_buffer.append(seg)
else:
raise ValueError(
"Unexpected edit_type: {0!r} in {1!r}".format(
f.edit_type, f
)
)
# We've applied a fix here. Move on, this also consumes the fix
# TODO: Maybe deal with overlapping fixes later.
break
else:
# We've not used the fix so we should keep it in the list for later.
unused_fixes.append(f)
else:
seg_buffer.append(seg)
# Switch over the the unused list
fixes = unused_fixes + fix_buff
# Then recurse (i.e. deal with the children) (Requeueing)
seg_queue = seg_buffer
seg_buffer = []
for seg in seg_queue:
s, fixes = seg.apply_fixes(fixes)
seg_buffer.append(s)
# Reform into a new segment
r = r.__class__(
segments=tuple(seg_buffer), pos_marker=r.pos_marker, validate=False
)
# Lastly, before returning, we should realign positions.
# Note: Realign also returns a copy
return r.realign(), fixes
else:
return self, fixes
|
def apply_fixes(self, fixes):
"""Apply an iterable of fixes to this segment.
Used in applying fixes if we're fixing linting errors.
If anything changes, this should return a new version of the segment
rather than mutating the original.
Note: We need to have fixes to apply AND this must have children. In the case
of raw segments, they will be replaced or removed by their parent and
so this function should just return self.
"""
# Let's check what we've been given.
if fixes and isinstance(fixes[0], SQLLintError):
logging.error("Transforming `fixes` from errors into a list of fixes")
# We've got linting errors, let's aggregate them into a list of fixes
buff = []
for err in fixes:
buff += err.fixes
# Overwrite fixes
fixes = buff
if fixes and not self.is_raw():
# Get a reference to self to start with, but this will rapidly
# become a working copy.
r = self
# Make a working copy
seg_buffer = []
todo_buffer = list(self.segments)
while True:
if len(todo_buffer) == 0:
break
else:
seg = todo_buffer.pop(0)
unused_fixes = []
for f in fixes:
if f.anchor == seg:
if f.edit_type == "delete":
# We're just getting rid of this segment.
seg = None
elif f.edit_type in ("edit", "create"):
# We're doing a replacement (it could be a single segment or an iterable)
if isinstance(f.edit, BaseSegment):
seg_buffer.append(f.edit)
else:
for s in f.edit:
seg_buffer.append(s)
if f.edit_type == "create":
# in the case of a creation, also add this segment on the end
seg_buffer.append(seg)
else:
raise ValueError(
"Unexpected edit_type: {0!r} in {1!r}".format(
f.edit_type, f
)
)
# We've applied a fix here. Move on, this also consumes the fix
# TODO: Maybe deal with overlapping fixes later.
break
else:
# We've not used the fix so we should keep it in the list for later.
unused_fixes.append(f)
else:
seg_buffer.append(seg)
# Switch over the the unused list
fixes = unused_fixes
# Then recurse (i.e. deal with the children) (Requeueing)
seg_queue = seg_buffer
seg_buffer = []
for seg in seg_queue:
s, fixes = seg.apply_fixes(fixes)
seg_buffer.append(s)
# Reform into a new segment
r = r.__class__(segments=tuple(seg_buffer), pos_marker=r.pos_marker)
# Lastly, before returning, we should realign positions.
# Note: Realign also returns a copy
return r.realign(), fixes
else:
return self, fixes
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def realign(self):
"""Realign the positions in this segment.
Returns:
a copy of this class with the pos_markers realigned.
Note: this is used mostly during fixes.
Realign is recursive. We will assume that the pos_marker of THIS segment is
truthful, and that during recursion it will have been set by the parent.
This function will align the pos marker if it's direct children, we then
recurse to realign their children.
"""
seg_buffer = []
todo_buffer = list(self.segments)
running_pos = self.pos_marker
while True:
if len(todo_buffer) == 0:
# We're done.
break
else:
# Get the first off the buffer
seg = todo_buffer.pop(0)
# We'll preserve statement indexes so we should keep track of that.
# When recreating, we use the DELTA of the index so that's what matter...
idx = seg.pos_marker.statement_index - running_pos.statement_index
if seg.is_meta:
# It's a meta segment, just update the position
seg = seg.__class__(pos_marker=running_pos)
elif len(seg.segments) > 0:
# It's a compound segment, so keep track of it's children
child_segs = seg.segments
# Create a new segment of the same type with the new position
seg = seg.__class__(segments=child_segs, pos_marker=running_pos)
# Realign the children of that class
seg = seg.realign()
else:
# It's a raw segment...
# Create a new segment of the same type with the new position
seg = seg.__class__(raw=seg.raw, pos_marker=running_pos)
# Update the running position with the content of that segment
running_pos = running_pos.advance_by(raw=seg.raw, idx=idx)
# Add the buffer to my new segment
seg_buffer.append(seg)
# Create a new version of this class with the new details
return self.__class__(segments=tuple(seg_buffer), pos_marker=self.pos_marker)
|
def realign(self):
"""Realign the positions in this segment.
Returns:
a copy of this class with the pos_markers realigned.
Note: this is used mostly during fixes.
Realign is recursive. We will assume that the pos_marker of THIS segment is
truthful, and that during recursion it will have been set by the parent.
This function will align the pos marker if it's direct children, we then
recurse to realign their children.
"""
seg_buffer = []
todo_buffer = list(self.segments)
running_pos = self.pos_marker
while True:
if len(todo_buffer) == 0:
# We're done.
break
else:
# Get the first off the buffer
seg = todo_buffer.pop(0)
# Is it a meta segment?
if seg.is_meta:
# If so, just carry on.
seg_buffer.append(seg)
continue
# We'll preserve statement indexes so we should keep track of that.
# When recreating, we use the DELTA of the index so that's what matter...
idx = seg.pos_marker.statement_index - running_pos.statement_index
if len(seg.segments) > 0:
# It's a compound segment, so keep track of it's children
child_segs = seg.segments
# Create a new segment of the same type with the new position
seg = seg.__class__(segments=child_segs, pos_marker=running_pos)
# Realign the children of that class
seg = seg.realign()
else:
# It's a raw segment...
# Create a new segment of the same type with the new position
seg = seg.__class__(raw=seg.raw, pos_marker=running_pos)
# Update the running position with the content of that segment
running_pos = running_pos.advance_by(raw=seg.raw, idx=idx)
# Add the buffer to my new segment
seg_buffer.append(seg)
# Create a new version of this class with the new details
return self.__class__(segments=tuple(seg_buffer), pos_marker=self.pos_marker)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def __init__(self, pos_marker):
"""For the indent we override the init method.
For something without content, the content doesn't make
sense. The pos_marker, will be matched with the following
segment, but meta segments are ignored during fixes so it's
ok in this sense. We need the pos marker later for dealing
with repairs.
"""
self._raw = ""
# TODO: Make sure that we DO actually skip meta segments
# during fixes.
self.pos_marker = pos_marker
|
def __init__(self):
"""For the indent we override the init method.
For something without content, neither makes sense.
"""
self._raw = ""
self.pos_marker = None
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def __init__(self, segments, role, indent_balance, indent_impulse=0):
self.segments = segments
self.role = role
self.indent_balance = indent_balance
self.indent_impulse = indent_impulse
|
def __init__(self, max_line_length=80, tab_space_size=4, indent_unit="space", **kwargs):
"""Initialise, getting the max line length."""
self.max_line_length = max_line_length
# Call out tab_space_size and indent_unit to make it clear they're still options.
super(Rule_L016, self).__init__(
tab_space_size=tab_space_size, indent_unit=indent_unit, **kwargs
)
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def _eval(self, segment, raw_stack, **kwargs):
"""Line is too long.
This only triggers on newline segments, evaluating the whole line.
The detection is simple, the fixing is much trickier.
"""
if segment.name == "newline":
# iterate to buffer the whole line up to this point
this_line = []
idx = -1
while True:
if len(raw_stack) >= abs(idx):
s = raw_stack[idx]
if s.name == "newline":
break
else:
this_line.insert(0, s)
idx -= 1
else:
break
# Now we can work out the line length and deal with the content
line_len = sum(len(s.raw) for s in this_line)
if line_len > self.max_line_length:
# Problem, we'll be reporting a violation. The
# question is, can we fix it?
# We'll need the indent, so let's get it for fixing.
line_indent = []
idx = 0
for s in this_line:
if s.name == "whitespace":
line_indent.append(s)
else:
break
# Does the line end in an inline comment that we can move back?
if this_line[-1].name == "inline_comment":
# Set up to delete the original comment and the preceeding whitespace
delete_buffer = [LintFix("delete", this_line[-1])]
idx = -2
while True:
if (
len(this_line) >= abs(idx)
and this_line[idx].name == "whitespace"
):
delete_buffer.append(LintFix("delete", this_line[idx]))
idx -= 1
else:
break
# Create a newline before this one with the existing comment, an
# identical indent AND a terminating newline, copied from the current
# target segment.
create_buffer = [
LintFix(
"create", this_line[0], line_indent + [this_line[-1], segment]
)
]
return LintResult(anchor=segment, fixes=delete_buffer + create_buffer)
fixes = self._eval_line_for_breaks(this_line)
if fixes:
return LintResult(anchor=segment, fixes=fixes)
return LintResult(anchor=segment)
# Otherwise we're all good
return None
|
def _eval(self, segment, raw_stack, **kwargs):
"""Line is too long.
This only triggers on newline segments, evaluating the whole line.
The detection is simple, the fixing is much trickier.
"""
if segment.name == "newline":
# iterate to buffer the whole line up to this point
this_line = []
idx = -1
while True:
if len(raw_stack) >= abs(idx):
s = raw_stack[idx]
if s.name == "newline":
break
else:
this_line.insert(0, s)
idx -= 1
else:
break
# Now we can work out the line length and deal with the content
line_len = sum(len(s.raw) for s in this_line)
if line_len > self.max_line_length:
# Problem, we'll be reporting a violation. The
# question is, can we fix it?
# We'll need the indent, so let's get it for fixing.
line_indent = []
idx = 0
for s in this_line:
if s.name == "whitespace":
line_indent.append(s)
else:
break
# Does the line end in an inline comment that we can move back?
if this_line[-1].name == "inline_comment":
# Set up to delete the original comment and the preceeding whitespace
delete_buffer = [LintFix("delete", this_line[-1])]
idx = -2
while True:
if (
len(this_line) >= abs(idx)
and this_line[idx].name == "whitespace"
):
delete_buffer.append(LintFix("delete", this_line[idx]))
idx -= 1
else:
break
# Create a newline before this one with the existing comment, an
# identical indent AND a terminating newline, copied from the current
# target segment.
create_buffer = [
LintFix(
"create", this_line[0], line_indent + [this_line[-1], segment]
)
]
return LintResult(anchor=segment, fixes=delete_buffer + create_buffer)
# Does the line contain a place where an indent might be possible?
if any(elem.is_meta and elem.indent_val != 0 for elem in this_line):
# What's the net sum of them?
indent_balance = sum(
elem.indent_val for elem in this_line if elem.is_meta
)
# Yes, let's work out which is best.
if indent_balance == 0:
# It's even. We should break after the *last* dedent
ws_pre = []
ws_post = []
running_balance = 0
started = False
found = False
fix_buffer = None
# Work through to find the right point
for elem in this_line:
if elem.name == "whitespace":
if found:
if fix_buffer is None:
# In this case we EDIT, because
# we want to remove the existing whitespace
# here. We need to remember the INDENT.
fix_buffer = [
LintFix("edit", elem, [segment] + line_indent)
]
else:
# Store potentially unnecessary whitespace.
ws_post.append(elem)
elif started:
# Store potentially unnecessary whitespace.
ws_pre.append(elem)
elif elem.is_meta:
running_balance += elem.indent_val
started = True
# Clear the buffer.
ws_post = []
if running_balance == 0:
found = True
else:
# Something that isn't a meta or whitespace
if found:
if fix_buffer is None:
# In this case we create because we
# want to preserve what already exits
# here. We need to remember the INDENT.
fix_buffer = [
LintFix("create", elem, [segment] + line_indent)
]
# We have all we need
break
else:
# Clear the buffer.
ws_pre = []
else:
raise RuntimeError("We shouldn't get here!")
# Remove unnecessary whitespace
for elem in ws_pre + ws_post:
fix_buffer.append(LintFix("delete", elem))
return LintResult(anchor=segment, fixes=fix_buffer)
elif indent_balance > 0:
# If it's positive, we have more indents than dedents.
# Make sure the first unused indent is used.
delete_buffer = []
newline_anchor = None
found = False
for elem in this_line:
if elem.name == "whitespace":
delete_buffer.append(elem)
elif found:
newline_anchor = elem
break
elif elem.is_meta:
if elem.indent_val > 0:
found = True
else:
pass
else:
# It's not meta, and not whitespace:
# reset buffer
delete_buffer = []
else:
raise RuntimeError("We shouldn't get here!")
# Make a newline where it needs to be, with ONE EXTRA INDENT
new_indent = self._make_indent(1)
fix_buffer = [
LintFix(
"create",
newline_anchor,
# It's ok to use the current segment posmarker, because we're staying in the same statement (probably?)
[segment]
+ line_indent
+ [
self.make_whitespace(
raw=new_indent, pos_marker=segment.pos_marker
)
],
)
]
# Remove unnecessary whitespace
for elem in delete_buffer:
fix_buffer.append(LintFix("delete", elem))
return LintResult(anchor=segment, fixes=fix_buffer)
else:
# Don't know what to do here!
raise NotImplementedError(
(
"Don't know what to do with negative indent balance ({0})."
).format(indent_balance)
)
return LintResult(anchor=segment)
# Otherwise we're all good
return None
|
https://github.com/sqlfluff/sqlfluff/issues/132
|
$ sqlfluff fix --rules L001 forecastservice\sql\get_phasing_v2.sql
==== finding violations ====
== [forecastservice\sql\get_phasing_v2.sql] FAIL
L: 0 | P: 0 | ???? | Failure in Jinja templating: 'first_mon' is undefined. Have you configured your variables?
L: 7 | P: 1 | ???? | Unable to lex characters: ''{% if regi'...'
==== fixing violations ====
2 violations found
Are you sure you wish to attempt to fix these? [Y/n] ...
Attempting fixes...
Persisting Changes...
Traceback (most recent call last):
File "C:\Users\alan\dev\tails-analytics-forecastservice\env\Scripts\sqlfluff-script.py", line 11, in <module>
load_entry_point('sqlfluff==0.2.4', 'console_scripts', 'sqlfluff')()
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 717, in main
rv = self.invoke(ctx)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\click\core.py", line 555, in invoke
return callback(*args, **kwargs)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\cli\commands.py", line 197, in fix
result.persist_changes(verbosity=verbose)
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in persist_changes
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 327, in <listcomp>
return self.combine_dicts(*[path.persist_changes(verbosity=verbosity) for path in self.paths])
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in persist_changes
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 253, in <dictcomp>
return {file.path: file.persist_tree(verbosity=verbosity) for file in self.files}
File "c:\users\alan\dev\tails-analytics-forecastservice\env\lib\site-packages\sqlfluff\linter.py", line 61, in persist_tree
diff_templ_codes = diff_templ.get_opcodes()
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 550, in get_opcodes
for ai, bj, size in self.get_matching_blocks():
File "C:\Users\alan\AppData\Local\Programs\Python\Python36\lib\difflib.py", line 467, in get_matching_blocks
la, lb = len(self.a), len(self.b)
TypeError: object of type 'NoneType' has no len()
|
TypeError
|
def _eval(self, segment, raw_stack, memory, **kwargs):
    """Inconsistent capitalisation of keywords.

    We use the `memory` feature here to keep track of
    what we've seen in the past.

    Args:
        segment: The segment currently being evaluated.
        raw_stack: The preceding raw segments (unused in this rule).
        memory: Dict persisted between calls; holds "cases_seen", the
            set of capitalisation styles observed so far.

    Returns:
        LintResult: always carries ``memory`` forward; when the policy
        is violated it also carries an anchor and an "edit" fix.
    """
    cases_seen = memory.get("cases_seen", set())
    if segment.type == self._target_elem:
        raw = segment.raw
        uc = raw.upper()
        lc = raw.lower()
        cap = raw.capitalize()
        # Classify the capitalisation style of this keyword.
        seen_case = None
        if uc == lc:
            # Caseless (no letters to capitalise either way).
            pass
        elif raw == uc:
            seen_case = "upper"
        elif raw == lc:
            seen_case = "lower"
        elif raw == cap:
            # NB: American spelling :(
            seen_case = "capitalize"
        else:
            seen_case = "inconsistent"
        # NOTE: We'll only add to cases_seen if we DONT
        # also raise an error, so that we can focus in.
        def make_replacement(seg, policy):
            """Make a replacement segment, based on seen capitalisation."""
            if policy == "lower":
                new_raw = seg.raw.lower()
            elif policy == "upper":
                new_raw = seg.raw.upper()
            elif policy == "capitalize":
                new_raw = seg.raw.capitalize()
            elif policy == "consistent":
                # The only case we DONT allow here is "inconsistent",
                # because it doesn't actually help us (recursing with it
                # would hit the ValueError below).
                filtered_cases_seen = [c for c in cases_seen if c != "inconsistent"]
                if filtered_cases_seen:
                    # Get an element from what we've already seen.
                    return make_replacement(seg, list(filtered_cases_seen)[0])
                else:
                    # If we haven't seen anything yet, then let's default
                    # to upper
                    return make_replacement(seg, "upper")
            else:
                raise ValueError(
                    "Unexpected capitalisation policy: {0!r}".format(policy)
                )
            # Make a new class and return it.
            return seg.__class__(raw=new_raw, pos_marker=seg.pos_marker)
        if not seen_case:
            # Skip this if we haven't seen anything good.
            # No need to update memory
            return LintResult(memory=memory)
        elif (
            # Are we required to be consistent? (and this is inconsistent?)
            (
                self.capitalisation_policy == "consistent"
                and (
                    # Either because we've seen multiple
                    (cases_seen and seen_case not in cases_seen)
                    # Or just because this one is inconsistent internally
                    or seen_case == "inconsistent"
                )
            )
            # Are we just required to be specific?
            # Policy is either upper, lower or capitalize
            or (
                self.capitalisation_policy != "consistent"
                and seen_case != self.capitalisation_policy
            )
        ):
            return LintResult(
                anchor=segment,
                fixes=[
                    LintFix(
                        "edit",
                        segment,
                        make_replacement(segment, self.capitalisation_policy),
                    )
                ],
                memory=memory,
            )
        else:
            # Update memory and carry on
            cases_seen.add(seen_case)
            memory["cases_seen"] = cases_seen
            return LintResult(memory=memory)
    # If it's not a keyword just carry on
    return LintResult(memory=memory)
|
def _eval(self, segment, raw_stack, memory, **kwargs):
    """Inconsistent capitalisation of keywords.

    We use the `memory` feature here to keep track of
    what we've seen in the past.

    Args:
        segment: The segment currently being evaluated.
        raw_stack: The preceding raw segments (unused in this rule).
        memory: Dict persisted between calls; holds "cases_seen", the
            set of capitalisation styles observed so far.

    Returns:
        LintResult: always carries ``memory`` forward; when the policy
        is violated it also carries an anchor and an "edit" fix.
    """
    cases_seen = memory.get("cases_seen", set())
    if segment.type == self._target_elem:
        raw = segment.raw
        uc = raw.upper()
        lc = raw.lower()
        cap = raw.capitalize()
        # Classify the capitalisation style of this keyword.
        seen_case = None
        if uc == lc:
            # Caseless (no letters to capitalise either way).
            pass
        elif raw == uc:
            seen_case = "upper"
        elif raw == lc:
            seen_case = "lower"
        elif raw == cap:
            # NB: American spelling :(
            seen_case = "capitalize"
        else:
            seen_case = "inconsistent"
        # NOTE: We'll only add to cases_seen if we DONT
        # also raise an error, so that we can focus in.
        def make_replacement(seg, policy):
            """Make a replacement segment, based on seen capitalisation."""
            if policy == "lower":
                new_raw = seg.raw.lower()
            elif policy == "upper":
                new_raw = seg.raw.upper()
            elif policy == "capitalize":
                new_raw = seg.raw.capitalize()
            elif policy == "consistent":
                # BUG FIX: "inconsistent" may be present in cases_seen,
                # but recursing with it would hit the ValueError below
                # (issue #87). Filter it out before picking a target case.
                filtered_cases_seen = [c for c in cases_seen if c != "inconsistent"]
                if filtered_cases_seen:
                    # Get an element from what we've already seen.
                    return make_replacement(seg, list(filtered_cases_seen)[0])
                else:
                    # If we haven't seen anything yet, then let's default
                    # to upper
                    return make_replacement(seg, "upper")
            else:
                raise ValueError(
                    "Unexpected capitalisation policy: {0!r}".format(policy)
                )
            # Make a new class and return it.
            return seg.__class__(raw=new_raw, pos_marker=seg.pos_marker)
        if not seen_case:
            # Skip this if we haven't seen anything good.
            # No need to update memory
            return LintResult(memory=memory)
        elif (
            # Are we required to be consistent? (and this is inconsistent?)
            (
                self.capitalisation_policy == "consistent"
                and (
                    # Either because we've seen multiple styles already...
                    (cases_seen and seen_case not in cases_seen)
                    # ...or just because this one is inconsistent internally.
                    or seen_case == "inconsistent"
                )
            )
            # Are we just required to be specific?
            # Policy is either upper, lower or capitalize
            or (
                self.capitalisation_policy != "consistent"
                and seen_case != self.capitalisation_policy
            )
        ):
            return LintResult(
                anchor=segment,
                fixes=[
                    LintFix(
                        "edit",
                        segment,
                        make_replacement(segment, self.capitalisation_policy),
                    )
                ],
                memory=memory,
            )
        else:
            # Update memory and carry on
            cases_seen.add(seen_case)
            memory["cases_seen"] = cases_seen
            return LintResult(memory=memory)
    # If it's not a keyword just carry on
    return LintResult(memory=memory)
|
https://github.com/sqlfluff/sqlfluff/issues/87
|
➜ sqlfluff version
0.2.4
➜ echo 'selECT * from table;' > test.sql
➜ sqlfluff fix test.sql --rules L001,L002,L003,L004,L005,L006,L007,L008,L009,L010,L011,L012,L013,L014
==== finding violations ====
Traceback (most recent call last):
File "/Users/nolan/anaconda3/bin/sqlfluff", line 10, in <module>
sys.exit(cli())
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 174, in fix
result = lnt.lint_paths(paths, verbosity=verbose)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/linter.py", line 605, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/linter.py", line 592, in lint_path
fix=fix, config=config))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/linter.py", line 536, in lint_string
lerrs, _, _, _ = crawler.crawl(parsed)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 192, in crawl
raw_stack=raw_stack, fix=fix, memory=memory)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 192, in crawl
raw_stack=raw_stack, fix=fix, memory=memory)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 192, in crawl
raw_stack=raw_stack, fix=fix, memory=memory)
[Previous line repeated 1 more time]
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 162, in crawl
raw_stack=raw_stack, memory=memory)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/std.py", line 488, in _eval
segment, self.capitalisation_policy))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/std.py", line 463, in make_replacement
return make_replacement(seg, list(cases_seen)[0])
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/std.py", line 469, in make_replacement
raise ValueError("Unexpected capitalisation policy: {0!r}".format(policy))
ValueError: Unexpected capitalisation policy: 'inconsistent'
|
ValueError
|
def make_replacement(seg, policy):
    """Return a copy of ``seg`` re-capitalised according to ``policy``."""
    # Simple, directly-applicable transformations.
    transforms = {
        "lower": str.lower,
        "upper": str.upper,
        "capitalize": str.capitalize,
    }
    if policy in transforms:
        # Rebuild the segment with the transformed raw text.
        return seg.__class__(
            raw=transforms[policy](seg.raw), pos_marker=seg.pos_marker
        )
    if policy == "consistent":
        # "inconsistent" is never a usable target case, so skip it.
        usable = [c for c in cases_seen if c != "inconsistent"]
        # Recurse with something already seen, defaulting to upper.
        return make_replacement(seg, usable[0] if usable else "upper")
    raise ValueError("Unexpected capitalisation policy: {0!r}".format(policy))
|
def make_replacement(seg, policy):
    """Make a replacement segment, based on seen capitalisation.

    Args:
        seg: The segment to rebuild with new capitalisation.
        policy: One of "lower", "upper", "capitalize" or "consistent".

    Returns:
        A new segment of the same class with re-capitalised raw text.

    Raises:
        ValueError: If ``policy`` is not one of the recognised values.
    """
    if policy == "lower":
        new_raw = seg.raw.lower()
    elif policy == "upper":
        new_raw = seg.raw.upper()
    elif policy == "capitalize":
        new_raw = seg.raw.capitalize()
    elif policy == "consistent":
        # BUG FIX: cases_seen may contain "inconsistent", and recursing
        # with it hits the ValueError below (issue #87). Filter it out
        # before picking a target case.
        filtered_cases_seen = [c for c in cases_seen if c != "inconsistent"]
        if filtered_cases_seen:
            # Get an element from what we've already seen.
            return make_replacement(seg, filtered_cases_seen[0])
        else:
            # If we haven't seen anything yet, then let's default
            # to upper
            return make_replacement(seg, "upper")
    else:
        raise ValueError("Unexpected capitalisation policy: {0!r}".format(policy))
    # Make a new class and return it.
    return seg.__class__(raw=new_raw, pos_marker=seg.pos_marker)
|
https://github.com/sqlfluff/sqlfluff/issues/87
|
➜ sqlfluff version
0.2.4
➜ echo 'selECT * from table;' > test.sql
➜ sqlfluff fix test.sql --rules L001,L002,L003,L004,L005,L006,L007,L008,L009,L010,L011,L012,L013,L014
==== finding violations ====
Traceback (most recent call last):
File "/Users/nolan/anaconda3/bin/sqlfluff", line 10, in <module>
sys.exit(cli())
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 174, in fix
result = lnt.lint_paths(paths, verbosity=verbose)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/linter.py", line 605, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/linter.py", line 592, in lint_path
fix=fix, config=config))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/linter.py", line 536, in lint_string
lerrs, _, _, _ = crawler.crawl(parsed)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 192, in crawl
raw_stack=raw_stack, fix=fix, memory=memory)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 192, in crawl
raw_stack=raw_stack, fix=fix, memory=memory)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 192, in crawl
raw_stack=raw_stack, fix=fix, memory=memory)
[Previous line repeated 1 more time]
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/base.py", line 162, in crawl
raw_stack=raw_stack, memory=memory)
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/std.py", line 488, in _eval
segment, self.capitalisation_policy))
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/std.py", line 463, in make_replacement
return make_replacement(seg, list(cases_seen)[0])
File "/Users/nolan/anaconda3/lib/python3.7/site-packages/sqlfluff/rules/std.py", line 469, in make_replacement
raise ValueError("Unexpected capitalisation policy: {0!r}".format(policy))
ValueError: Unexpected capitalisation policy: 'inconsistent'
|
ValueError
|
def __init__(self, configs=None, overrides=None):
    """Merge defaults, file configs and overrides into one config tree."""
    # Overrides are kept so they can be passed down to child configs.
    self._overrides = overrides
    defaults = ConfigLoader.get_global().load_default_config_file()
    self._configs = nested_combine(
        defaults, configs or {"core": {}}, {"core": overrides or {}}
    )
    # Work on the "core" section via a local alias (same dict object,
    # so mutations land in self._configs).
    core = self._configs["core"]
    # Some configs require special treatment.
    core["color"] = False if core.get("nocolor", False) else None
    # Whitelists and blacklists: comma-separated strings become lists.
    rules = core.get("rules", None)
    core["rule_whitelist"] = rules.split(",") if rules else None
    excluded = core.get("exclude_rules", None)
    core["rule_blacklist"] = excluded.split(",") if excluded else None
    # Configure recursion: zero means "no limit".
    if core.get("recurse", 0) == 0:
        core["recurse"] = True
    # Resolve dialect and templater names into their objects.
    core["dialect_obj"] = dialect_selector(core["dialect"])
    core["templater_obj"] = templater_selector(core["templater"])
|
def __init__(self, configs=None, overrides=None):
    """Construct the config from defaults, file configs and overrides.

    Args:
        configs: Nested dict of configs loaded from file (optional).
        overrides: Dict of command-line overrides applied to the
            "core" section (optional).
    """
    self._overrides = overrides  # We only store this for child configs
    defaults = ConfigLoader.get_global().load_default_config_file()
    self._configs = nested_combine(
        defaults, configs or {"core": {}}, {"core": overrides or {}}
    )
    # Some configs require special treatment.
    # BUG FIX: use .get() throughout - the default config file may not
    # define these keys, and direct indexing raises KeyError (issue #60).
    self._configs["core"]["color"] = (
        False if self._configs["core"].get("nocolor", False) else None
    )
    # Whitelists and blacklists
    if self._configs["core"].get("rules", None):
        self._configs["core"]["rule_whitelist"] = self._configs["core"]["rules"].split(
            ","
        )
    else:
        self._configs["core"]["rule_whitelist"] = None
    if self._configs["core"].get("exclude_rules", None):
        self._configs["core"]["rule_blacklist"] = self._configs["core"][
            "exclude_rules"
        ].split(",")
    else:
        self._configs["core"]["rule_blacklist"] = None
    # Configure Recursion: a value of 0 means recurse without limit.
    if self._configs["core"].get("recurse", 0) == 0:
        self._configs["core"]["recurse"] = True
    # Dialect and Template selection
    self._configs["core"]["dialect_obj"] = dialect_selector(
        self._configs["core"]["dialect"]
    )
    self._configs["core"]["templater_obj"] = templater_selector(
        self._configs["core"]["templater"]
    )
|
https://github.com/sqlfluff/sqlfluff/issues/60
|
$ sqlfluff version
Traceback (most recent call last):
File "/usr/local/pyenv/versions/3.7.3/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.2.1', 'console_scripts', 'sqlfluff')()
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 82, in version
c = get_config(**kwargs)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 55, in get_config
return FluffConfig.from_root(overrides=overrides)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/sqlfluff/config.py", line 283, in from_root
return cls(configs=c, overrides=overrides)
File "/usr/local/pyenv/versions/3.7.3/lib/python3.7/site-packages/sqlfluff/config.py", line 263, in __init__
if self._configs['core']['rules']:
KeyError: 'rules'
|
KeyError
|
def parse_file(self, f, fname=None, verbosity=0, recurse=True):
    """Lex and parse ``f``, returning the tree, violations and timings."""
    violations = []
    t_start = get_time()
    # A raw string is wrapped in a buffer so we can treat it like a file.
    if isinstance(f, str):
        f = StringIO(f)
    verbosity_logger("LEXING RAW ({0})".format(fname), verbosity=verbosity)
    # Lex, collecting any lexing problems as violations.
    try:
        file_segment = FileSegment.from_raw(f.read())
    except SQLLexError as err:
        violations.append(err)
        file_segment = None
    if file_segment:
        verbosity_logger(file_segment.stringify(), verbosity=verbosity)
    t_lexed = get_time()
    verbosity_logger("PARSING ({0})".format(fname), verbosity=verbosity)
    parsed = None
    # Only attempt a parse if lexing succeeded.
    if file_segment:
        try:
            # Build a parse context and parse within it.
            context = ParseContext(
                dialect=self.dialect, verbosity=verbosity, recurse=recurse
            )
            parsed = file_segment.parse(parse_context=context)
        except SQLParseError as err:
            violations.append(err)
            parsed = None
        if parsed:
            verbosity_logger(frame_msg("Parsed Tree:"), verbosity=verbosity)
            verbosity_logger(parsed.stringify(), verbosity=verbosity)
    t_parsed = get_time()
    # Phase timings for reporting.
    time_dict = {"lexing": t_lexed - t_start, "parsing": t_parsed - t_lexed}
    return parsed, violations, time_dict
|
def parse_file(self, f, fname=None, verbosity=0, recurse=True):
    """Lex and parse a file (or raw string), collecting violations.

    Args:
        f: A file-like object or a raw SQL string.
        fname: Optional filename, used only in log messages.
        verbosity: Logging verbosity level.
        recurse: Whether to parse recursively into sub-segments.

    Returns:
        tuple: (parsed tree or None, list of violations, timing dict).
    """
    violations = []
    t0 = get_time()
    # Allow f to optionally be a raw string
    if isinstance(f, str):
        # Add it to a buffer if that's what we're doing
        f = StringIO(f)
    verbosity_logger("LEXING RAW ({0})".format(fname), verbosity=verbosity)
    # Lex the file and log any problems
    try:
        fs = FileSegment.from_raw(f.read())
    except SQLLexError as err:
        violations.append(err)
        fs = None
    if fs:
        verbosity_logger(fs.stringify(), verbosity=verbosity)
    t1 = get_time()
    verbosity_logger("PARSING ({0})".format(fname), verbosity=verbosity)
    # Parse the file and log any problems (only if lexing succeeded)
    if fs:
        try:
            parsed = fs.parse(
                recurse=recurse, verbosity=verbosity, dialect=self.dialect
            )
        except SQLParseError as err:
            violations.append(err)
            parsed = None
        if parsed:
            verbosity_logger(frame_msg("Parsed Tree:"), verbosity=verbosity)
            verbosity_logger(parsed.stringify(), verbosity=verbosity)
    else:
        parsed = None
    t2 = get_time()
    # Timing for the two phases, for performance reporting.
    time_dict = {"lexing": t1 - t0, "parsing": t2 - t1}
    return parsed, violations, time_dict
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
    """
    Matching can be done from either the raw or the segments.
    This raw function can be overridden, or a grammar defined
    on the underlying class.
    """
    # Base implementation: subclasses must provide real matching.
    msg = "{0} has no match function implemented".format(self.__class__.__name__)
    raise NotImplementedError(msg)
|
def match(
    self,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """
    Matching can be done from either the raw or the segments.
    This raw function can be overridden, or a grammar defined
    on the underlying class.
    """
    # Base implementation: subclasses must provide real matching.
    msg = "{0} has no match function implemented".format(self.__class__.__name__)
    raise NotImplementedError(msg)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def _match(self, segments, parse_context):
    """A wrapper on the match function to do some basic validation

    Normalises ``segments`` into a tuple, logs entry/exit (with timing),
    delegates the real work to ``self.match``, and finally checks that
    the MatchResult still accounts for every segment it was given.
    """
    t0 = get_time()  # Start time, so we can log the match duration.
    if isinstance(segments, BaseSegment):
        segments = (segments,)  # Make into a tuple for compatibility
    if not isinstance(segments, tuple):
        logging.warning(
            "{0}.match, was passed {1} rather than tuple or segment".format(
                self.__class__.__name__, type(segments)
            )
        )
        if isinstance(segments, list):
            # Let's make it a tuple for compatibility
            segments = tuple(segments)
    if len(segments) == 0:
        logging.info(
            "{0}.match, was passed zero length segments list. NB: {0} contains {1!r}".format(
                self.__class__.__name__, self._elements
            )
        )
    # Work out the raw representation and curtail if long
    parse_match_logging(
        self.__class__.__name__,
        "_match",
        "IN",
        parse_context=parse_context,
        v_level=self.v_level,
        le=len(self._elements),
        ls=len(segments),
        seg=join_segments_raw_curtailed(segments),
    )
    m = self.match(segments, parse_context=parse_context)
    if not isinstance(m, MatchResult):
        logging.warning(
            "{0}.match, returned {1} rather than MatchResult".format(
                self.__class__.__name__, type(m)
            )
        )
    dt = get_time() - t0
    # Pick a log marker reflecting how complete the match was.
    if m.is_complete():
        msg = "OUT ++"
    elif m:
        msg = "OUT -"
    else:
        msg = "OUT"
    parse_match_logging(
        self.__class__.__name__,
        "_match",
        msg,
        parse_context=parse_context,
        v_level=self.v_level,
        dt=dt,
        m=m,
    )
    # Basic Validation: matched + unmatched must cover the input.
    check_still_complete(segments, m.matched_segments, m.unmatched_segments)
    return m
|
def _match(
    self,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """A wrapper on the match function to do some basic validation

    Normalises ``segments`` into a tuple, logs entry/exit (with timing),
    delegates the real work to ``self.match``, and finally checks that
    the MatchResult still accounts for every segment it was given.
    """
    t0 = get_time()  # Start time, so we can log the match duration.
    if isinstance(segments, BaseSegment):
        segments = (segments,)  # Make into a tuple for compatibility
    if not isinstance(segments, tuple):
        logging.warning(
            "{0}.match, was passed {1} rather than tuple or segment".format(
                self.__class__.__name__, type(segments)
            )
        )
        if isinstance(segments, list):
            # Let's make it a tuple for compatibility
            segments = tuple(segments)
    if len(segments) == 0:
        logging.info(
            "{0}.match, was passed zero length segments list. NB: {0} contains {1!r}".format(
                self.__class__.__name__, self._elements
            )
        )
    # Work out the raw representation and curtail if long
    parse_match_logging(
        parse_depth,
        match_depth,
        match_segment,
        self.__class__.__name__,
        "_match",
        "IN",
        verbosity=verbosity,
        v_level=self.v_level,
        le=len(self._elements),
        ls=len(segments),
        seg=join_segments_raw_curtailed(segments),
    )
    m = self.match(
        segments,
        match_depth=match_depth,
        parse_depth=parse_depth,
        verbosity=verbosity,
        dialect=dialect,
        match_segment=match_segment,
    )
    if not isinstance(m, MatchResult):
        logging.warning(
            "{0}.match, returned {1} rather than MatchResult".format(
                self.__class__.__name__, type(m)
            )
        )
    dt = get_time() - t0
    # Pick a log marker reflecting how complete the match was.
    if m.is_complete():
        msg = "OUT ++"
    elif m:
        msg = "OUT -"
    else:
        msg = "OUT"
    parse_match_logging(
        parse_depth,
        match_depth,
        match_segment,
        self.__class__.__name__,
        "_match",
        msg,
        verbosity=verbosity,
        v_level=self.v_level,
        dt=dt,
        m=m,
    )
    # Basic Validation: matched + unmatched must cover the input.
    check_still_complete(segments, m.matched_segments, m.unmatched_segments)
    return m
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
elem = self._get_elem(dialect=parse_context.dialect)
if elem:
# Match against that. NB We're not incrementing the match_depth here.
# References shouldn't relly count as a depth of match.
return elem._match(
segments=segments,
parse_context=parse_context.copy(match_segment=self._get_ref()),
)
else:
raise ValueError(
"Null Element returned! _elements: {0!r}".format(self._elements)
)
|
def match(
self,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
elem = self._get_elem(dialect=dialect)
if elem:
# Match against that. NB We're not incrementing the match_depth here.
# References shouldn't relly count as a depth of match.
match_segment = self._get_ref()
return elem._match(
segments=segments,
match_depth=match_depth,
parse_depth=parse_depth,
verbosity=verbosity,
dialect=dialect,
match_segment=match_segment,
)
else:
raise ValueError(
"Null Element returned! _elements: {0!r}".format(self._elements)
)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
best_match = None
# Match on each of the options
for opt in self._elements:
m = opt._match(segments, parse_context=parse_context.copy(incr="match_depth"))
# If we get a complete match, just return it. If it's incomplete, then check to
# see if it's all non-code if that allowed and match it
if m.is_complete():
# this will return on the *first* complete match
return m
elif m:
if self.code_only:
# Attempt to consume whitespace if we can
matched_segments = m.matched_segments
unmatched_segments = m.unmatched_segments
while True:
if len(unmatched_segments) > 0:
if unmatched_segments[0].is_code:
break
else:
# Append as tuple
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
else:
break
m = MatchResult(matched_segments, unmatched_segments)
if best_match:
if len(m) > len(best_match):
best_match = m
else:
continue
else:
best_match = m
parse_match_logging(
self.__class__.__name__,
"_match",
"Saving Match of Length {0}: {1}".format(len(m), m),
parse_context=parse_context,
v_level=self.v_level,
)
else:
# No full match from the first time round. If we've got a long partial match then return that.
if best_match:
return best_match
# Ok so no match at all from the elements. Small getout if we can match any whitespace
if self.code_only:
matched_segs = tuple()
unmatched_segs = segments
# Look for non-code up front
while True:
if len(unmatched_segs) == 0:
# We can't return a successful match on JUST whitespace
return MatchResult.from_unmatched(segments)
elif not unmatched_segs[0].is_code:
matched_segs += (unmatched_segs[0],)
unmatched_segs = unmatched_segs[1:]
else:
break
# Now try and match
for opt in self._elements:
m = opt._match(
unmatched_segs, parse_context=parse_context.copy(incr="match_depth")
)
# Once again, if it's complete - return, if not wait to see if we get a more complete one
new_match = MatchResult(
matched_segs + m.matched_segments, m.unmatched_segments
)
if m.is_complete():
return new_match
elif m:
if best_match:
if len(best_match) > len(m):
best_match = m
else:
continue
else:
best_match = m
parse_match_logging(
self.__class__.__name__,
"_match",
"Last-Ditch: Saving Match of Length {0}: {1}".format(
len(m), m
),
parse_context=parse_context,
v_level=self.v_level,
)
else:
if best_match:
return MatchResult(
matched_segs + best_match.matched_segments,
best_match.unmatched_segments,
)
else:
return MatchResult.from_unmatched(segments)
|
def match(
self,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
best_match = None
# Match on each of the options
for opt in self._elements:
m = opt._match(
segments,
match_depth=match_depth + 1,
parse_depth=parse_depth,
verbosity=verbosity,
dialect=dialect,
match_segment=match_segment,
)
# If we get a complete match, just return it. If it's incomplete, then check to
# see if it's all non-code if that allowed and match it
if m.is_complete():
# this will return on the *first* complete match
return m
elif m:
if self.code_only:
# Attempt to consume whitespace if we can
matched_segments = m.matched_segments
unmatched_segments = m.unmatched_segments
while True:
if len(unmatched_segments) > 0:
if unmatched_segments[0].is_code:
break
else:
# Append as tuple
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
else:
break
m = MatchResult(matched_segments, unmatched_segments)
if best_match:
if len(m) > len(best_match):
best_match = m
else:
continue
else:
best_match = m
parse_match_logging(
parse_depth,
match_depth,
match_segment,
self.__class__.__name__,
"_match",
"Saving Match of Length {0}: {1}".format(len(m), m),
verbosity=verbosity,
v_level=self.v_level,
)
else:
# No full match from the first time round. If we've got a long partial match then return that.
if best_match:
return best_match
# Ok so no match at all from the elements. Small getout if we can match any whitespace
if self.code_only:
matched_segs = tuple()
unmatched_segs = segments
# Look for non-code up front
while True:
if len(unmatched_segs) == 0:
# We can't return a successful match on JUST whitespace
return MatchResult.from_unmatched(segments)
elif not unmatched_segs[0].is_code:
matched_segs += (unmatched_segs[0],)
unmatched_segs = unmatched_segs[1:]
else:
break
# Now try and match
for opt in self._elements:
m = opt._match(
unmatched_segs,
match_depth=match_depth + 1,
parse_depth=parse_depth,
verbosity=verbosity,
dialect=dialect,
match_segment=match_segment,
)
# Once again, if it's complete - return, if not wait to see if we get a more complete one
new_match = MatchResult(
matched_segs + m.matched_segments, m.unmatched_segments
)
if m.is_complete():
return new_match
elif m:
if best_match:
if len(best_match) > len(m):
best_match = m
else:
continue
else:
best_match = m
parse_match_logging(
parse_depth,
match_depth,
match_segment,
self.__class__.__name__,
"_match",
"Last-Ditch: Saving Match of Length {0}: {1}".format(
len(m), m
),
verbosity=verbosity,
v_level=self.v_level,
)
else:
if best_match:
return MatchResult(
matched_segs + best_match.matched_segments,
best_match.unmatched_segments,
)
else:
return MatchResult.from_unmatched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
# Match on each of the options
matched_segments = MatchResult.from_empty()
unmatched_segments = segments
n_matches = 0
while True:
if self.max_times and n_matches >= self.max_times:
# We've matched as many times as we can
return MatchResult(matched_segments.matched_segments, unmatched_segments)
# Is there anything left to match?
if len(unmatched_segments) == 0:
# No...
if n_matches >= self.min_times:
return MatchResult(
matched_segments.matched_segments, unmatched_segments
)
else:
# We didn't meet the hurdle
return MatchResult.from_unmatched(unmatched_segments)
# Is the next segment code?
if self.code_only and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
# Try the possibilities
for opt in self._elements:
m = opt._match(
unmatched_segments, parse_context=parse_context.copy(incr="match_depth")
)
if m.has_match():
matched_segments += m.matched_segments
unmatched_segments = m.unmatched_segments
n_matches += 1
# Break out of the for loop which cycles us round
break
else:
# If we get here, then we've not managed to match. And the next
# unmatched segments are meaningful, i.e. they're not what we're
# looking for.
if n_matches >= self.min_times:
return MatchResult(
matched_segments.matched_segments, unmatched_segments
)
else:
# We didn't meet the hurdle
return MatchResult.from_unmatched(unmatched_segments)
|
def match(
self,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
# Match on each of the options
matched_segments = MatchResult.from_empty()
unmatched_segments = segments
n_matches = 0
while True:
if self.max_times and n_matches >= self.max_times:
# We've matched as many times as we can
return MatchResult(matched_segments.matched_segments, unmatched_segments)
# Is there anything left to match?
if len(unmatched_segments) == 0:
# No...
if n_matches >= self.min_times:
return MatchResult(
matched_segments.matched_segments, unmatched_segments
)
else:
# We didn't meet the hurdle
return MatchResult.from_unmatched(unmatched_segments)
# Is the next segment code?
if self.code_only and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
# Try the possibilities
for opt in self._elements:
m = opt._match(
unmatched_segments,
match_depth=match_depth + 1,
parse_depth=parse_depth,
verbosity=verbosity,
dialect=dialect,
match_segment=match_segment,
)
if m.has_match():
matched_segments += m.matched_segments
unmatched_segments = m.unmatched_segments
n_matches += 1
# Break out of the for loop which cycles us round
break
else:
# If we get here, then we've not managed to match. And the next
# unmatched segments are meaningful, i.e. they're not what we're
# looking for.
if n_matches >= self.min_times:
return MatchResult(
matched_segments.matched_segments, unmatched_segments
)
else:
# We didn't meet the hurdle
return MatchResult.from_unmatched(unmatched_segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
"""
Matching for GreedyUntil works just how you'd expect.
"""
pre, mat, _ = self._bracket_sensitive_look_ahead_match(
segments,
self._elements,
parse_context=parse_context.copy(incr="match_depth"),
code_only=self.code_only,
)
# Do we have a match?
if mat:
# Return everything up to the match
return MatchResult(pre, mat.all_segments())
else:
# Return everything
return MatchResult.from_matched(segments)
|
def match(
self,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
"""
Matching for GreedyUntil works just how you'd expect.
"""
for idx, seg in enumerate(segments):
for opt in self._elements:
if opt._match(
seg,
match_depth=match_depth + 1,
parse_depth=parse_depth,
verbosity=verbosity,
dialect=dialect,
match_segment=match_segment,
):
# We've matched something. That means we should return everything up to this point
return MatchResult(segments[:idx], segments[idx:])
else:
continue
else:
# We've got to the end without matching anything, so return.
# We don't need to keep track of the match results, because
# if any of them were usable, then we wouldn't be returning
# anyway.
return MatchResult.from_matched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
# Rewrite of sequence. We should match FORWARD, this reduced duplication.
# Sub-matchers should be greedy and so we can jsut work forward with each one.
if isinstance(segments, BaseSegment):
segments = tuple(segments)
# NB: We don't use seg_idx here because the submatchers may be mutating the length
# of the remaining segments
matched_segments = MatchResult.from_empty()
unmatched_segments = segments
for idx, elem in enumerate(self._elements):
while True:
if len(unmatched_segments) == 0:
# We've run our of sequence without matching everyting.
# Do only optional elements remain.
if all([e.is_optional() for e in self._elements[idx:]]):
# then it's ok, and we can return what we've got so far.
# No need to deal with anything left over because we're at the end.
return matched_segments
else:
# we've got to the end of the sequence without matching all
# required elements.
return MatchResult.from_unmatched(segments)
else:
# We're not at the end, first detect whitespace and then try to match.
if self.code_only and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
# It's not whitespace, so carry on to matching
elem_match = elem._match(
unmatched_segments,
parse_context=parse_context.copy(incr="match_depth"),
)
if elem_match.has_match():
# We're expecting mostly partial matches here, but complete
# matches are possible.
matched_segments += elem_match.matched_segments
unmatched_segments = elem_match.unmatched_segments
# Each time we do this, we do a sense check to make sure we haven't
# dropped anything. (Because it's happened before!).
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
# Break out of the while loop and move to the next element.
break
else:
# If we can't match an element, we should ascertain whether it's
# required. If so then fine, move on, but otherwise we should crash
# out without a match. We have not matched the sequence.
if elem.is_optional():
# This will crash us out of the while loop and move us
# onto the next matching element
break
else:
return MatchResult.from_unmatched(segments)
else:
# If we get to here, we've matched all of the elements (or skipped them)
# but still have some segments left (or perhaps have precisely zero left).
# In either case, we're golden. Return successfully, with any leftovers as
# the unmatched elements. UNLESS they're whitespace and we should be greedy.
if self.code_only:
while True:
if len(unmatched_segments) == 0:
break
elif not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
else:
break
return MatchResult(matched_segments.matched_segments, unmatched_segments)
|
def match(
self,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
# Rewrite of sequence. We should match FORWARD, this reduced duplication.
# Sub-matchers should be greedy and so we can jsut work forward with each one.
if isinstance(segments, BaseSegment):
segments = tuple(segments)
# NB: We don't use seg_idx here because the submatchers may be mutating the length
# of the remaining segments
matched_segments = MatchResult.from_empty()
unmatched_segments = segments
for idx, elem in enumerate(self._elements):
while True:
if len(unmatched_segments) == 0:
# We've run our of sequence without matching everyting.
# Do only optional elements remain.
if all([e.is_optional() for e in self._elements[idx:]]):
# then it's ok, and we can return what we've got so far.
# No need to deal with anything left over because we're at the end.
return matched_segments
else:
# we've got to the end of the sequence without matching all
# required elements.
return MatchResult.from_unmatched(segments)
else:
# We're not at the end, first detect whitespace and then try to match.
if self.code_only and not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
# It's not whitespace, so carry on to matching
elem_match = elem._match(
unmatched_segments,
match_depth=match_depth + 1,
parse_depth=parse_depth,
verbosity=verbosity,
dialect=dialect,
match_segment=match_segment,
)
if elem_match.has_match():
# We're expecting mostly partial matches here, but complete
# matches are possible.
matched_segments += elem_match.matched_segments
unmatched_segments = elem_match.unmatched_segments
# Each time we do this, we do a sense check to make sure we haven't
# dropped anything. (Because it's happened before!).
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
# Break out of the while loop and move to the next element.
break
else:
# If we can't match an element, we should ascertain whether it's
# required. If so then fine, move on, but otherwise we should crash
# out without a match. We have not matched the sequence.
if elem.is_optional():
# This will crash us out of the while loop and move us
# onto the next matching element
break
else:
return MatchResult.from_unmatched(segments)
else:
# If we get to here, we've matched all of the elements (or skipped them)
# but still have some segments left (or perhaps have precisely zero left).
# In either case, we're golden. Return successfully, with any leftovers as
# the unmatched elements. UNLESS they're whitespace and we should be greedy.
if self.code_only:
while True:
if len(unmatched_segments) == 0:
break
elif not unmatched_segments[0].is_code:
# We should add this one to the match and carry on
matched_segments += (unmatched_segments[0],)
unmatched_segments = unmatched_segments[1:]
check_still_complete(
segments, matched_segments.matched_segments, unmatched_segments
)
continue
else:
break
return MatchResult(matched_segments.matched_segments, unmatched_segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def __init__(self, *args, **kwargs):
if "delimiter" not in kwargs:
raise ValueError("Delimited grammars require a `delimiter`")
self.delimiter = kwargs.pop("delimiter")
self.allow_trailing = kwargs.pop("allow_trailing", False)
self.terminator = kwargs.pop("terminator", None)
# Setting min delimiters means we have to match at least this number
self.min_delimiters = kwargs.pop("min_delimiters", None)
super(Delimited, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
if "delimiter" not in kwargs:
raise ValueError("Delimited grammars require a `delimiter`")
self.delimiter = kwargs.pop("delimiter")
self.allow_trailing = kwargs.pop("allow_trailing", False)
self.terminator = kwargs.pop("terminator", None)
# Setting min delimiters means we have to match at least this number
self.min_delimiters = kwargs.pop("min_delimiters", None)
# The details on how to match a bracket are stored in the dialect
self.start_bracket = Ref("StartBracketSegment")
self.end_bracket = Ref("EndBracketSegment")
super(Delimited, self).__init__(*args, **kwargs)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
# Type munging
if isinstance(segments, BaseSegment):
segments = [segments]
# Have we been passed an empty list?
if len(segments) == 0:
return MatchResult.from_empty()
# Make some buffers
seg_buff = segments
matched_segments = MatchResult.from_empty()
# delimiters is a list of tuples containing delimiter segments as we find them.
delimiters = []
# First iterate through all the segments, looking for the delimiter.
# Second, split the list on each of the delimiters, and ensure that
# each sublist in turn matches one of the elements.
# In more detail, match against delimiter, if we match, put a slice
# up to that point onto a list of slices. Carry on.
while True:
# Check to see whether we've exhausted the buffer (or it's all non-code)
if len(seg_buff) == 0 or (
self.code_only and all([not s.is_code for s in seg_buff])
):
# Append the remaining buffer in case we're in the not is_code case.
matched_segments += seg_buff
# Nothing left, this is potentially a trailling case?
if self.allow_trailing and (
self.min_delimiters is None or len(delimiters) >= self.min_delimiters
):
# It is! (nothing left so no unmatched segments to append)
return matched_segments
else:
MatchResult.from_unmatched(segments)
# We rely on _bracket_sensitive_look_ahead_match to do the bracket counting
# element of this now. We look ahead to find a delimiter or terminator.
matchers = [self.delimiter]
if self.terminator:
matchers.append(self.terminator)
pre, mat, m = self._bracket_sensitive_look_ahead_match(
seg_buff,
matchers,
parse_context=parse_context.copy(incr="match_depth"),
# NB: We don't want whitespace at this stage, that should default to
# being passed to the elements in between.
code_only=False,
)
# Have we found a delimiter or terminator looking forward?
if mat:
if m is self.delimiter:
# Yes. Store it and then match the contents up to now.
delimiters.append(mat.matched_segments)
# We now test the intervening section as to whether it matches one
# of the things we're looking for. NB: If it's of zero length then
# we return without trying it.
if len(pre) > 0:
for elem in self._elements:
# We use the whitespace padded match to hoover up whitespace if enabled.
elem_match = self._code_only_sensitive_match(
pre,
elem,
parse_context=parse_context.copy(incr="match_depth"),
# This is where the configured code_only behaviour kicks in.
code_only=self.code_only,
)
if elem_match.is_complete():
# First add the segment up to the delimiter to the matched segments
matched_segments += elem_match
# Then it depends what we matched.
# Delimiter
if m is self.delimiter:
# Then add the delimiter to the matched segments
matched_segments += mat.matched_segments
# Break this for loop and move on, looking for the next delimiter
seg_buff = mat.unmatched_segments
# Still got some buffer left. Carry on.
break
# Terminator
elif m is self.terminator:
# We just return straight away here. We don't add the terminator to
# this match, it should go with the unmatched parts.
# First check we've had enough delimiters
if (
self.min_delimiters
and len(delimiters) < self.min_delimiters
):
return MatchResult.from_unmatched(segments)
else:
return MatchResult(
matched_segments.matched_segments,
mat.all_segments(),
)
else:
raise RuntimeError(
(
"I don't know how I got here. Matched instead on {0}, which "
"doesn't appear to be delimiter or terminator"
).format(m)
)
else:
# We REQUIRE a complete match here between delimiters or up to a
# terminator. If it's only partial then we don't want it.
# NB: using the sensitive match above deals with whitespace
# appropriately.
continue
else:
# None of them matched, return unmatched.
return MatchResult.from_unmatched(segments)
else:
# Zero length section between delimiters. Return unmatched.
return MatchResult.from_unmatched(segments)
else:
# No match for a delimiter looking forward, this means we're
# at the end. In this case we look for a potential partial match
# looking forward. We know it's a non-zero length section because
# we checked that up front.
# First check we're had enough delimiters, because if we haven't then
# there's no sense to try matching
if self.min_delimiters and len(delimiters) < self.min_delimiters:
return MatchResult.from_unmatched(segments)
# We use the whitespace padded match to hoover up whitespace if enabled,
# and default to the longest matcher. We don't care which one matches.
mat, _ = self._longest_code_only_sensitive_match(
seg_buff,
self._elements,
parse_context=parse_context.copy(incr="match_depth"),
code_only=self.code_only,
)
if mat:
# We've got something at the end. Return!
return MatchResult(
matched_segments.matched_segments + mat.matched_segments,
mat.unmatched_segments,
)
else:
# No match at the end, are we allowed to trail? If we are then return,
# otherwise we fail because we can't match the last element.
if self.allow_trailing:
return MatchResult(matched_segments.matched_segments, seg_buff)
else:
return MatchResult.from_unmatched(segments)
|
def match(
    self,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """Match a delimited sequence (e.g. a comma-separated list).

    Walks `segments` looking for the configured delimiter, splitting the
    sequence at each one and requiring the slice between consecutive
    delimiters to fully match one of `self._elements`. Bracketed regions
    are skipped via bracket counting so delimiters inside brackets are
    ignored, and an optional `terminator` ends the scan early.

    Returns:
        MatchResult: matched portion, or an unmatched result on failure.

    Raises:
        SQLParseError: if an opening bracket is never closed.
    """
    if isinstance(segments, BaseSegment):
        segments = [segments]
    seg_idx = 0
    terminal_idx = len(segments)
    sub_bracket_count = 0
    # Index of the first unmatched opening bracket. Used purely for error
    # reporting if we hit the end while still inside a bracket cycle.
    start_bracket_idx = None
    # delimiters is a list of tuples (idx, len), which keeps track of where
    # we found delimiters up to this point.
    delimiters = []
    matched_segments = MatchResult.from_empty()
    # Have we been passed an empty list?
    if len(segments) == 0:
        return MatchResult.from_empty()
    # First iterate through all the segments, looking for the delimiter.
    # Second, split the list on each of the delimiters, and ensure that
    # each sublist in turn matches one of the elements.
    # In more detail, match against delimiter, if we match, put a slice
    # up to that point onto a list of slices. Carry on.
    while True:
        # Are we at the end of the sequence?
        if seg_idx >= terminal_idx:
            # Yes we're at the end
            # We now need to check whether everything from either the start
            # or from the last delimiter up to here matches. We CAN allow
            # a partial match at this stage.
            # Are we in a bracket counting cycle that hasn't finished yet?
            if sub_bracket_count > 0:
                # TODO: Format this better
                raise SQLParseError(
                    "Couldn't find closing bracket for opening bracket.",
                    segment=segments[start_bracket_idx],
                )
            # Do we already have any delimiters?
            if delimiters:
                # Yes, get the last delimiter
                dm1 = delimiters[-1]
                # get everything after the last delimiter
                pre_segment = segments[dm1[0] + dm1[1] : terminal_idx]
            else:
                # No, no delimiters at all so far.
                # TODO: Allow this to be configured.
                # Just get everything up to this point
                pre_segment = segments[:terminal_idx]
            # Optionally here, we can match some non-code up front.
            if self.code_only:
                while len(pre_segment) > 0:
                    if not pre_segment[0].is_code:
                        matched_segments += (pre_segment[0],)  # As tuple
                        pre_segment = pre_segment[1:]
                    else:
                        break
            # Check we actually have something left to match on
            if len(pre_segment) > 0:
                # See if any of the elements match
                for elem in self._elements:
                    elem_match = elem._match(
                        pre_segment,
                        match_depth=match_depth + 1,
                        parse_depth=parse_depth,
                        verbosity=verbosity,
                        dialect=dialect,
                        match_segment=match_segment,
                    )
                    if elem_match.has_match():
                        # Successfully matched one of the elements in this spot
                        # Add this match onto any already matched segments and return.
                        # We do this in a slightly odd way here to allow partial matches.
                        # we do a quick check on min_delimiters if present
                        if (
                            self.min_delimiters
                            and len(delimiters) < self.min_delimiters
                        ):
                            # if we do have a limit and we haven't met it then crash out
                            return MatchResult.from_unmatched(segments)
                        return MatchResult(
                            matched_segments.matched_segments
                            + elem_match.matched_segments,
                            elem_match.unmatched_segments + segments[terminal_idx:],
                        )
                    else:
                        # Not matched this element, move on.
                        # NB, a partial match here isn't helpful. We're matching
                        # BETWEEN two delimiters and so it must be a complete match.
                        # Incomplete matches are only possible at the end
                        continue
                # If we're here we haven't matched any of the elements on this last element.
                # BUT, if we allow trailing, and we have matched something, we can end on the last
                # delimiter
                if self.allow_trailing and len(matched_segments) > 0:
                    return MatchResult(
                        matched_segments.matched_segments,
                        pre_segment + segments[terminal_idx:],
                    )
                else:
                    return MatchResult.from_unmatched(segments)
            # NOTE(review): if pre_segment is empty here nothing advances
            # seg_idx before the loop repeats — confirm this state is
            # unreachable (i.e. the terminal slice always contains code).
        else:
            # We've got some sequence left.
            # Are we in a bracket cycle?
            if sub_bracket_count > 0:
                # Is it another bracket entry?
                bracket_match = self.start_bracket._match(
                    segments=segments[seg_idx:],
                    match_depth=match_depth + 1,
                    parse_depth=parse_depth,
                    verbosity=verbosity,
                    dialect=dialect,
                    match_segment=match_segment,
                )
                if bracket_match.has_match():
                    # increment the open bracket counter and proceed
                    sub_bracket_count += 1
                    seg_idx += len(bracket_match)
                    continue
                # Is it a closing bracket?
                bracket_match = self.end_bracket._match(
                    segments=segments[seg_idx:],
                    match_depth=match_depth + 1,
                    parse_depth=parse_depth,
                    verbosity=verbosity,
                    dialect=dialect,
                    match_segment=match_segment,
                )
                if bracket_match.has_match():
                    # reduce the bracket count and then advance the counter.
                    sub_bracket_count -= 1
                    seg_idx += len(bracket_match)
                    continue
            else:
                # No bracket cycle
                # Do we have a delimiter at the current index?
                del_match = self.delimiter._match(
                    segments[seg_idx:],
                    match_depth=match_depth + 1,
                    parse_depth=parse_depth,
                    verbosity=verbosity,
                    dialect=dialect,
                    match_segment=match_segment,
                )
                # Doesn't have to match fully, just has to give us a delimiter.
                if del_match.has_match():
                    # We've got at least a partial match
                    # Record the location of this delimiter
                    d = (seg_idx, len(del_match))
                    # Do we already have any delimiters?
                    if delimiters:
                        # Yes
                        dm1 = delimiters[-1]
                        # slice the segments between this delimiter and the previous
                        pre_segment = segments[dm1[0] + dm1[1] : d[0]]
                    else:
                        # No
                        # Just get everything up to this point
                        pre_segment = segments[: d[0]]
                    # Append the delimiter that we have found.
                    delimiters.append(d)
                    # Optionally here, we can match some non-code up front.
                    if self.code_only:
                        while len(pre_segment) > 0:
                            if not pre_segment[0].is_code:
                                matched_segments += (pre_segment[0],)  # As tuple
                                pre_segment = pre_segment[1:]
                            else:
                                break
                    # We now check that this chunk matches whatever we're delimiting.
                    # In this case it MUST be a full match, not just a partial match
                    for elem in self._elements:
                        elem_match = elem._match(
                            pre_segment,
                            match_depth=match_depth + 1,
                            parse_depth=parse_depth,
                            verbosity=verbosity,
                            dialect=dialect,
                            match_segment=match_segment,
                        )
                        if elem_match.is_complete():
                            # Successfully matched one of the elements in this spot
                            # First add the segment up to the delimiter to the matched segments
                            matched_segments += elem_match
                            # Then add the delimiter to the matched segments
                            matched_segments += del_match
                            # Break this for loop and move on, looking for the next delimiter
                            seg_idx += len(del_match)
                            break
                        elif elem_match and self.code_only:
                            # Optionally if it's not a complete match but the unmatched bits are
                            # all non code then we'll also take it.
                            if all(
                                [
                                    not seg.is_code
                                    for seg in elem_match.unmatched_segments
                                ]
                            ):
                                # Logic as above, just with the unmatched bits too because none are code
                                matched_segments += elem_match.matched_segments
                                matched_segments += elem_match.unmatched_segments
                                matched_segments += del_match
                                seg_idx += len(del_match)
                                break
                            else:
                                continue
                        else:
                            # Not matched this element, move on.
                            # NB, a partial match here isn't helpful. We're matching
                            # BETWEEN two delimiters and so it must be a complete match.
                            # Incomplete matches are only possible at the end
                            continue
                    else:
                        # If we're here we haven't matched any of the elements, then we have a problem
                        return MatchResult.from_unmatched(segments)
                # This index doesn't have a delimiter, check for brackets and terminators
                # First is it a terminator (and we're not in a bracket cycle)
                if self.terminator:
                    term_match = self.terminator._match(
                        segments[seg_idx:],
                        match_depth=match_depth + 1,
                        parse_depth=parse_depth,
                        verbosity=verbosity,
                        dialect=dialect,
                        match_segment=match_segment,
                    )
                    if term_match:
                        # we've found a terminator.
                        # End the cycle here.
                        terminal_idx = seg_idx
                        continue
                # Last, do we need to enter a bracket cycle
                bracket_match = self.start_bracket._match(
                    segments=segments[seg_idx:],
                    match_depth=match_depth + 1,
                    parse_depth=parse_depth,
                    verbosity=verbosity,
                    dialect=dialect,
                    match_segment=match_segment,
                )
                if bracket_match.has_match():
                    # BUGFIX: record where this bracket cycle opened. Previously
                    # start_bracket_idx was never assigned, so the unclosed-bracket
                    # error above raised `segments[None]` and crashed with
                    # "TypeError: tuple indices must be integers or slices, not NoneType".
                    start_bracket_idx = seg_idx
                    # increment the open bracket counter and proceed
                    sub_bracket_count += 1
                    seg_idx += len(bracket_match)
                    continue
            # Nothing else interesting. Carry On
            # This is the same regardless of whether we're in the bracket cycle
            # or otherwise.
            seg_idx += 1
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
    """Greedily consume segments that match any of our elements.

    Walks the sequence from the front: optionally hoovers up non-code
    segments, otherwise requires one of `self._elements` (a raw type
    string or a matchable) to claim the next chunk. Succeeds only if
    the whole sequence is consumed.
    """
    acc = ()
    remainder = segments
    while remainder:
        head = remainder[0]
        # Optionally take non-code segments without consulting the elements.
        if self.code_only and not head.is_code:
            acc += (head,)
            remainder = remainder[1:]
            continue
        # Try each element in turn; the first one to match wins.
        for opt in self._elements:
            if isinstance(opt, str):
                # A plain string element matches on segment type.
                if head.type == opt:
                    acc += (head,)
                    remainder = remainder[1:]
                    break
            else:
                m = opt._match(
                    remainder,
                    parse_context=parse_context.copy(incr="match_depth"),
                )
                if m:
                    acc += m.matched_segments
                    remainder = m.unmatched_segments
                    break
        else:
            # Nothing on our element list matched the front of the buffer.
            # We must have found something foreign, so fail the whole match.
            return MatchResult.from_unmatched(segments)
    # Everything consumed cleanly.
    return MatchResult.from_matched(acc)
|
def match(
    self,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """Greedily consume segments that match any of our elements.

    Walks the sequence from the front: optionally hoovers up non-code
    segments, otherwise requires one of `self._elements` (a raw type
    string or a matchable) to claim the next chunk. Succeeds only if
    the whole sequence is consumed.
    """
    acc = ()
    remainder = segments
    while remainder:
        head = remainder[0]
        # Optionally take non-code segments without consulting the elements.
        if self.code_only and not head.is_code:
            acc += (head,)
            remainder = remainder[1:]
            continue
        # Try each element in turn; the first one to match wins.
        for opt in self._elements:
            if isinstance(opt, str):
                # A plain string element matches on segment type.
                if head.type == opt:
                    acc += (head,)
                    remainder = remainder[1:]
                    break
            else:
                m = opt._match(
                    remainder,
                    match_depth=match_depth + 1,
                    parse_depth=parse_depth,
                    verbosity=verbosity,
                    dialect=dialect,
                    match_segment=match_segment,
                )
                if m:
                    acc += m.matched_segments
                    remainder = m.unmatched_segments
                    break
        else:
            # Nothing on our element list matched the front of the buffer.
            # We must have found something foreign, so fail the whole match.
            return MatchResult.from_unmatched(segments)
    # Everything consumed cleanly.
    return MatchResult.from_matched(acc)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def __init__(self, target, *args, **kwargs):
    """Initialise, stashing the target matcher and optional terminator.

    The `terminator` keyword is removed from kwargs before the remaining
    arguments are delegated to the parent initialiser.
    """
    # Pull the optional terminator out before passing kwargs upward.
    self.terminator = kwargs.pop("terminator", None)
    self.target = target
    super(StartsWith, self).__init__(*args, **kwargs)
|
def __init__(self, target, *args, **kwargs):
    """Initialise, stashing the target, optional terminator and brackets.

    The `terminator` keyword is removed from kwargs before the remaining
    arguments are delegated to the parent initialiser.
    """
    # Pull the optional terminator out before passing kwargs upward.
    self.terminator = kwargs.pop("terminator", None)
    self.target = target
    # The details on how to match a bracket are stored in the dialect,
    # so resolve them by named reference rather than directly.
    self.end_bracket = Ref("EndBracketSegment")
    self.start_bracket = Ref("StartBracketSegment")
    super(StartsWith, self).__init__(*args, **kwargs)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
    """Match if `segments` starts with our target, optionally up to a terminator.

    Leading non-code segments are skipped before the target match is
    attempted, and any such prefix is re-attached to the front of the
    returned match. If a `terminator` is configured, the trailing
    segments are scanned (bracket-aware) and the match stops just before
    the first terminator found.
    """
    if self.code_only:
        # Index of the first code segment, or None if there are none.
        first_code_idx = None
        # Work through to find the first code segment...
        for idx, seg in enumerate(segments):
            if seg.is_code:
                first_code_idx = idx
                break
        else:
            # We've trying to match on a sequence of segments which contain no code.
            # That means this isn't a match.
            return MatchResult.from_unmatched(segments)
        match = self.target._match(
            segments=segments[first_code_idx:],
            parse_context=parse_context.copy(incr="match_depth"),
        )
        if match:
            # The match will probably have returned a mutated version rather
            # that the raw segment sent for matching. We need to reinsert it
            # back into the sequence in place of the raw one, but we can't
            # just assign at the index because it's a tuple and not a list.
            # to get around that we do this slightly more elaborate construction.
            # NB: This match may be partial or full, either is cool. In the case
            # of a partial match, given that we're only interested in what it STARTS
            # with, then we can still used the unmatched parts on the end.
            # We still need to deal with any non-code segments at the start.
            if self.terminator:
                # We have an optional terminator. We should only match up to when
                # this matches. This should also respect bracket counting.
                match_segments = match.matched_segments
                trailing_segments = match.unmatched_segments
                # Given a set of segments, iterate through looking for
                # a terminator.
                res = self._bracket_sensitive_look_ahead_match(
                    segments=trailing_segments,
                    matchers=[self.terminator],
                    parse_context=parse_context,
                )
                # Depending on whether we found a terminator or not we treat
                # the result slightly differently. If no terminator was found,
                # we just use the whole unmatched segment. If we did find one,
                # we match up until (but not including) that terminator.
                # res is (pre-terminator segments, terminator match, matcher).
                term_match = res[1]
                if term_match:
                    # The terminator itself (and anything after it) stays
                    # unmatched; everything before it joins the match.
                    m_tail = res[0]
                    u_tail = term_match.all_segments()
                else:
                    # NOTE(review): term_match is a failed match here, so
                    # .unmatched_segments presumably holds the whole trailing
                    # sequence — confirm against MatchResult semantics.
                    m_tail = term_match.unmatched_segments
                    u_tail = ()
                return MatchResult(
                    segments[:first_code_idx] + match_segments + m_tail,
                    u_tail,
                )
            else:
                return MatchResult.from_matched(
                    segments[:first_code_idx] + match.all_segments()
                )
        else:
            return MatchResult.from_unmatched(segments)
    else:
        raise NotImplementedError(
            "Not expecting to match StartsWith and also not just code!?"
        )
|
def match(
    self,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """Match if `segments` starts with our target, optionally up to a terminator.

    Leading non-code segments are skipped before the target match is
    attempted, and any such prefix is re-attached to the front of the
    returned match. If a `terminator` is configured, the trailing
    segments are scanned with bracket counting and the match extends up
    to the terminator.
    """
    if self.code_only:
        # Index of the first code segment, or None if there are none.
        first_code_idx = None
        # Work through to find the first code segment...
        for idx, seg in enumerate(segments):
            if seg.is_code:
                first_code_idx = idx
                break
        else:
            # We've trying to match on a sequence of segments which contain no code.
            # That means this isn't a match.
            return MatchResult.from_unmatched(segments)
        match = self.target._match(
            segments=segments[first_code_idx:],
            match_depth=match_depth + 1,
            parse_depth=parse_depth,
            verbosity=verbosity,
            dialect=dialect,
            match_segment=match_segment,
        )
        if match:
            # The match will probably have returned a mutated version rather
            # that the raw segment sent for matching. We need to reinsert it
            # back into the sequence in place of the raw one, but we can't
            # just assign at the index because it's a tuple and not a list.
            # to get around that we do this slightly more elaborate construction.
            # NB: This match may be partial or full, either is cool. In the case
            # of a partial match, given that we're only interested in what it STARTS
            # with, then we can still used the unmatched parts on the end.
            # We still need to deal with any non-code segments at the start.
            if self.terminator:
                # We have an optional terminator. We should only match up to when
                # this matches. This should also respect bracket counting.
                match_segments = match.matched_segments
                trailing_segments = match.unmatched_segments
                # Given a set of segments, iterate through looking for
                # a terminator.
                term_match = self.bracket_sensitive_forward_match(
                    segments=trailing_segments,
                    start_bracket=self.start_bracket,
                    end_bracket=self.end_bracket,
                    match_depth=match_depth,
                    parse_depth=parse_depth,
                    verbosity=verbosity,
                    terminator=self.terminator,
                    dialect=dialect,
                    match_segment=match_segment,
                )
                # Everything up to the terminator joins the match; the
                # remainder is returned unmatched.
                return MatchResult(
                    segments[:first_code_idx]
                    + match_segments
                    + term_match.matched_segments,
                    term_match.unmatched_segments,
                )
            else:
                # No terminator: the whole remainder counts as matched.
                return MatchResult.from_matched(
                    segments[:first_code_idx]
                    + match.matched_segments
                    + match.unmatched_segments
                )
        else:
            return MatchResult.from_unmatched(segments)
    else:
        raise NotImplementedError(
            "Not expecting to match StartsWith and also not just code!?"
        )
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(self, segments, parse_context):
    """The match function for `bracketed` implements bracket counting.

    Strategy:
    1. Work forwards to find the first opening bracket (allowing
       leading non-code segments if `code_only` is set). If we can't
       find one, this isn't a match.
    2. Use a bracket-sensitive look-ahead to find the matching closing
       bracket. Failing to find one is a syntax error in the source,
       so we raise rather than simply not matching.
    3. Match the content between the brackets against each of our
       elements. Only a *complete* match of the content counts;
       anything else returns an empty match.

    Args:
        segments: The sequence of segments to match against.
        parse_context: The current parse context (depth tracking and
            logging).

    Returns:
        MatchResult: A successful match covering the brackets and
        their content, or an unmatched result.

    Raises:
        SQLParseError: If an opening bracket has no matching closing
            bracket.
    """
    seg_buff = segments
    # Look for the first bracket
    start_match = self._code_only_sensitive_match(
        seg_buff,
        self.start_bracket,
        parse_context=parse_context.copy(incr="match_depth"),
        code_only=self.code_only,
    )
    if start_match:
        seg_buff = start_match.unmatched_segments
    else:
        # Can't find the opening bracket. No Match.
        return MatchResult.from_unmatched(segments)
    # Look for the closing bracket
    pre, end_match, _ = self._bracket_sensitive_look_ahead_match(
        segments=seg_buff,
        matchers=[self.end_bracket],
        parse_context=parse_context,
        code_only=self.code_only,
    )
    if not end_match:
        # FIX: report the opening bracket segment itself. The bracket is
        # the *last* matched segment because a code_only match may include
        # leading non-code segments. Previously this passed an
        # always-empty tuple, which discarded the error position.
        raise SQLParseError(
            "Couldn't find closing bracket for opening bracket.",
            segment=start_match.matched_segments[-1],
        )
    # Match the content now we've confirmed the brackets. We use the
    # _longest helper function mostly just because it deals with multiple
    # matchers.
    content_match, _ = self._longest_code_only_sensitive_match(
        pre,
        self._elements,
        parse_context=parse_context.copy(incr="match_depth"),
        code_only=self.code_only,
    )
    # We require a complete match for the content (hopefully for obvious reasons)
    if content_match.is_complete():
        return MatchResult(
            start_match.matched_segments
            + content_match.matched_segments
            + end_match.matched_segments,
            end_match.unmatched_segments,
        )
    else:
        # A partial match between delimiters isn't useful: no match.
        return MatchResult.from_unmatched(segments)
|
def match(
    self,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """The match function for `bracketed` implements bracket counting.

    Strategy:
    1. Work forwards to find the first bracket, skipping leading
       non-code segments when `code_only` is set. Anything else before
       a bracket means no match.
    2. Bracket-count forward to find the partner closing bracket,
       tracking nested sub-brackets via a counter. Reaching the end of
       the sequence without finding the partner raises SQLParseError
       (a source syntax error, not just a failed match).
    3. Try to match the content between the brackets against each
       element in turn; only a complete match counts.
    """
    # Count of currently-open *nested* sub-brackets (0 means the next
    # closing bracket pairs with our opening bracket).
    sub_bracket_count = 0
    # NOTE(review): this initial assignment is dead — it is
    # unconditionally overwritten after the opening bracket is matched.
    pre_content_segments = tuple()
    unmatched_segs = segments
    matched_segs = tuple()
    current_bracket_segment = None
    # Step 1. Find the first useful segment
    # Work through to find the first code segment...
    if self.code_only:
        for idx, seg in enumerate(segments):
            if seg.is_code:
                break
            else:
                matched_segs += (seg,)
                unmatched_segs = unmatched_segs[1:]
        else:
            # We've trying to match on a sequence of segments which contain no code.
            # That means this isn't a match.
            return MatchResult.from_unmatched(segments)
    # is it a bracket?
    m = self.start_bracket._match(
        segments=unmatched_segs,
        match_depth=match_depth + 1,
        parse_depth=parse_depth,
        verbosity=verbosity,
        dialect=dialect,
        match_segment=match_segment,
    )
    if m.has_match():
        # We've got the first bracket.
        # Keep a handle on the bracket segment for error reporting later.
        current_bracket_segment = m.matched_segments[0]
        # No indexing to allow mutation
        matched_segs += m.matched_segments
        unmatched_segs = m.unmatched_segments
    else:
        # Whatever we have, it doesn't start with a bracket.
        return MatchResult.from_unmatched(segments)
    # Step 2: Bracket count forward to find it's pair
    content_segments = tuple()
    pre_content_segments = matched_segs
    while True:
        # Are we at the end of the sequence?
        if len(unmatched_segs) == 0:
            # We've got to the end without finding the closing bracket
            # this isn't just parsing issue this is probably a syntax
            # error.
            # TODO: Format this better
            raise SQLParseError(
                "Couldn't find closing bracket for opening bracket.",
                segment=current_bracket_segment,
            )
        # Is it a closing bracket?
        m = self.end_bracket._match(
            segments=unmatched_segs,
            match_depth=match_depth + 1,
            parse_depth=parse_depth,
            verbosity=verbosity,
            dialect=dialect,
            match_segment=match_segment,
        )
        if m.has_match():
            if sub_bracket_count == 0:
                # We're back to the bracket pair!
                matched_segs += m.matched_segments
                unmatched_segs = m.unmatched_segments
                closing_bracket_segs = m.matched_segments
                break
            else:
                # reduce the bracket count and then advance the counter.
                sub_bracket_count -= 1
                matched_segs += m.matched_segments
                unmatched_segs = m.unmatched_segments
                continue
        # Is it an opening bracket?
        m = self.start_bracket._match(
            segments=unmatched_segs,
            match_depth=match_depth + 1,
            parse_depth=parse_depth,
            verbosity=verbosity,
            dialect=dialect,
            match_segment=match_segment,
        )
        if m.has_match():
            # increment the open bracket counter and proceed
            sub_bracket_count += 1
            # NOTE(review): nested bracket segments are appended to
            # matched_segs but NOT to content_segments, while the final
            # MatchResult below is built from pre_content_segments +
            # the element match (over content_segments) +
            # closing_bracket_segs — which appears to drop the nested
            # bracket segments entirely. Confirm whether intended.
            matched_segs += m.matched_segments
            unmatched_segs = m.unmatched_segments
            continue
        # If we get here it's not an opening bracket or a closing bracket
        # so we should carry on our merry way
        matched_segs += (unmatched_segs[0],)
        content_segments += (unmatched_segs[0],)
        unmatched_segs = unmatched_segs[1:]
    # If we get to here then we've found our closing bracket.
    # Let's identify the section to match for our content matchers
    # and then try it against each of them.
    for elem in self._elements:
        elem_match = elem._match(
            content_segments,
            match_depth=match_depth + 1,
            parse_depth=parse_depth,
            verbosity=verbosity,
            dialect=dialect,
            match_segment=match_segment,
        )
        # Matches at this stage must be complete, because we've got nothing
        # to do with any leftovers within the brackets.
        if elem_match.is_complete():
            # We're also returning the *mutated* versions from the sub-matcher
            return MatchResult(
                pre_content_segments
                + elem_match.matched_segments
                + closing_bracket_segs,
                unmatched_segs,
            )
        else:
            # Not matched this element, move on.
            # NB, a partial match here isn't helpful. We're matching
            # BETWEEN two delimiters and so it must be a complete match.
            # Incomplete matches are only possible at the end
            continue
    else:
        # If we're here we haven't matched any of the elements, then we have a problem
        return MatchResult.from_unmatched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def __init__(self, config=None):
    """Initialise the lexer with its config and ordered matcher pipeline.

    Args:
        config: Optional configuration mapping; falls back to
            ``default_config`` when not supplied.
    """
    self.config = config or default_config
    # Short aliases keep the (order-sensitive!) matcher table compact.
    # Earlier entries take priority, so multi-character operators come
    # before their single-character prefixes.
    regex = RegexMatcher.from_shorthand
    single = SingletonMatcher.from_shorthand
    submatchers = (
        regex("whitespace", r"[\t ]*"),
        regex("inline_comment", r"(-- |#)[^\n]*", is_comment=True),
        regex("block_comment", r"\/\*([^\*]|\*[^\/])*\*\/", is_comment=True),
        regex("single_quote", r"'[^']*'", is_code=True),
        regex("double_quote", r'"[^"]*"', is_code=True),
        regex("back_quote", r"`[^`]*`", is_code=True),
        regex("numeric_literal", r"(-?[0-9]+(\.[0-9]+)?)", is_code=True),
        regex("greater_than_or_equal", r">=", is_code=True),
        regex("less_than_or_equal", r"<=", is_code=True),
        regex("newline", r"\r\n"),
        regex("casting_operator", r"::"),
        regex("not_equals", r"!="),
        single("newline", "\n"),
        single("equals", "=", is_code=True),
        single("greater_than", ">", is_code=True),
        single("less_than", "<", is_code=True),
        single("dot", ".", is_code=True),
        single("comma", ",", is_code=True),
        single("plus", "+", is_code=True),
        single("tilde", "~", is_code=True),
        single("minus", "-", is_code=True),
        single("divide", "/", is_code=True),
        single("star", "*", is_code=True),
        single("bracket_open", "(", is_code=True),
        single("bracket_close", ")", is_code=True),
        single("semicolon", ";", is_code=True),
        regex("code", r"[0-9a-zA-Z_]*", is_code=True),
    )
    self.matcher = RepeatedMultiMatcher(*submatchers)
|
def __init__(self, config=None):
    """Initialise the lexer with its config and ordered matcher pipeline.

    Args:
        config: Optional configuration mapping; falls back to
            ``default_config`` when not supplied.
    """
    self.config = config or default_config
    # Short aliases keep the (order-sensitive!) matcher table compact.
    # Earlier entries take priority, so multi-character operators come
    # before their single-character prefixes.
    regex = RegexMatcher.from_shorthand
    single = SingletonMatcher.from_shorthand
    submatchers = (
        regex("whitespace", r"[\t ]*"),
        regex("inline_comment", r"(-- |#)[^\n]*", is_comment=True),
        regex("block_comment", r"\/\*([^\*]|\*[^\/])*\*\/", is_comment=True),
        regex("single_quote", r"'[^']*'", is_code=True),
        regex("double_quote", r'"[^"]*"', is_code=True),
        regex("back_quote", r"`[^`]*`", is_code=True),
        regex("numeric_literal", r"(-?[0-9]+(\.[0-9]+)?)", is_code=True),
        regex("greater_than_or_equal", r">=", is_code=True),
        regex("less_than_or_equal", r"<=", is_code=True),
        regex("newline", r"\r\n"),
        regex("casting_operator", r"::"),
        regex("not_equals", r"!="),
        single("newline", "\n"),
        single("equals", "=", is_code=True),
        single("greater_than", ">", is_code=True),
        single("less_than", "<", is_code=True),
        single("dot", ".", is_code=True),
        single("comma", ",", is_code=True),
        single("plus", "+", is_code=True),
        single("minus", "-", is_code=True),
        single("divide", "/", is_code=True),
        single("star", "*", is_code=True),
        single("bracket_open", "(", is_code=True),
        single("bracket_close", ")", is_code=True),
        single("semicolon", ";", is_code=True),
        regex("code", r"[0-9a-zA-Z_]*", is_code=True),
    )
    self.matcher = RepeatedMultiMatcher(*submatchers)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def parse_match_logging(grammar, func, msg, parse_context, v_level, **kwargs):
    """Emit a formatted parse/match log line via ``verbosity_logger``.

    The line carries the parse/match depth, a dot-indented marker for the
    segment being matched, and the grammar/function/message detail. Any
    extra keyword arguments are appended as ``key=value`` pairs (string
    values are repr'd).
    """
    header = "[PD:{0} MD:{1}]".format(
        parse_context.parse_depth, parse_context.match_depth
    )
    # Dot-indent the segment label by the current match depth.
    location = ("." * parse_context.match_depth) + str(parse_context.match_segment)
    detail = f"{grammar}.{func} {msg}"
    line = f"{header}\t{location:<50}\t{detail:<20}"
    if kwargs:
        rendered = (
            f"{key}={value!r}" if isinstance(value, str) else f"{key}={value}"
            for key, value in kwargs.items()
        )
        line += "\t[" + ", ".join(rendered) + "]"
    verbosity_logger(line, parse_context.verbosity, v_level=v_level)
|
def parse_match_logging(
    parse_depth,
    match_depth,
    match_segment,
    grammar,
    func,
    msg,
    verbosity,
    v_level,
    **kwargs,
):
    """Emit a formatted parse/match log line via ``verbosity_logger``.

    The line carries the parse/match depth, a dot-indented marker for the
    segment being matched, and the grammar/function/message detail. Any
    extra keyword arguments are appended as ``key=value`` pairs (string
    values are repr'd).
    """
    header = f"[PD:{parse_depth} MD:{match_depth}]"
    # Dot-indent the segment label by the current match depth.
    location = ("." * match_depth) + str(match_segment)
    detail = f"{grammar}.{func} {msg}"
    line = f"{header}\t{location:<50}\t{detail:<20}"
    if kwargs:
        rendered = (
            f"{key}={value!r}" if isinstance(value, str) else f"{key}={value}"
            for key, value in kwargs.items()
        )
        line += "\t[" + ", ".join(rendered) + "]"
    verbosity_logger(line, verbosity, v_level=v_level)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def parse(self, parse_context=None):
    """Parse this segment's children with its parse grammar, then recurse.

    `parse_context.recurse` controls how deep to go (mostly for testing):
    True/False for yes or no, an integer allows a certain number of levels.

    Returns self, mutated in place: `self.segments` is replaced by the
    matched structure, with any unmatched remainder wrapped in an
    `UnparsableSegment`.

    Raises:
        RuntimeError: If the parse context carries no dialect.
        TypeError: Re-raised (after logging) if the match result can't
            be unified.
    """
    # We should call the parse grammar on this segment, which calls
    # the match grammar on all it's children.
    if not parse_context.dialect:
        raise RuntimeError("No dialect provided to {0!r}!".format(self))
    # the parse_depth and recurse kwargs control how deep we will recurse for testing.
    if not self.segments:
        # This means we're a root segment, just return an unmutated self
        return self
    # Get the Parse Grammar
    g = self._parse_grammar()
    if g is None:
        # No grammar means nothing to restructure at this level.
        logging.debug(
            "{0}.parse: no grammar. returning".format(self.__class__.__name__)
        )
        return self
    # Use the Parse Grammar (and the private method)
    # NOTE: No match_depth kwarg, because this is the start of the matching.
    m = g._match(
        segments=self.segments,
        parse_context=parse_context.copy(match_segment=self.__class__.__name__),
    )
    # Calling unify here, allows the MatchResult class to do all the type checking.
    try:
        m = MatchResult.unify(m)
    except TypeError as err:
        logging.error(
            "[PD:{0}] {1}.parse. Error on unifying result of match grammar!".format(
                parse_context.parse_depth, self.__class__.__name__
            )
        )
        raise err
    # Basic Validation, that we haven't dropped anything.
    check_still_complete(self.segments, m.matched_segments, m.unmatched_segments)
    if m.has_match():
        if m.is_complete():
            # Complete match, happy days!
            self.segments = m.matched_segments
        else:
            # Incomplete match.
            # For now this means the parsing has failed. Lets add the unmatched bit at the
            # end as something unparsable.
            # TODO: Do something more intelligent here.
            self.segments = m.matched_segments + (
                UnparsableSegment(segments=m.unmatched_segments, expected="Nothing..."),
            )
    else:
        # If there's no match at this stage, then it's unparsable. That's
        # a problem at this stage so wrap it in an unparable segment and carry on.
        self.segments = (
            UnparsableSegment(
                segments=self.segments,
                expected=g.expected_string(dialect=parse_context.dialect),
            ),
        ) # NB: tuple
    # Validate new segments
    self.validate_segments(text="parsing")
    # Recurse if allowed (using the expand method to deal with the expansion)
    logging.debug(
        "{0}.parse: Done Parse. Plotting Recursion. Recurse={1!r}".format(
            self.__class__.__name__, parse_context.recurse
        )
    )
    parse_depth_msg = "###\n#\n# Beginning Parse Depth {0}: {1}\n#\n###\nInitial Structure:\n{2}".format(
        parse_context.parse_depth + 1, self.__class__.__name__, self.stringify()
    )
    if parse_context.recurse is True:
        # Unbounded recursion: expand children at the next parse depth,
        # resetting match depth for the fresh level.
        logging.debug(parse_depth_msg)
        self.segments = self.expand(
            self.segments,
            parse_context=parse_context.copy(
                incr="parse_depth", match_depth=0, recurse=True
            ),
        )
    elif isinstance(parse_context.recurse, int):
        # Bounded recursion: only descend while levels remain, decrementing
        # the budget as we go.
        if parse_context.recurse > 1:
            logging.debug(parse_depth_msg)
            self.segments = self.expand(
                self.segments,
                parse_context=parse_context.copy(decr="recurse", incr="parse_depth"),
            )
    # Validate new segments
    self.validate_segments(text="expanding")
    return self
|
def parse(self, recurse=True, parse_depth=0, verbosity=0, dialect=None):
    """Use the parse kwarg for testing, mostly to check how deep to go.
    True/False for yes or no, an integer allows a certain number of levels.

    Applies this segment's parse grammar to its children, wraps any
    unmatched content in an UnparsableSegment, and (optionally) recurses
    into the resulting children via `expand`. Returns self, mutated.
    """
    # We should call the parse grammar on this segment, which calls
    # the match grammar on all its children.
    if not dialect:
        raise RuntimeError("No dialect provided to {0!r}!".format(self))
    # the parse_depth and recurse kwargs control how deep we will recurse for testing.
    if not self.segments:
        # This means we're a root segment, just return an unmutated self
        return self
    # Get the Parse Grammar
    g = self._parse_grammar()
    if g is None:
        # No grammar defined for this segment class: nothing to parse here.
        logging.debug(
            "{0}.parse: no grammar. returning".format(self.__class__.__name__)
        )
        return self
    # Use the Parse Grammar (and the private method)
    # NOTE: No match_depth kwarg, because this is the start of the matching.
    m = g._match(
        segments=self.segments,
        parse_depth=parse_depth,
        verbosity=verbosity,
        dialect=dialect,
        match_segment=self.__class__.__name__,
    )
    # Calling unify here, allows the MatchResult class to do all the type checking.
    try:
        m = MatchResult.unify(m)
    except TypeError as err:
        logging.error(
            "[PD:{0}] {1}.parse. Error on unifying result of match grammar!".format(
                parse_depth, self.__class__.__name__
            )
        )
        raise err
    # Basic Validation, that we haven't dropped anything.
    check_still_complete(self.segments, m.matched_segments, m.unmatched_segments)
    if m.has_match():
        if m.is_complete():
            # Complete match, happy days!
            self.segments = m.matched_segments
        else:
            # Incomplete match.
            # For now this means the parsing has failed. Lets add the unmatched bit at the
            # end as something unparsable.
            # TODO: Do something more intelligent here.
            self.segments = m.matched_segments + (
                UnparsableSegment(segments=m.unmatched_segments, expected="Nothing..."),
            )
    else:
        # If there's no match at this stage, then it's unparsable. That's
        # a problem at this stage so wrap it in an unparable segment and carry on.
        self.segments = (
            UnparsableSegment(
                segments=self.segments, expected=g.expected_string(dialect=dialect)
            ),
        )  # NB: tuple
    # Validate new segments
    self.validate_segments(text="parsing")
    # Recurse if allowed (using the expand method to deal with the expansion)
    logging.debug(
        "{0}.parse: Done Parse. Plotting Recursion. Recurse={1!r}".format(
            self.__class__.__name__, recurse
        )
    )
    parse_depth_msg = "###\n#\n# Beginning Parse Depth {0}: {1}\n#\n###\nInitial Structure:\n{2}".format(
        parse_depth + 1, self.__class__.__name__, self.stringify()
    )
    if recurse is True:
        # Unbounded recursion: expand children at the next depth.
        logging.debug(parse_depth_msg)
        self.segments = self.expand(
            self.segments,
            recurse=True,
            parse_depth=parse_depth + 1,
            verbosity=verbosity,
            dialect=dialect,
        )
    elif isinstance(recurse, int):
        # Bounded recursion: the integer budget is decremented each level down.
        if recurse > 1:
            logging.debug(parse_depth_msg)
            self.segments = self.expand(
                self.segments,
                recurse=recurse - 1,
                parse_depth=parse_depth + 1,
                verbosity=verbosity,
                dialect=dialect,
            )
    # Validate new segments
    self.validate_segments(text="expanding")
    return self
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(cls, segments, parse_context):
    """Match `segments` against this class's match grammar.

    Matching can be done from either the raw or the segments. This raw
    function can be overridden, or a grammar defined on the underlying
    class. On success the matched segments are wrapped in an instance of
    this class; otherwise an unmatched result is returned.
    """
    grammar = cls._match_grammar()
    if not grammar:
        raise NotImplementedError(
            "{0} has no match function implemented".format(cls.__name__)
        )
    # Delegate to the grammar's private matching method, one level deeper.
    result = grammar._match(
        segments=segments, parse_context=parse_context.copy(incr="match_depth")
    )
    # Unify so downstream code can rely on a proper MatchResult,
    # letting the MatchResult class do all the type checking.
    try:
        result = MatchResult.unify(result)
    except TypeError as err:
        logging.error(
            "[PD:{0} MD:{1}] {2}.match. Error on unifying result of match grammar!".format(
                parse_context.parse_depth, parse_context.match_depth, cls.__name__
            )
        )
        raise err
    if not result.has_match():
        return MatchResult.from_unmatched(segments)
    # Wrap the matched portion in an instance of this class.
    return MatchResult(
        (cls(segments=result.matched_segments),), result.unmatched_segments
    )
|
def match(
    cls,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """
    Matching can be done from either the raw or the segments.
    This raw function can be overridden, or a grammar defined
    on the underlying class.

    Returns a MatchResult wrapping the matched segments in an instance
    of this class, or an unmatched result when the grammar doesn't match.
    Raises NotImplementedError when the class defines no match grammar.
    """
    if cls._match_grammar():
        # Call the private method
        m = cls._match_grammar()._match(
            segments=segments,
            match_depth=match_depth + 1,
            parse_depth=parse_depth,
            verbosity=verbosity,
            dialect=dialect,
            match_segment=match_segment,
        )
        # Calling unify here, allows the MatchResult class to do all the type checking.
        try:
            m = MatchResult.unify(m)
        except TypeError as err:
            logging.error(
                "[PD:{0} MD:{1}] {2}.match. Error on unifying result of match grammar!".format(
                    parse_depth, match_depth, cls.__name__
                )
            )
            raise err
        # Once unified we can deal with it just as a MatchResult
        if m.has_match():
            # Wrap the matched portion in an instance of this class.
            return MatchResult(
                (cls(segments=m.matched_segments),), m.unmatched_segments
            )
        else:
            return MatchResult.from_unmatched(segments)
    else:
        raise NotImplementedError(
            "{0} has no match function implemented".format(cls.__name__)
        )
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def _match(cls, segments, parse_context):
    """A wrapper on the match function to do some basic validation and logging.

    Normalises `segments` into a tuple, delegates to `cls.match`, logs
    entry/exit at high verbosity and validates that nothing was dropped.
    """
    parse_match_logging(
        cls.__name__,
        "_match",
        "IN",
        parse_context=parse_context,
        v_level=4,
        ls=len(segments),
    )
    # Normalise the input into a tuple of segments for compatability.
    if isinstance(segments, BaseSegment):
        segments = (segments,)
    elif not isinstance(segments, tuple):
        logging.warning(
            "{0}.match, was passed {1} rather than tuple or segment".format(
                cls.__name__, type(segments)
            )
        )
        if isinstance(segments, list):
            segments = tuple(segments)
    if not segments:
        logging.info(
            "{0}._match, was passed zero length segments list".format(cls.__name__)
        )
    result = cls.match(segments, parse_context=parse_context)
    if result is not None and not isinstance(result, tuple):
        logging.warning(
            "{0}.match, returned {1} rather than tuple".format(
                cls.__name__, type(result)
            )
        )
    parse_match_logging(
        cls.__name__,
        "_match",
        "OUT",
        parse_context=parse_context,
        v_level=4,
        m=result,
    )
    # Basic Validation: make sure nothing was dropped during matching.
    check_still_complete(segments, result.matched_segments, result.unmatched_segments)
    return result
|
def _match(
    cls,
    segments,
    match_depth=0,
    parse_depth=0,
    verbosity=0,
    dialect=None,
    match_segment=None,
):
    """A wrapper on the match function to do some basic validation and logging.

    Normalises `segments` into a tuple, delegates to `cls.match`, logs
    entry/exit at high verbosity, and validates that no segments were
    dropped by the match.
    """
    parse_match_logging(
        parse_depth,
        match_depth,
        match_segment,
        cls.__name__,
        "_match",
        "IN",
        verbosity=verbosity,
        v_level=4,
        ls=len(segments),
    )
    if isinstance(segments, BaseSegment):
        segments = (segments,)  # Make into a tuple for compatability
    if not isinstance(segments, tuple):
        logging.warning(
            "{0}.match, was passed {1} rather than tuple or segment".format(
                cls.__name__, type(segments)
            )
        )
        if isinstance(segments, list):
            # Let's make it a tuple for compatibility
            segments = tuple(segments)
    if len(segments) == 0:
        logging.info(
            "{0}._match, was passed zero length segments list".format(cls.__name__)
        )
    m = cls.match(
        segments,
        match_depth=match_depth,
        parse_depth=parse_depth,
        verbosity=verbosity,
        dialect=dialect,
        match_segment=match_segment,
    )
    if not isinstance(m, tuple) and m is not None:
        logging.warning(
            "{0}.match, returned {1} rather than tuple".format(cls.__name__, type(m))
        )
    parse_match_logging(
        parse_depth,
        match_depth,
        match_segment,
        cls.__name__,
        "_match",
        "OUT",
        verbosity=verbosity,
        v_level=4,
        m=m,
    )
    # Basic Validation: nothing dropped during matching.
    check_still_complete(segments, m.matched_segments, m.unmatched_segments)
    return m
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def expand(segments, parse_context):
    """Parse each expandable segment, passing non-expandable ones through.

    Returns a new tuple of segments and validates that nothing was
    dropped. Raises if a segment lacks an `is_expandable` attribute or
    a `parse` method (i.e. appears poorly constructed).
    """
    expanded = []
    for seg in segments:
        try:
            expandable = seg.is_expandable
        except Exception as err:
            logging.error(
                "{0} has no attribute `is_expandable`. This segment appears poorly constructed.".format(
                    seg
                )
            )
            raise err
        if not expandable:
            # Nothing to do for this one: keep it as-is.
            logging.info(
                "[PD:{0}] Skipping expansion of {1}...".format(
                    parse_context.parse_depth, seg
                )
            )
            expanded.append(seg)
            continue
        if not hasattr(seg, "parse"):
            raise ValueError(
                "{0} has no method `parse`. This segment appears poorly constructed.".format(
                    seg
                )
            )
        parse_depth_msg = "Parse Depth {0}. Expanding: {1}: {2!r}".format(
            parse_context.parse_depth,
            seg.__class__.__name__,
            curtail_string(seg.raw, length=40),
        )
        verbosity_logger(frame_msg(parse_depth_msg), verbosity=parse_context.verbosity)
        res = seg.parse(parse_context=parse_context)
        if isinstance(res, BaseSegment):
            expanded.append(res)
        else:
            # We might get back an iterable of segments
            expanded.extend(res)
    segs = tuple(expanded)
    # Basic Validation
    check_still_complete(segments, segs, tuple())
    return segs
|
def expand(segments, recurse=True, parse_depth=0, verbosity=0, dialect=None):
    """Parse each expandable segment, passing non-expandable ones through.

    Returns a new tuple of segments and validates that nothing was
    dropped. Raises if a segment lacks an `is_expandable` attribute or
    a `parse` method (i.e. appears poorly constructed).
    """
    segs = tuple()
    for stmt in segments:
        try:
            if not stmt.is_expandable:
                # Nothing to do for this one: keep it as-is.
                logging.info(
                    "[PD:{0}] Skipping expansion of {1}...".format(parse_depth, stmt)
                )
                segs += (stmt,)
                continue
        except Exception as err:
            # raise ValueError("{0} has no attribute `is_expandable`. This segment appears poorly constructed.".format(stmt))
            logging.error(
                "{0} has no attribute `is_expandable`. This segment appears poorly constructed.".format(
                    stmt
                )
            )
            raise err
        if not hasattr(stmt, "parse"):
            raise ValueError(
                "{0} has no method `parse`. This segment appears poorly constructed.".format(
                    stmt
                )
            )
        parse_depth_msg = "Parse Depth {0}. Expanding: {1}: {2!r}".format(
            parse_depth, stmt.__class__.__name__, curtail_string(stmt.raw, length=40)
        )
        verbosity_logger(frame_msg(parse_depth_msg), verbosity=verbosity)
        # Recursively parse the segment's children.
        res = stmt.parse(
            recurse=recurse,
            parse_depth=parse_depth,
            verbosity=verbosity,
            dialect=dialect,
        )
        if isinstance(res, BaseSegment):
            segs += (res,)
        else:
            # We might get back an iterable of segments
            segs += tuple(res)
    # Basic Validation
    check_still_complete(segments, segs, tuple())
    return segs
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(cls, segments, parse_context):
    """Keyword implements its own matching function.

    Only the first segment is ever considered: its raw is compared
    against the class template (upper-cased first unless the class is
    case sensitive) and consumed on success.
    """
    # Accept a bare segment as well as a sequence.
    if isinstance(segments, BaseSegment):
        segments = [segments]
    if not segments:
        logging.debug(
            "{1} will not match sequence of length {0}".format(
                len(segments), cls.__name__
            )
        )
        return MatchResult.from_unmatched(segments)
    first = segments[0]
    raw = first.raw
    pos = first.pos_marker
    # Fold to upper-case for comparison unless matching case-sensitively.
    raw_comp = raw if cls._case_sensitive else raw.upper()
    logging.debug(
        "[PD:{0} MD:{1}] (KW) {2}.match considering {3!r} against {4!r}".format(
            parse_context.parse_depth,
            parse_context.match_depth,
            cls.__name__,
            raw_comp,
            cls._template,
        )
    )
    if cls._template == raw_comp:
        # Matched: consume the first segment, leave the rest unmatched.
        return MatchResult((cls(raw=raw, pos_marker=pos),), segments[1:])
    return MatchResult.from_unmatched(segments)
|
def match(
cls,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
"""Keyword implements it's own matching function"""
# If we've been passed the singular, make it a list
if isinstance(segments, BaseSegment):
segments = [segments]
# We're only going to match against the first element
if len(segments) >= 1:
raw = segments[0].raw
pos = segments[0].pos_marker
if cls._case_sensitive:
raw_comp = raw
else:
raw_comp = raw.upper()
logging.debug(
"[PD:{0} MD:{1}] (KW) {2}.match considering {3!r} against {4!r}".format(
parse_depth, match_depth, cls.__name__, raw_comp, cls._template
)
)
if cls._template == raw_comp:
m = (cls(raw=raw, pos_marker=pos),) # Return as a tuple
return MatchResult(m, segments[1:])
else:
logging.debug(
"{1} will not match sequence of length {0}".format(
len(segments), cls.__name__
)
)
return MatchResult.from_unmatched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(cls, segments, parse_context):
"""ReSegment implements it's own matching function,
we assume that ._template is a r"" string, and is formatted
for use directly as a regex. This only matches on a single segment."""
# If we've been passed the singular, make it a list
if isinstance(segments, BaseSegment):
segments = [segments]
# Regardless of what we're passed, make a string.
# NB: We only match on the first element of a set of segments.
s = segments[0].raw
# Deal with case sentitivity
if not cls._case_sensitive:
sc = s.upper()
else:
sc = s
if len(s) == 0:
raise ValueError("Zero length string passed to ReSegment!?")
logging.debug(
"[PD:{0} MD:{1}] (RE) {2}.match considering {3!r} against {4!r}".format(
parse_context.parse_depth,
parse_context.match_depth,
cls.__name__,
sc,
cls._template,
)
)
# Try the regex
result = re.match(cls._template, sc)
if result:
r = result.group(0)
# Check that we've fully matched
if r == sc:
m = (cls(raw=s, pos_marker=segments[0].pos_marker),) # Return a tuple
return MatchResult(m, segments[1:])
return MatchResult.from_unmatched(segments)
|
def match(
cls,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
"""ReSegment implements it's own matching function,
we assume that ._template is a r"" string, and is formatted
for use directly as a regex. This only matches on a single segment."""
# If we've been passed the singular, make it a list
if isinstance(segments, BaseSegment):
segments = [segments]
# Regardless of what we're passed, make a string.
# NB: We only match on the first element of a set of segments.
s = segments[0].raw
# Deal with case sentitivity
if not cls._case_sensitive:
sc = s.upper()
else:
sc = s
if len(s) == 0:
raise ValueError("Zero length string passed to ReSegment!?")
logging.debug(
"[PD:{0} MD:{1}] (RE) {2}.match considering {3!r} against {4!r}".format(
parse_depth, match_depth, cls.__name__, sc, cls._template
)
)
# Try the regex
result = re.match(cls._template, sc)
if result:
r = result.group(0)
# Check that we've fully matched
if r == sc:
m = (cls(raw=s, pos_marker=segments[0].pos_marker),) # Return a tuple
return MatchResult(m, segments[1:])
return MatchResult.from_unmatched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def match(cls, segments, parse_context):
"""NamedSegment implements it's own matching function,
we assume that ._template is the `name` of a segment"""
# If we've been passed the singular, make it a list
if isinstance(segments, BaseSegment):
segments = [segments]
# We only match on the first element of a set of segments
if len(segments) >= 1:
s = segments[0]
if not cls._case_sensitive:
n = s.name.upper()
else:
n = s.name
logging.debug(
"[PD:{0} MD:{1}] (KW) {2}.match considering {3!r} against {4!r}".format(
parse_context.parse_depth,
parse_context.match_depth,
cls.__name__,
n,
cls._template,
)
)
if cls._template == n:
m = (cls(raw=s.raw, pos_marker=segments[0].pos_marker),) # Return a tuple
return MatchResult(m, segments[1:])
else:
logging.debug(
"{1} will not match sequence of length {0}".format(
len(segments), cls.__name__
)
)
return MatchResult.from_unmatched(segments)
|
def match(
cls,
segments,
match_depth=0,
parse_depth=0,
verbosity=0,
dialect=None,
match_segment=None,
):
"""NamedSegment implements it's own matching function,
we assume that ._template is the `name` of a segment"""
# If we've been passed the singular, make it a list
if isinstance(segments, BaseSegment):
segments = [segments]
# We only match on the first element of a set of segments
if len(segments) >= 1:
s = segments[0]
if not cls._case_sensitive:
n = s.name.upper()
else:
n = s.name
logging.debug(
"[PD:{0} MD:{1}] (KW) {2}.match considering {3!r} against {4!r}".format(
parse_depth, match_depth, cls.__name__, n, cls._template
)
)
if cls._template == n:
m = (cls(raw=s.raw, pos_marker=segments[0].pos_marker),) # Return a tuple
return MatchResult(m, segments[1:])
else:
logging.debug(
"{1} will not match sequence of length {0}".format(
len(segments), cls.__name__
)
)
return MatchResult.from_unmatched(segments)
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def expected_string(cls, dialect=None, called_from=None):
return "!!TODO!!"
|
def expected_string(cls, dialect=None, called_from=None):
return "[" + cls._template + "]"
|
https://github.com/sqlfluff/sqlfluff/issues/23
|
Traceback (most recent call last):
File "/home/user/sql/.venv/bin/sqlfluff", line 11, in <module>
load_entry_point('sqlfluff==0.1.3', 'console_scripts', 'sqlfluff')()
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/user/sql/.venv/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/cli/commands.py", line 85, in lint
result = lnt.lint_paths(paths, verbosity=verbose)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 324, in lint_paths
result.add(self.lint_path(path, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 312, in lint_path
linted_path.add(self.lint_file(f, fname=fname, verbosity=verbosity, fix=fix))
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 212, in lint_file
parsed, vs, time_dict = self.parse_file(f=f, fname=fname, verbosity=verbosity)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/linter.py", line 193, in parse_file
parsed = fs.parse(recurse=recurse, verbosity=verbosity, dialect=self.dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 228, in parse
verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 394, in expand
res = stmt.parse(recurse=recurse, parse_depth=parse_depth, verbosity=verbosity, dialect=dialect)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 186, in parse
dialect=dialect, match_segment=self.__class__.__name__)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 485, in match
match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 273, in match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 236, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 361, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/segments_base.py", line 320, in match
dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 70, in _match
verbosity=verbosity, dialect=dialect, match_segment=match_segment)
File "/home/user/sql/.venv/lib/python3.7/site-packages/sqlfluff/parser/grammar.py", line 586, in match
segment=segments[start_bracket_idx])
TypeError: tuple indices must be integers or slices, not NoneType
|
TypeError
|
def ls(long: bool, dropbox_path: str, include_deleted: bool, config_name: str) -> None:
from datetime import datetime
from .utils import natural_size
if not dropbox_path.startswith("/"):
dropbox_path = "/" + dropbox_path
with MaestralProxy(config_name, fallback=True) as m:
entries = m.list_folder(
dropbox_path,
recursive=False,
include_deleted=include_deleted,
)
entries.sort(key=lambda x: cast(str, x["name"]).lower())
if long:
names = []
types = []
sizes = []
shared = []
last_modified = []
excluded = []
to_short_type = {
"FileMetadata": "file",
"FolderMetadata": "folder",
"DeletedMetadata": "deleted",
}
for e in entries:
long_type = cast(str, e["type"])
name = cast(str, e["name"])
path_lower = cast(str, e["path_lower"])
types.append(to_short_type[long_type])
names.append(name)
shared.append("shared" if "sharing_info" in e else "private")
excluded.append(m.excluded_status(path_lower))
if "size" in e:
size = cast(float, e["size"])
sizes.append(natural_size(size))
else:
sizes.append("-")
if "client_modified" in e:
cm = cast(str, e["client_modified"])
dt = datetime.strptime(cm, "%Y-%m-%dT%H:%M:%S%z").astimezone()
last_modified.append(dt.strftime("%d %b %Y %H:%M"))
else:
last_modified.append("-")
click.echo("")
click.echo(
format_table(
headers=["Name", "Type", "Size", "Shared", "Syncing", "Modified"],
columns=[names, types, sizes, shared, excluded, last_modified],
alignment=[LEFT, LEFT, RIGHT, LEFT, LEFT, LEFT],
wrap=False,
),
)
click.echo("")
else:
from .utils import chunks
names = []
colors = []
formatted_names = []
max_len = 0
for e in entries:
name = cast(str, e["name"])
max_len = max(max_len, len(name))
names.append(name)
colors.append("blue" if e["type"] == "DeletedMetadata" else None)
max_len += 2 # add 2 spaces padding
for name, color in zip(names, colors):
formatted_names.append(click.style(name.ljust(max_len), fg=color))
width, height = click.get_terminal_size()
n_columns = max(width // max_len, 1)
rows = chunks(formatted_names, n_columns)
for row in rows:
click.echo("".join(row))
|
def ls(long: bool, dropbox_path: str, include_deleted: bool, config_name: str) -> None:
from datetime import datetime
from .utils import natural_size
if not dropbox_path.startswith("/"):
dropbox_path = "/" + dropbox_path
with MaestralProxy(config_name, fallback=True) as m:
entries = m.list_folder(
dropbox_path, recursive=False, include_deleted=include_deleted
)
entries.sort(key=lambda x: cast(str, x["name"]).lower())
if long:
names = []
types = []
sizes = []
shared = []
last_modified = []
excluded = []
to_short_type = {
"FileMetadata": "file",
"FolderMetadata": "folder",
"DeletedMetadata": "deleted",
}
for e in entries:
long_type = cast(str, e["type"])
name = cast(str, e["name"])
path_lower = cast(str, e["path_lower"])
types.append(to_short_type[long_type])
names.append(name)
shared.append("shared" if "sharing_info" in e else "private")
excluded.append(m.excluded_status(path_lower))
if "size" in e:
size = cast(float, e["size"])
sizes.append(natural_size(size))
else:
sizes.append("-")
if "client_modified" in e:
cm = cast(str, e["client_modified"])
dt = datetime.strptime(cm, "%Y-%m-%dT%H:%M:%S%z").astimezone()
last_modified.append(dt.strftime("%d %b %Y %H:%M"))
else:
last_modified.append("-")
click.echo("")
click.echo(
format_table(
headers=["Name", "Type", "Size", "Shared", "Syncing", "Last modified"],
columns=[names, types, sizes, shared, excluded, last_modified],
alignment=[LEFT, LEFT, RIGHT, LEFT, LEFT, LEFT],
wrap=False,
),
)
click.echo("")
else:
from .utils import chunks
names = []
colors = []
formatted_names = []
max_len = 0
for e in entries:
name = cast(str, e["name"])
max_len = max(max_len, len(name))
names.append(name)
colors.append("blue" if e["type"] == "DeletedMetadata" else None)
max_len += 2 # add 2 spaces padding
for name, color in zip(names, colors):
formatted_names.append(click.style(name.ljust(max_len), fg=color))
width, height = click.get_terminal_size()
n_columns = max(width // max_len, 1)
rows = chunks(formatted_names, n_columns)
for row in rows:
click.echo("".join(row))
|
https://github.com/SamSchott/maestral/issues/224
|
Traceback (most recent call last):
File "/opt/miniconda3/bin/maestral", line 8, in <module>
sys.exit(main())
File "/opt/miniconda3/lib/python3.7/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/opt/miniconda3/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/opt/miniconda3/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/opt/miniconda3/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/opt/miniconda3/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/opt/miniconda3/lib/python3.7/site-packages/maestral/cli.py", line 223, in wrapper
return func(*args, **kwargs)
File "/opt/miniconda3/lib/python3.7/site-packages/maestral/cli.py", line 952, in ls
excluded.append(m.excluded_status(path_lower))
File "/opt/miniconda3/lib/python3.7/site-packages/maestral/daemon.py", line 705, in __getattr__
elif isinstance(self._m, Proxy):
File "/opt/miniconda3/lib/python3.7/site-packages/maestral/daemon.py", line 704, in __getattr__
super().__getattribute__(item)
AttributeError: 'MaestralProxy' object has no attribute '_m'
|
AttributeError
|
def check_for_updates():
"""
Checks if updates are available by reading the cached release number from the
config file and notifies the user. Prints an update note to the command line.
"""
from packaging.version import Version
from maestral import __version__
state = MaestralState("maestral")
latest_release = state.get("app", "latest_release")
has_update = Version(__version__) < Version(latest_release)
if has_update:
click.echo(
f"Maestral v{latest_release} has been released, you have v{__version__}. "
f"Please use your package manager to update."
)
|
def check_for_updates():
"""
Checks if updates are available by reading the cached release number from the
config file and notifies the user. Prints an update note to the command line.
"""
from packaging.version import Version
from maestral import __version__
state = MaestralState("maestral")
latest_release = state.get("app", "latest_release")
has_update = Version(__version__) < Version(latest_release)
if has_update:
click.secho(
f"Maestral v{latest_release} has been released, you have v{__version__}. "
f"Please use your package manager to update.",
fg="orange",
)
|
https://github.com/SamSchott/maestral/issues/136
|
Traceback (most recent call last):
File "/usr/lib/python3.8/site-packages/click/termui.py", line 498, in style
bits.append("\033[{}m".format(_ansi_colors[fg]))
KeyError: 'orange'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/bin/maestral", line 11, in <module>
load_entry_point('maestral==1.0.0.dev5', 'console_scripts', 'maestral')()
File "/usr/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/lib/python3.8/site-packages/click/core.py", line 1256, in invoke
Command.invoke(self, ctx)
File "/usr/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/lib/python3.8/site-packages/maestral/cli.py", line 388, in main
check_for_updates()
File "/usr/lib/python3.8/site-packages/maestral/cli.py", line 147, in check_for_updates
click.secho(
File "/usr/lib/python3.8/site-packages/click/termui.py", line 547, in secho
message = style(message, **styles)
File "/usr/lib/python3.8/site-packages/click/termui.py", line 500, in style
raise TypeError("Unknown color '{}'".format(fg))
TypeError: Unknown color 'orange'
|
KeyError
|
def catch_sync_issues(download=False):
"""
Returns a decorator that catches all SyncErrors and logs them.
Should only be used for methods of UpDownSync.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
try:
res = func(self, *args, **kwargs)
if res is None:
res = True
except SyncError as exc:
# fill out missing dbx_path or local_path
if exc.dbx_path or exc.local_path:
if not exc.local_path:
exc.local_path = self.to_local_path(exc.dbx_path)
if not exc.dbx_path:
exc.dbx_path = self.to_dbx_path(exc.local_path)
if exc.dbx_path_dst or exc.local_path_dst:
if not exc.local_path_dst:
exc.local_path_dst = self.to_local_path(exc.dbx_path_dst)
if not exc.dbx_path:
exc.dbx_path_dst = self.to_dbx_path(exc.local_path_dst)
if exc.dbx_path:
# we have a file / folder associated with the sync error
file_name = osp.basename(exc.dbx_path)
logger.warning("Could not sync %s", file_name, exc_info=True)
self.sync_errors.put(exc)
# save download errors to retry later
if download:
self.download_errors.add(exc.dbx_path)
res = False
return res
return wrapper
return decorator
|
def catch_sync_issues(func):
"""
Decorator that catches all SyncErrors and logs them.
"""
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
try:
res = func(self, *args, **kwargs)
if res is None:
res = True
except SyncError as exc:
file_name = os.path.basename(exc.dbx_path)
logger.warning("Could not sync %s", file_name, exc_info=True)
if exc.dbx_path is not None:
if exc.local_path is None:
exc.local_path = self.to_local_path(exc.dbx_path)
self.sync_errors.put(exc)
if any(isinstance(a, Metadata) for a in args):
self.download_errors.add(exc.dbx_path)
res = False
return res
return wrapper
|
https://github.com/SamSchott/maestral/issues/131
|
Traceback (most recent call last):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 343, in wrapper
res = func(self, *args, **kwargs)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1524, in create_remote_entry
self._on_created(event)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1621, in _on_created
if self._handle_selective_sync_conflict(event):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1435, in _handle_selective_sync_conflict
raise os_to_maestral_error(exc, local_path=local_path_cc)
maestral.errors.PathError: Could not create local file. The file name (including path) is too long.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 2679, in startup_worker
sync.upload_local_changes_while_inactive()
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1053, in upload_local_changes_while_inactive
self.apply_local_changes(events, local_cursor)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1495, in apply_local_changes
success.append(f.result())
File "/usr/lib/python3.7/concurrent/futures/_base.py", line 428, in result
return self.__get_result()
File "/usr/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/usr/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 347, in wrapper
file_name = os.path.basename(exc.dbx_path)
File "/home/charles/tf/lib/python3.7/posixpath.py", line 146, in basename
p = os.fspath(p)
TypeError: expected str, bytes or os.PathLike object, not NoneType
|
maestral.errors.PathError
|
def wrapper(self, *args, **kwargs):
try:
res = func(self, *args, **kwargs)
if res is None:
res = True
except SyncError as exc:
# fill out missing dbx_path or local_path
if exc.dbx_path or exc.local_path:
if not exc.local_path:
exc.local_path = self.to_local_path(exc.dbx_path)
if not exc.dbx_path:
exc.dbx_path = self.to_dbx_path(exc.local_path)
if exc.dbx_path_dst or exc.local_path_dst:
if not exc.local_path_dst:
exc.local_path_dst = self.to_local_path(exc.dbx_path_dst)
if not exc.dbx_path:
exc.dbx_path_dst = self.to_dbx_path(exc.local_path_dst)
if exc.dbx_path:
# we have a file / folder associated with the sync error
file_name = osp.basename(exc.dbx_path)
logger.warning("Could not sync %s", file_name, exc_info=True)
self.sync_errors.put(exc)
# save download errors to retry later
if download:
self.download_errors.add(exc.dbx_path)
res = False
return res
|
def wrapper(self, *args, **kwargs):
try:
res = func(self, *args, **kwargs)
if res is None:
res = True
except SyncError as exc:
file_name = os.path.basename(exc.dbx_path)
logger.warning("Could not sync %s", file_name, exc_info=True)
if exc.dbx_path is not None:
if exc.local_path is None:
exc.local_path = self.to_local_path(exc.dbx_path)
self.sync_errors.put(exc)
if any(isinstance(a, Metadata) for a in args):
self.download_errors.add(exc.dbx_path)
res = False
return res
|
https://github.com/SamSchott/maestral/issues/131
|
Traceback (most recent call last):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 343, in wrapper
res = func(self, *args, **kwargs)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1524, in create_remote_entry
self._on_created(event)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1621, in _on_created
if self._handle_selective_sync_conflict(event):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1435, in _handle_selective_sync_conflict
raise os_to_maestral_error(exc, local_path=local_path_cc)
maestral.errors.PathError: Could not create local file. The file name (including path) is too long.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 2679, in startup_worker
sync.upload_local_changes_while_inactive()
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1053, in upload_local_changes_while_inactive
self.apply_local_changes(events, local_cursor)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1495, in apply_local_changes
success.append(f.result())
File "/usr/lib/python3.7/concurrent/futures/_base.py", line 428, in result
return self.__get_result()
File "/usr/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/usr/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 347, in wrapper
file_name = os.path.basename(exc.dbx_path)
File "/home/charles/tf/lib/python3.7/posixpath.py", line 146, in basename
p = os.fspath(p)
TypeError: expected str, bytes or os.PathLike object, not NoneType
|
maestral.errors.PathError
|
def _handle_case_conflict(self, event):
"""
Checks for other items in the same directory with same name but a different
case. Renames items if necessary.Only needed for case sensitive file systems.
:param FileSystemEvent event: Created or moved event.
:returns: ``True`` or ``False``.
:rtype: bool
"""
if not IS_FS_CASE_SENSITIVE:
return False
if event.event_type not in (EVENT_TYPE_CREATED, EVENT_TYPE_MOVED):
return False
# get the created path (src_path or dest_path)
local_path = get_dest_path(event)
dirname, basename = osp.split(local_path)
# check number of paths with the same case
if len(path_exists_case_insensitive(basename, root=dirname)) > 1:
local_path_cc = generate_cc_name(local_path, suffix="case conflict")
with self.fs_events.ignore(
local_path,
recursive=osp.isdir(local_path),
event_types=(EVENT_TYPE_DELETED, EVENT_TYPE_MOVED),
):
exc = move(local_path, local_path_cc)
if exc:
raise os_to_maestral_error(exc, local_path=local_path_cc)
logger.info('Case conflict: renamed "%s" to "%s"', local_path, local_path_cc)
return True
else:
return False
|
def _handle_case_conflict(self, event):
"""
Checks for other items in the same directory with same name but a different
case. Renames items if necessary.Only needed for case sensitive file systems.
:param FileSystemEvent event: Created or moved event.
:returns: ``True`` or ``False``.
:rtype: bool
"""
if not IS_FS_CASE_SENSITIVE:
return False
if event.event_type not in (EVENT_TYPE_CREATED, EVENT_TYPE_MOVED):
return False
# get the created path (src_path or dest_path)
dest_path = get_dest_path(event)
dirname, basename = osp.split(dest_path)
# check number of paths with the same case
if len(path_exists_case_insensitive(basename, root=dirname)) > 1:
dest_path_cc = generate_cc_name(dest_path, suffix="case conflict")
with self.fs_events.ignore(
dest_path,
recursive=osp.isdir(dest_path),
event_types=(EVENT_TYPE_DELETED, EVENT_TYPE_MOVED),
):
exc = move(dest_path, dest_path_cc)
if exc:
raise os_to_maestral_error(exc, local_path=dest_path_cc)
logger.info('Case conflict: renamed "%s" to "%s"', dest_path, dest_path_cc)
return True
else:
return False
|
https://github.com/SamSchott/maestral/issues/131
|
Traceback (most recent call last):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 343, in wrapper
res = func(self, *args, **kwargs)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1524, in create_remote_entry
self._on_created(event)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1621, in _on_created
if self._handle_selective_sync_conflict(event):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1435, in _handle_selective_sync_conflict
raise os_to_maestral_error(exc, local_path=local_path_cc)
maestral.errors.PathError: Could not create local file. The file name (including path) is too long.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 2679, in startup_worker
sync.upload_local_changes_while_inactive()
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1053, in upload_local_changes_while_inactive
self.apply_local_changes(events, local_cursor)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 1495, in apply_local_changes
success.append(f.result())
File "/usr/lib/python3.7/concurrent/futures/_base.py", line 428, in result
return self.__get_result()
File "/usr/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/usr/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/charles/tf/lib/python3.7/site-packages/maestral/sync.py", line 347, in wrapper
file_name = os.path.basename(exc.dbx_path)
File "/home/charles/tf/lib/python3.7/posixpath.py", line 146, in basename
p = os.fspath(p)
TypeError: expected str, bytes or os.PathLike object, not NoneType
|
maestral.errors.PathError
|
def __init__(self, run=True):
self.client = MaestralApiClient()
# periodically check for updates and refresh account info
self.update_thread = Thread(
name="Maestral update check",
target=self._periodic_refresh,
daemon=True,
)
self.update_thread.start()
# monitor needs to be created before any decorators are called
self.monitor = MaestralMonitor(self.client)
self.sync = self.monitor.sync
if NOTIFY_SOCKET and system_notifier:
# notify systemd that we have successfully started
system_notifier.notify("READY=1")
if WATCHDOG_USEC and int(WATCHDOG_PID) == os.getpid() and system_notifier:
# notify systemd periodically that we are still alive
self.watchdog_thread = Thread(
name="Maestral watchdog",
target=self._periodic_watchdog,
daemon=True,
)
self.watchdog_thread.start()
if run:
# if `run == False`, make sure that you manually run the setup
# before calling `start_sync`
if self.pending_dropbox_folder():
self.create_dropbox_directory()
self.set_excluded_folders()
self.sync.last_cursor = ""
self.sync.last_sync = 0
self.start_sync()
|
def __init__(self, run=True):
self.client = MaestralApiClient()
# periodically check for updates and refresh account info
self.update_thread = Thread(
name="Maestral update check",
target=self._periodic_refresh,
daemon=True,
)
self.update_thread.start()
# monitor needs to be created before any decorators are called
self.monitor = MaestralMonitor(self.client)
self.sync = self.monitor.sync
if NOTIFY_SOCKET and system_notifier:
# notify systemd that we have successfully started
system_notifier.notify("READY=1")
if WATCHDOG_USEC and int(WATCHDOG_PID) == os.getpid() and system_notifier:
# notify systemd periodically that we are still alive
self.watchdog_thread = Thread(
name="Maestral watchdog",
target=self._periodic_watchdog,
daemon=True,
)
self.update_thread.start()
if run:
# if `run == False`, make sure that you manually run the setup
# before calling `start_sync`
if self.pending_dropbox_folder():
self.create_dropbox_directory()
self.set_excluded_folders()
self.sync.last_cursor = ""
self.sync.last_sync = 0
self.start_sync()
|
https://github.com/SamSchott/maestral/issues/62
|
Oct 22 21:59:54 dolores systemd[1126]: Starting Maestral - a Dropbox clone...
Oct 22 21:59:55 dolores systemd[1126]: Started Maestral - a Dropbox clone.
Oct 22 21:59:55 dolores maestral[5770]: Traceback (most recent call last):
Oct 22 21:59:55 dolores maestral[5770]: File "/nix/store/vrllbj5ial3g70b2wd2bxmx01rxv32yw-maestral-0.4.1/lib/python3.7/site-packages/maestral/sync/daemon.py", line 119, in start_maestral_daemon
Oct 22 21:59:55 dolores maestral[5770]: m = ExposedMaestral(run=run)
Oct 22 21:59:55 dolores maestral[5770]: File "/nix/store/vrllbj5ial3g70b2wd2bxmx01rxv32yw-maestral-0.4.1/lib/python3.7/site-packages/maestral/sync/main.py", line 255, in __init__
Oct 22 21:59:55 dolores maestral[5770]: self.update_thread.start()
Oct 22 21:59:55 dolores maestral[5770]: File "/nix/store/8lhmyjarm73453f2mdz0xli9w8sy0wvh-python3-3.7.4/lib/python3.7/threading.py", line 848, in start
Oct 22 21:59:55 dolores maestral[5770]: raise RuntimeError("threads can only be started once")
Oct 22 21:59:55 dolores maestral[5770]: RuntimeError: threads can only be started once
|
RuntimeError
|
def __str__(self):
return "'{0}': {1}. {2}".format(self.dbx_path, self.title, self.message)
|
def __str__(self):
return "{0}: {1}".format(self.title, self.message)
|
https://github.com/SamSchott/maestral/issues/39
|
dropbox.exceptions.ApiError: ApiError('30f1d5eeaac843c1ff555c04e08561b8', UploadError('path', UploadWriteFailed(reason=WriteError('disallowed_name', None), upload_session_id='AAAAAAAAM9FqZ1pyxzgvlA')))
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/nix/store/jjccwr48ahjxvzg7v6gigacwhsj8k2m4-maestral-gui-0.2.6/lib/python3.7/site-packages/maestral/monitor.py", line 188, in wrapper
res = func(self, *args, **kwargs)
File "/nix/store/jjccwr48ahjxvzg7v6gigacwhsj8k2m4-maestral-gui-0.2.6/lib/python3.7/site-packages/maestral/monitor.py", line 710, in _apply_event
self._on_created(evnt)
File "/nix/store/jjccwr48ahjxvzg7v6gigacwhsj8k2m4-maestral-gui-0.2.6/lib/python3.7/site-packages/maestral/monitor.py", line 812, in _on_created
md = self.client.upload(path, dbx_path, autorename=True, mode=mode)
File "/nix/store/jjccwr48ahjxvzg7v6gigacwhsj8k2m4-maestral-gui-0.2.6/lib/python3.7/site-packages/maestral/client.py", line 282, in upload
raise to_maestral_error(exc, dbx_path) from exc
maestral.errors.PathError: Could not upload file: Dropbox will not save the file or folder because of its name.
2019-08-13 21:50:09 maestral.monitor ERROR: Sync error
Traceback (most recent call last):
File "/nix/store/jjccwr48ahjxvzg7v6gigacwhsj8k2m4-maestral-gui-0.2.6/lib/python3.7/site-packages/maestral/client.py", line 257, in upload
f.read(), dbx_path, client_modified=mtime_dt, **kwargs)
File "/nix/store/5ajzqlpk5l37lc4x2q4sb5hkszvf9kir-python3.7-dropbox-9.4.0/lib/python3.7/site-packages/dropbox/base.py", line 2458, in files_upload
f,
File "/nix/store/5ajzqlpk5l37lc4x2q4sb5hkszvf9kir-python3.7-dropbox-9.4.0/lib/python3.7/site-packages/dropbox/dropbox.py", line 296, in request
user_message_locale)
dropbox.exceptions.ApiError: ApiError('0c98289b3e80e3259fe670566c2d7f98', UploadError('path', UploadWriteFailed(reason=WriteError('disallowed_name', None), upload_session_id='AAAAAAAAM9IdJt1oBQwYjg')))
|
maestral.errors.PathError
|
def bulk_history_create(self, objs, batch_size=None):
"""Bulk create the history for the objects specified by objs"""
historical_instances = [
self.model(
history_date=getattr(instance, "_history_date", now()),
history_user=getattr(instance, "_history_user", None),
**{
field.attname: getattr(instance, field.attname)
for field in instance._meta.fields
if field.name not in self.model._history_excluded_fields
},
)
for instance in objs
]
return self.model.objects.bulk_create(historical_instances, batch_size=batch_size)
|
def bulk_history_create(self, objs, batch_size=None):
"""Bulk create the history for the objects specified by objs"""
historical_instances = [
self.model(
history_date=getattr(instance, "_history_date", now()),
history_user=getattr(instance, "_history_user", None),
**{
field.attname: getattr(instance, field.attname)
for field in instance._meta.fields
},
)
for instance in objs
]
return self.model.objects.bulk_create(historical_instances, batch_size=batch_size)
|
https://github.com/jazzband/django-simple-history/issues/402
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 65, in handle
self._process(to_process, batch_size=options['batchsize'])
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 110, in _process
utils.bulk_history_create(model, history_model, batch_size)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/_populate_utils.py", line 28, in bulk_history_create
) for instance in model.objects.all()]
File "/srv/venv/local/lib/python2.7/site-packages/django/db/models/base.py", line 572, in __init__
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
TypeError: 'description' is an invalid keyword argument for this function
|
TypeError
|
def create_history_model(self, model, inherited):
"""
Creates a historical model to associate with the model provided.
"""
attrs = {
"__module__": self.module,
"_history_excluded_fields": self.excluded_fields,
}
app_module = "%s.models" % model._meta.app_label
if inherited:
# inherited use models module
attrs["__module__"] = model.__module__
elif model.__module__ != self.module:
# registered under different app
attrs["__module__"] = self.module
elif app_module != self.module:
# Abuse an internal API because the app registry is loading.
app = apps.app_configs[model._meta.app_label]
models_module = app.name
attrs["__module__"] = models_module
fields = self.copy_fields(model)
attrs.update(fields)
attrs.update(self.get_extra_fields(model, fields))
# type in python2 wants str as a first argument
attrs.update(Meta=type(str("Meta"), (), self.get_meta_options(model)))
if self.table_name is not None:
attrs["Meta"].db_table = self.table_name
name = "Historical%s" % model._meta.object_name
registered_models[model._meta.db_table] = model
return python_2_unicode_compatible(type(str(name), self.bases, attrs))
|
def create_history_model(self, model, inherited):
"""
Creates a historical model to associate with the model provided.
"""
attrs = {"__module__": self.module}
app_module = "%s.models" % model._meta.app_label
if inherited:
# inherited use models module
attrs["__module__"] = model.__module__
elif model.__module__ != self.module:
# registered under different app
attrs["__module__"] = self.module
elif app_module != self.module:
# Abuse an internal API because the app registry is loading.
app = apps.app_configs[model._meta.app_label]
models_module = app.name
attrs["__module__"] = models_module
fields = self.copy_fields(model)
attrs.update(fields)
attrs.update(self.get_extra_fields(model, fields))
# type in python2 wants str as a first argument
attrs.update(Meta=type(str("Meta"), (), self.get_meta_options(model)))
if self.table_name is not None:
attrs["Meta"].db_table = self.table_name
name = "Historical%s" % model._meta.object_name
registered_models[model._meta.db_table] = model
return python_2_unicode_compatible(type(str(name), self.bases, attrs))
|
https://github.com/jazzband/django-simple-history/issues/402
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 65, in handle
self._process(to_process, batch_size=options['batchsize'])
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 110, in _process
utils.bulk_history_create(model, history_model, batch_size)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/_populate_utils.py", line 28, in bulk_history_create
) for instance in model.objects.all()]
File "/srv/venv/local/lib/python2.7/site-packages/django/db/models/base.py", line 572, in __init__
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
TypeError: 'description' is an invalid keyword argument for this function
|
TypeError
|
def get_extra_fields(self, model, fields):
"""Return dict of extra fields added to the historical record model"""
user_model = getattr(settings, "AUTH_USER_MODEL", "auth.User")
def revert_url(self):
"""URL for this change in the default admin site."""
opts = model._meta
app_label, model_name = opts.app_label, opts.model_name
return reverse(
"%s:%s_%s_simple_history" % (admin.site.name, app_label, model_name),
args=[getattr(self, opts.pk.attname), self.history_id],
)
def get_instance(self):
attrs = {
field.attname: getattr(self, field.attname) for field in fields.values()
}
if self._history_excluded_fields:
excluded_attnames = [
model._meta.get_field(field).attname
for field in self._history_excluded_fields
]
values = (
model.objects.filter(pk=getattr(self, model._meta.pk.attname))
.values(*excluded_attnames)
.get()
)
attrs.update(values)
return model(**attrs)
def get_next_record(self):
"""
Get the next history record for the instance. `None` if last.
"""
return (
self.instance.history.filter(Q(history_date__gt=self.history_date))
.order_by("history_date")
.first()
)
def get_prev_record(self):
"""
Get the previous history record for the instance. `None` if first.
"""
return (
self.instance.history.filter(Q(history_date__lt=self.history_date))
.order_by("history_date")
.last()
)
if self.history_id_field:
history_id_field = self.history_id_field
history_id_field.primary_key = True
history_id_field.editable = False
elif getattr(settings, "SIMPLE_HISTORY_HISTORY_ID_USE_UUID", False):
history_id_field = models.UUIDField(
primary_key=True, default=uuid.uuid4, editable=False
)
else:
history_id_field = models.AutoField(primary_key=True)
if self.history_change_reason_field:
# User specific field from init
history_change_reason_field = self.history_change_reason_field
elif getattr(
settings, "SIMPLE_HISTORY_HISTORY_CHANGE_REASON_USE_TEXT_FIELD", False
):
# Use text field with no max length, not enforced by DB anyways
history_change_reason_field = models.TextField(null=True)
else:
# Current default, with max length
history_change_reason_field = models.CharField(max_length=100, null=True)
return {
"history_id": history_id_field,
"history_date": models.DateTimeField(),
"history_change_reason": history_change_reason_field,
"history_user": models.ForeignKey(
user_model,
null=True,
related_name=self.user_related_name,
on_delete=models.SET_NULL,
),
"history_type": models.CharField(
max_length=1,
choices=(
("+", _("Created")),
("~", _("Changed")),
("-", _("Deleted")),
),
),
"history_object": HistoricalObjectDescriptor(
model, self.fields_included(model)
),
"instance": property(get_instance),
"instance_type": model,
"next_record": property(get_next_record),
"prev_record": property(get_prev_record),
"revert_url": revert_url,
"__str__": lambda self: "%s as of %s"
% (self.history_object, self.history_date),
}
|
def get_extra_fields(self, model, fields):
"""Return dict of extra fields added to the historical record model"""
user_model = getattr(settings, "AUTH_USER_MODEL", "auth.User")
def revert_url(self):
"""URL for this change in the default admin site."""
opts = model._meta
app_label, model_name = opts.app_label, opts.model_name
return reverse(
"%s:%s_%s_simple_history" % (admin.site.name, app_label, model_name),
args=[getattr(self, opts.pk.attname), self.history_id],
)
def get_instance(self):
return model(
**{field.attname: getattr(self, field.attname) for field in fields.values()}
)
def get_next_record(self):
"""
Get the next history record for the instance. `None` if last.
"""
return (
self.instance.history.filter(Q(history_date__gt=self.history_date))
.order_by("history_date")
.first()
)
def get_prev_record(self):
"""
Get the previous history record for the instance. `None` if first.
"""
return (
self.instance.history.filter(Q(history_date__lt=self.history_date))
.order_by("history_date")
.last()
)
if self.history_id_field:
history_id_field = self.history_id_field
history_id_field.primary_key = True
history_id_field.editable = False
elif getattr(settings, "SIMPLE_HISTORY_HISTORY_ID_USE_UUID", False):
history_id_field = models.UUIDField(
primary_key=True, default=uuid.uuid4, editable=False
)
else:
history_id_field = models.AutoField(primary_key=True)
if self.history_change_reason_field:
# User specific field from init
history_change_reason_field = self.history_change_reason_field
elif getattr(
settings, "SIMPLE_HISTORY_HISTORY_CHANGE_REASON_USE_TEXT_FIELD", False
):
# Use text field with no max length, not enforced by DB anyways
history_change_reason_field = models.TextField(null=True)
else:
# Current default, with max length
history_change_reason_field = models.CharField(max_length=100, null=True)
return {
"history_id": history_id_field,
"history_date": models.DateTimeField(),
"history_change_reason": history_change_reason_field,
"history_user": models.ForeignKey(
user_model,
null=True,
related_name=self.user_related_name,
on_delete=models.SET_NULL,
),
"history_type": models.CharField(
max_length=1,
choices=(
("+", _("Created")),
("~", _("Changed")),
("-", _("Deleted")),
),
),
"history_object": HistoricalObjectDescriptor(
model, self.fields_included(model)
),
"instance": property(get_instance),
"instance_type": model,
"next_record": property(get_next_record),
"prev_record": property(get_prev_record),
"revert_url": revert_url,
"__str__": lambda self: "%s as of %s"
% (self.history_object, self.history_date),
}
|
https://github.com/jazzband/django-simple-history/issues/402
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 65, in handle
self._process(to_process, batch_size=options['batchsize'])
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 110, in _process
utils.bulk_history_create(model, history_model, batch_size)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/_populate_utils.py", line 28, in bulk_history_create
) for instance in model.objects.all()]
File "/srv/venv/local/lib/python2.7/site-packages/django/db/models/base.py", line 572, in __init__
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
TypeError: 'description' is an invalid keyword argument for this function
|
TypeError
|
def get_instance(self):
attrs = {field.attname: getattr(self, field.attname) for field in fields.values()}
if self._history_excluded_fields:
excluded_attnames = [
model._meta.get_field(field).attname
for field in self._history_excluded_fields
]
values = (
model.objects.filter(pk=getattr(self, model._meta.pk.attname))
.values(*excluded_attnames)
.get()
)
attrs.update(values)
return model(**attrs)
|
def get_instance(self):
return model(
**{field.attname: getattr(self, field.attname) for field in fields.values()}
)
|
https://github.com/jazzband/django-simple-history/issues/402
|
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/venv/local/lib/python2.7/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 65, in handle
self._process(to_process, batch_size=options['batchsize'])
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/populate_history.py", line 110, in _process
utils.bulk_history_create(model, history_model, batch_size)
File "/srv/venv/local/lib/python2.7/site-packages/simple_history/management/commands/_populate_utils.py", line 28, in bulk_history_create
) for instance in model.objects.all()]
File "/srv/venv/local/lib/python2.7/site-packages/django/db/models/base.py", line 572, in __init__
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
TypeError: 'description' is an invalid keyword argument for this function
|
TypeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic binary sensor platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
if discovery_info[ATTR_DISCOVERY_TYPE] == DISCOVER_BATTERY:
devices.append(HMBatterySensor(conf))
else:
devices.append(HMBinarySensor(conf))
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic binary sensor platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
if discovery_info[ATTR_DISCOVERY_TYPE] == DISCOVER_BATTERY:
devices.append(HMBatterySensor(conf))
else:
devices.append(HMBinarySensor(conf))
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic thermostat platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMThermostat(conf)
devices.append(new_device)
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic thermostat platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMThermostat(conf)
devices.append(new_device)
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMCover(conf)
devices.append(new_device)
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMCover(conf)
devices.append(new_device)
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def _init_data_struct(self):
"""Generate a data dictionary (self._data) from metadata."""
self._state = "LEVEL"
self._data.update({self._state: None})
if "LEVEL_2" in self._hmdevice.WRITENODE:
self._data.update({"LEVEL_2": None})
|
def _init_data_struct(self):
"""Generate a data dictionary (self._data) from metadata."""
self._state = "LEVEL"
self._data.update({self._state: STATE_UNKNOWN})
if "LEVEL_2" in self._hmdevice.WRITENODE:
self._data.update({"LEVEL_2": STATE_UNKNOWN})
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic light platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMLight(conf)
devices.append(new_device)
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic light platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMLight(conf)
devices.append(new_device)
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic lock platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
devices.append(HMLock(conf))
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic lock platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
devices.append(HMLock(conf))
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def _init_data_struct(self):
"""Generate the data dictionary (self._data) from metadata."""
self._state = "STATE"
self._data.update({self._state: None})
|
def _init_data_struct(self):
"""Generate the data dictionary (self._data) from metadata."""
self._state = "STATE"
self._data.update({self._state: STATE_UNKNOWN})
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic sensor platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMSensor(conf)
devices.append(new_device)
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic sensor platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMSensor(conf)
devices.append(new_device)
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def _init_data_struct(self):
"""Generate a data dictionary (self._data) from metadata."""
if self._state:
self._data.update({self._state: None})
else:
_LOGGER.critical("Unable to initialize sensor: %s", self._name)
|
def _init_data_struct(self):
"""Generate a data dictionary (self._data) from metadata."""
if self._state:
self._data.update({self._state: STATE_UNKNOWN})
else:
_LOGGER.critical("Unable to initialize sensor: %s", self._name)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic switch platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMSwitch(conf)
devices.append(new_device)
add_entities(devices, True)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic switch platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMSwitch(conf)
devices.append(new_device)
add_entities(devices)
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
def _init_data_struct(self):
"""Generate the data dictionary (self._data) from metadata."""
self._state = "STATE"
self._data.update({self._state: None})
# Need sensor values for SwitchPowermeter
for node in self._hmdevice.SENSORNODE:
self._data.update({node: None})
|
def _init_data_struct(self):
"""Generate the data dictionary (self._data) from metadata."""
self._state = "STATE"
self._data.update({self._state: STATE_UNKNOWN})
# Need sensor values for SwitchPowermeter
for node in self._hmdevice.SENSORNODE:
self._data.update({node: STATE_UNKNOWN})
|
https://github.com/home-assistant/core/issues/30736
|
Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 344, in _async_add_entity
capabilities=entity.capability_attributes,
File "/usr/src/homeassistant/homeassistant/components/climate/__init__.py", line 179, in capability_attributes
ATTR_HVAC_MODES: self.hvac_modes,
File "/usr/src/homeassistant/homeassistant/components/homematic/climate.py", line 87, in hvac_modes
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
AttributeError: 'NoneType' object has no attribute 'ACTIONNODE'
|
AttributeError
|
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up PS4 from a config entry."""
config = config_entry
creds = config.data[CONF_TOKEN]
device_list = []
for device in config.data["devices"]:
host = device[CONF_HOST]
region = device[CONF_REGION]
name = device[CONF_NAME]
ps4 = pyps4.Ps4Async(host, creds, device_name=DEFAULT_ALIAS)
device_list.append(PS4Device(config, name, host, region, ps4, creds))
async_add_entities(device_list, update_before_add=True)
|
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up PS4 from a config entry."""
config = config_entry
await async_setup_platform(hass, config, async_add_entities, discovery_info=None)
|
https://github.com/home-assistant/core/issues/25665
|
Error while setting up platform ps4
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 146, in _async_setup_platform
await asyncio.wait_for(asyncio.shield(task), SLOW_SETUP_MAX_WAIT)
File "/usr/local/lib/python3.7/asyncio/tasks.py", line 442, in wait_for
return fut.result()
File "/usr/src/homeassistant/homeassistant/components/ps4/media_player.py", line 66, in async_setup_platform
creds = config.data[CONF_TOKEN]
AttributeError: 'collections.OrderedDict' object has no attribute 'data'
|
AttributeError
|
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Not Implemented."""
pass
|
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up PS4 Platform."""
creds = config.data[CONF_TOKEN]
device_list = []
for device in config.data["devices"]:
host = device[CONF_HOST]
region = device[CONF_REGION]
name = device[CONF_NAME]
ps4 = pyps4.Ps4Async(host, creds, device_name=DEFAULT_ALIAS)
device_list.append(PS4Device(config, name, host, region, ps4, creds))
async_add_entities(device_list, update_before_add=True)
|
https://github.com/home-assistant/core/issues/25665
|
Error while setting up platform ps4
Traceback (most recent call last):
File "/usr/src/homeassistant/homeassistant/helpers/entity_platform.py", line 146, in _async_setup_platform
await asyncio.wait_for(asyncio.shield(task), SLOW_SETUP_MAX_WAIT)
File "/usr/local/lib/python3.7/asyncio/tasks.py", line 442, in wait_for
return fut.result()
File "/usr/src/homeassistant/homeassistant/components/ps4/media_player.py", line 66, in async_setup_platform
creds = config.data[CONF_TOKEN]
AttributeError: 'collections.OrderedDict' object has no attribute 'data'
|
AttributeError
|
async def async_setup_entry(hass, config_entry):
"""Set up TPLink from a config entry."""
from pyHS100 import SmartBulb, SmartPlug, SmartDeviceException
devices = {}
config_data = hass.data[DOMAIN].get(ATTR_CONFIG)
# These will contain the initialized devices
lights = hass.data[DOMAIN][CONF_LIGHT] = []
switches = hass.data[DOMAIN][CONF_SWITCH] = []
# If discovery is defined and not disabled, discover devices
# If initialized from configure integrations, there's no config
# so we default here to True
if config_data is None or config_data[CONF_DISCOVERY]:
devs = await _async_has_devices(hass)
_LOGGER.info("Discovered %s TP-Link smart home device(s)", len(devs))
devices.update(devs)
def _device_for_type(host, type_):
dev = None
if type_ == CONF_LIGHT:
dev = SmartBulb(host)
elif type_ == CONF_SWITCH:
dev = SmartPlug(host)
return dev
# When arriving from configure integrations, we have no config data.
if config_data is not None:
for type_ in [CONF_LIGHT, CONF_SWITCH]:
for entry in config_data[type_]:
try:
host = entry["host"]
dev = _device_for_type(host, type_)
devices[host] = dev
_LOGGER.debug("Succesfully added %s %s: %s", type_, host, dev)
except SmartDeviceException as ex:
_LOGGER.error("Unable to initialize %s %s: %s", type_, host, ex)
# This is necessary to avoid I/O blocking on is_dimmable
def _fill_device_lists():
for dev in devices.values():
if isinstance(dev, SmartPlug):
try:
if dev.is_dimmable: # Dimmers act as lights
lights.append(dev)
else:
switches.append(dev)
except SmartDeviceException as ex:
_LOGGER.error("Unable to connect to device %s: %s", dev.host, ex)
elif isinstance(dev, SmartBulb):
lights.append(dev)
else:
_LOGGER.error("Unknown smart device type: %s", type(dev))
# Avoid blocking on is_dimmable
await hass.async_add_executor_job(_fill_device_lists)
forward_setup = hass.config_entries.async_forward_entry_setup
if lights:
_LOGGER.debug("Got %s lights: %s", len(lights), lights)
hass.async_create_task(forward_setup(config_entry, "light"))
if switches:
_LOGGER.debug("Got %s switches: %s", len(switches), switches)
hass.async_create_task(forward_setup(config_entry, "switch"))
return True
|
async def async_setup_entry(hass, config_entry):
"""Set up TPLink from a config entry."""
from pyHS100 import SmartBulb, SmartPlug, SmartDeviceException
devices = {}
config_data = hass.data[DOMAIN].get(ATTR_CONFIG)
# These will contain the initialized devices
lights = hass.data[DOMAIN][CONF_LIGHT] = []
switches = hass.data[DOMAIN][CONF_SWITCH] = []
# If discovery is defined and not disabled, discover devices
# If initialized from configure integrations, there's no config
# so we default here to True
if config_data is None or config_data[CONF_DISCOVERY]:
devs = await _async_has_devices(hass)
_LOGGER.info("Discovered %s TP-Link smart home device(s)", len(devs))
devices.update(devs)
def _device_for_type(host, type_):
dev = None
if type_ == CONF_LIGHT:
dev = SmartBulb(host)
elif type_ == CONF_SWITCH:
dev = SmartPlug(host)
return dev
# When arriving from configure integrations, we have no config data.
if config_data is not None:
for type_ in [CONF_LIGHT, CONF_SWITCH]:
for entry in config_data[type_]:
try:
host = entry["host"]
dev = _device_for_type(host, type_)
devices[host] = dev
_LOGGER.debug("Succesfully added %s %s: %s", type_, host, dev)
except SmartDeviceException as ex:
_LOGGER.error("Unable to initialize %s %s: %s", type_, host, ex)
# This is necessary to avoid I/O blocking on is_dimmable
def _fill_device_lists():
for dev in devices.values():
if isinstance(dev, SmartPlug):
if dev.is_dimmable: # Dimmers act as lights
lights.append(dev)
else:
switches.append(dev)
elif isinstance(dev, SmartBulb):
lights.append(dev)
else:
_LOGGER.error("Unknown smart device type: %s", type(dev))
# Avoid blocking on is_dimmable
await hass.async_add_executor_job(_fill_device_lists)
forward_setup = hass.config_entries.async_forward_entry_setup
if lights:
_LOGGER.debug("Got %s lights: %s", len(lights), lights)
hass.async_create_task(forward_setup(config_entry, "light"))
if switches:
_LOGGER.debug("Got %s switches: %s", len(switches), switches)
hass.async_create_task(forward_setup(config_entry, "switch"))
return True
|
https://github.com/home-assistant/core/issues/21725
|
2019-03-07 01:08:44 ERROR (MainThread) [homeassistant.config_entries] Error setting up entry TP-Link Smart Home for tplink
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 116, in _query_helper
request=request,
File "/usr/local/lib/python3.7/site-packages/pyHS100/protocol.py", line 47, in query
sock = socket.create_connection((host, port), timeout)
File "/usr/local/lib/python3.7/socket.py", line 727, in create_connection
raise err
File "/usr/local/lib/python3.7/socket.py", line 716, in create_connection
sock.connect(sa)
OSError: [Errno 113] No route to host
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/src/app/homeassistant/config_entries.py", line 283, in async_setup
result = await component.async_setup_entry(hass, self)
File "/usr/src/app/homeassistant/components/tplink/__init__.py", line 120, in async_setup_entry
await hass.async_add_executor_job(_fill_device_lists)
File "/usr/local/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/usr/src/app/homeassistant/components/tplink/__init__.py", line 110, in _fill_device_lists
if dev.is_dimmable: # Dimmers act as lights
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartplug.py", line 131, in is_dimmable
return "brightness" in self.sys_info
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 185, in sys_info
return defaultdict(lambda: None, self.get_sysinfo())
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 195, in get_sysinfo
return self._query_helper("system", "get_sysinfo")
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 119, in _query_helper
raise SmartDeviceException('Communication error') from ex
pyHS100.smartdevice.SmartDeviceException: Communication error
|
OSError
|
def _fill_device_lists():
for dev in devices.values():
if isinstance(dev, SmartPlug):
try:
if dev.is_dimmable: # Dimmers act as lights
lights.append(dev)
else:
switches.append(dev)
except SmartDeviceException as ex:
_LOGGER.error("Unable to connect to device %s: %s", dev.host, ex)
elif isinstance(dev, SmartBulb):
lights.append(dev)
else:
_LOGGER.error("Unknown smart device type: %s", type(dev))
|
def _fill_device_lists():
for dev in devices.values():
if isinstance(dev, SmartPlug):
if dev.is_dimmable: # Dimmers act as lights
lights.append(dev)
else:
switches.append(dev)
elif isinstance(dev, SmartBulb):
lights.append(dev)
else:
_LOGGER.error("Unknown smart device type: %s", type(dev))
|
https://github.com/home-assistant/core/issues/21725
|
2019-03-07 01:08:44 ERROR (MainThread) [homeassistant.config_entries] Error setting up entry TP-Link Smart Home for tplink
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 116, in _query_helper
request=request,
File "/usr/local/lib/python3.7/site-packages/pyHS100/protocol.py", line 47, in query
sock = socket.create_connection((host, port), timeout)
File "/usr/local/lib/python3.7/socket.py", line 727, in create_connection
raise err
File "/usr/local/lib/python3.7/socket.py", line 716, in create_connection
sock.connect(sa)
OSError: [Errno 113] No route to host
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/src/app/homeassistant/config_entries.py", line 283, in async_setup
result = await component.async_setup_entry(hass, self)
File "/usr/src/app/homeassistant/components/tplink/__init__.py", line 120, in async_setup_entry
await hass.async_add_executor_job(_fill_device_lists)
File "/usr/local/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/usr/src/app/homeassistant/components/tplink/__init__.py", line 110, in _fill_device_lists
if dev.is_dimmable: # Dimmers act as lights
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartplug.py", line 131, in is_dimmable
return "brightness" in self.sys_info
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 185, in sys_info
return defaultdict(lambda: None, self.get_sysinfo())
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 195, in get_sysinfo
return self._query_helper("system", "get_sysinfo")
File "/usr/local/lib/python3.7/site-packages/pyHS100/smartdevice.py", line 119, in _query_helper
raise SmartDeviceException('Communication error') from ex
pyHS100.smartdevice.SmartDeviceException: Communication error
|
OSError
|
def __init__(self, device, device_type, xiaomi_hub):
"""Initialize the Xiaomi device."""
self._state = None
self._is_available = True
self._sid = device["sid"]
self._name = "{}_{}".format(device_type, self._sid)
self._type = device_type
self._write_to_hub = xiaomi_hub.write_to_hub
self._get_from_hub = xiaomi_hub.get_from_hub
self._device_state_attributes = {}
self._remove_unavailability_tracker = None
self._xiaomi_hub = xiaomi_hub
self.parse_data(device["data"], device["raw_data"])
self.parse_voltage(device["data"])
if hasattr(self, "_data_key") and self._data_key: # pylint: disable=no-member
self._unique_id = "{}{}".format(
self._data_key, # pylint: disable=no-member
self._sid,
)
else:
self._unique_id = "{}{}".format(self._type, self._sid)
|
def __init__(self, device, device_type, xiaomi_hub):
"""Initialize the Xiaomi device."""
self._state = None
self._is_available = True
self._sid = device["sid"]
self._name = "{}_{}".format(device_type, self._sid)
self._type = device_type
self._write_to_hub = xiaomi_hub.write_to_hub
self._get_from_hub = xiaomi_hub.get_from_hub
self._device_state_attributes = {}
self._remove_unavailability_tracker = None
self._xiaomi_hub = xiaomi_hub
self.parse_data(device["data"], device["raw_data"])
self.parse_voltage(device["data"])
if hasattr(self, "_data_key") and self._data_key: # pylint: disable=no-member
self._unique_id = slugify(
"{}-{}".format(
self._data_key, # pylint: disable=no-member
self._sid,
)
)
else:
self._unique_id = slugify("{}-{}".format(self._type, self._sid))
|
https://github.com/home-assistant/core/issues/13522
|
2018-03-29 03:47:13 ERROR (Thread-7) [miio.device] Unable to discover a device at address 192.168.10.213
2018-03-29 03:47:13 ERROR (MainThread) [homeassistant.components.fan] Error while setting up platform smart_mi_fan
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.5/site-packages/homeassistant/helpers/entity_platform.py", line 82, in async_setup
SLOW_SETUP_MAX_WAIT, loop=hass.loop)
File "/usr/lib/python3.5/asyncio/tasks.py", line 400, in wait_for
return fut.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 77, in setup_platform
SmartMiFan(hass, name, host, token),
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 165, in __init__
self._state_attrs = self.fan_get_prop()
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 404, in fan_get_prop
"natural_level", "child_lock", "buzzer", "led_b"])
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 210, in send
self.do_discover()
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 148, in do_discover
raise DeviceException("Unable to discover the device %s" % self.ip)
miio.device.DeviceException: Unable to discover the device 192.168.10.213
|
miio.device.DeviceException
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Perform the setup for Xiaomi devices."""
devices = []
for _, gateway in hass.data[PY_XIAOMI_GATEWAY].gateways.items():
for device in gateway.devices["cover"]:
model = device["model"]
if model == "curtain":
devices.append(XiaomiGenericCover(device, "Curtain", "status", gateway))
add_entities(devices)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Perform the setup for Xiaomi devices."""
devices = []
for _, gateway in hass.data[PY_XIAOMI_GATEWAY].gateways.items():
for device in gateway.devices["cover"]:
model = device["model"]
if model == "curtain":
devices.append(
XiaomiGenericCover(
device,
"Curtain",
{"status": "status", "pos": "curtain_level"},
gateway,
)
)
add_entities(devices)
|
https://github.com/home-assistant/core/issues/13522
|
2018-03-29 03:47:13 ERROR (Thread-7) [miio.device] Unable to discover a device at address 192.168.10.213
2018-03-29 03:47:13 ERROR (MainThread) [homeassistant.components.fan] Error while setting up platform smart_mi_fan
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.5/site-packages/homeassistant/helpers/entity_platform.py", line 82, in async_setup
SLOW_SETUP_MAX_WAIT, loop=hass.loop)
File "/usr/lib/python3.5/asyncio/tasks.py", line 400, in wait_for
return fut.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 77, in setup_platform
SmartMiFan(hass, name, host, token),
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 165, in __init__
self._state_attrs = self.fan_get_prop()
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 404, in fan_get_prop
"natural_level", "child_lock", "buzzer", "led_b"])
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 210, in send
self.do_discover()
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 148, in do_discover
raise DeviceException("Unable to discover the device %s" % self.ip)
miio.device.DeviceException: Unable to discover the device 192.168.10.213
|
miio.device.DeviceException
|
def close_cover(self, **kwargs):
"""Close the cover."""
self._write_to_hub(self._sid, **{self._data_key: "close"})
|
def close_cover(self, **kwargs):
"""Close the cover."""
self._write_to_hub(self._sid, **{self._data_key["status"]: "close"})
|
https://github.com/home-assistant/core/issues/13522
|
2018-03-29 03:47:13 ERROR (Thread-7) [miio.device] Unable to discover a device at address 192.168.10.213
2018-03-29 03:47:13 ERROR (MainThread) [homeassistant.components.fan] Error while setting up platform smart_mi_fan
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.5/site-packages/homeassistant/helpers/entity_platform.py", line 82, in async_setup
SLOW_SETUP_MAX_WAIT, loop=hass.loop)
File "/usr/lib/python3.5/asyncio/tasks.py", line 400, in wait_for
return fut.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 77, in setup_platform
SmartMiFan(hass, name, host, token),
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 165, in __init__
self._state_attrs = self.fan_get_prop()
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 404, in fan_get_prop
"natural_level", "child_lock", "buzzer", "led_b"])
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 210, in send
self.do_discover()
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 148, in do_discover
raise DeviceException("Unable to discover the device %s" % self.ip)
miio.device.DeviceException: Unable to discover the device 192.168.10.213
|
miio.device.DeviceException
|
def open_cover(self, **kwargs):
"""Open the cover."""
self._write_to_hub(self._sid, **{self._data_key: "open"})
|
def open_cover(self, **kwargs):
"""Open the cover."""
self._write_to_hub(self._sid, **{self._data_key["status"]: "open"})
|
https://github.com/home-assistant/core/issues/13522
|
2018-03-29 03:47:13 ERROR (Thread-7) [miio.device] Unable to discover a device at address 192.168.10.213
2018-03-29 03:47:13 ERROR (MainThread) [homeassistant.components.fan] Error while setting up platform smart_mi_fan
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.5/site-packages/homeassistant/helpers/entity_platform.py", line 82, in async_setup
SLOW_SETUP_MAX_WAIT, loop=hass.loop)
File "/usr/lib/python3.5/asyncio/tasks.py", line 400, in wait_for
return fut.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 77, in setup_platform
SmartMiFan(hass, name, host, token),
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 165, in __init__
self._state_attrs = self.fan_get_prop()
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 404, in fan_get_prop
"natural_level", "child_lock", "buzzer", "led_b"])
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 210, in send
self.do_discover()
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 148, in do_discover
raise DeviceException("Unable to discover the device %s" % self.ip)
miio.device.DeviceException: Unable to discover the device 192.168.10.213
|
miio.device.DeviceException
|
def stop_cover(self, **kwargs):
"""Stop the cover."""
self._write_to_hub(self._sid, **{self._data_key: "stop"})
|
def stop_cover(self, **kwargs):
"""Stop the cover."""
self._write_to_hub(self._sid, **{self._data_key["status"]: "stop"})
|
https://github.com/home-assistant/core/issues/13522
|
2018-03-29 03:47:13 ERROR (Thread-7) [miio.device] Unable to discover a device at address 192.168.10.213
2018-03-29 03:47:13 ERROR (MainThread) [homeassistant.components.fan] Error while setting up platform smart_mi_fan
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.5/site-packages/homeassistant/helpers/entity_platform.py", line 82, in async_setup
SLOW_SETUP_MAX_WAIT, loop=hass.loop)
File "/usr/lib/python3.5/asyncio/tasks.py", line 400, in wait_for
return fut.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 77, in setup_platform
SmartMiFan(hass, name, host, token),
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 165, in __init__
self._state_attrs = self.fan_get_prop()
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 404, in fan_get_prop
"natural_level", "child_lock", "buzzer", "led_b"])
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 210, in send
self.do_discover()
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 148, in do_discover
raise DeviceException("Unable to discover the device %s" % self.ip)
miio.device.DeviceException: Unable to discover the device 192.168.10.213
|
miio.device.DeviceException
|
def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
self._write_to_hub(self._sid, **{ATTR_CURTAIN_LEVEL: str(position)})
|
def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
self._write_to_hub(self._sid, **{self._data_key["pos"]: str(position)})
|
https://github.com/home-assistant/core/issues/13522
|
2018-03-29 03:47:13 ERROR (Thread-7) [miio.device] Unable to discover a device at address 192.168.10.213
2018-03-29 03:47:13 ERROR (MainThread) [homeassistant.components.fan] Error while setting up platform smart_mi_fan
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.5/site-packages/homeassistant/helpers/entity_platform.py", line 82, in async_setup
SLOW_SETUP_MAX_WAIT, loop=hass.loop)
File "/usr/lib/python3.5/asyncio/tasks.py", line 400, in wait_for
return fut.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 77, in setup_platform
SmartMiFan(hass, name, host, token),
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 165, in __init__
self._state_attrs = self.fan_get_prop()
File "/home/homeassistant/.homeassistant/custom_components/fan/smart_mi_fan.py", line 404, in fan_get_prop
"natural_level", "child_lock", "buzzer", "led_b"])
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 210, in send
self.do_discover()
File "/srv/homeassistant/lib/python3.5/site-packages/miio/device.py", line 148, in do_discover
raise DeviceException("Unable to discover the device %s" % self.ip)
miio.device.DeviceException: Unable to discover the device 192.168.10.213
|
miio.device.DeviceException
|
def setup_cors(app, origins):
"""Setup cors."""
import aiohttp_cors
cors = aiohttp_cors.setup(
app,
defaults={
host: aiohttp_cors.ResourceOptions(
allow_headers=ALLOWED_CORS_HEADERS,
allow_methods="*",
)
for host in origins
},
)
cors_added = set()
def _allow_cors(route, config=None):
"""Allow cors on a route."""
if hasattr(route, "resource"):
path = route.resource
else:
path = route
path = path.canonical
if path in cors_added:
return
cors.add(route, config)
cors_added.add(path)
app["allow_cors"] = lambda route: _allow_cors(
route,
{
"*": aiohttp_cors.ResourceOptions(
allow_headers=ALLOWED_CORS_HEADERS,
allow_methods="*",
)
},
)
if not origins:
return
async def cors_startup(app):
"""Initialize cors when app starts up."""
for route in list(app.router.routes()):
_allow_cors(route)
app.on_startup.append(cors_startup)
|
def setup_cors(app, origins):
"""Setup cors."""
import aiohttp_cors
cors = aiohttp_cors.setup(
app,
defaults={
host: aiohttp_cors.ResourceOptions(
allow_headers=ALLOWED_CORS_HEADERS,
allow_methods="*",
)
for host in origins
},
)
def allow_cors(route, methods):
"""Allow cors on a route."""
cors.add(
route,
{
"*": aiohttp_cors.ResourceOptions(
allow_headers=ALLOWED_CORS_HEADERS,
allow_methods=methods,
)
},
)
app["allow_cors"] = allow_cors
if not origins:
return
async def cors_startup(app):
"""Initialize cors when app starts up."""
cors_added = set()
for route in list(app.router.routes()):
if hasattr(route, "resource"):
route = route.resource
if route in cors_added:
continue
cors.add(route)
cors_added.add(route)
app.on_startup.append(cors_startup)
|
https://github.com/home-assistant/core/issues/15659
|
2018-07-24 20:02:02 ERROR (MainThread) [homeassistant.core] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/app/homeassistant/components/http/__init__.py", line 132, in start_server
await server.start()
File "/usr/src/app/homeassistant/components/http/__init__.py", line 284, in start
await self.app.startup()
File "/usr/local/lib/python3.6/site-packages/aiohttp/web_app.py", line 278, in startup
await self.on_startup.send(self)
File "/usr/local/lib/python3.6/site-packages/aiohttp/signals.py", line 35, in send
await receiver(*args, **kwargs)
File "/usr/src/app/homeassistant/components/http/cors.py", line 53, in cors_startup
cors.add(route)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/cors_config.py", line 263, in add
return self._cors_impl.add(routing_entity, config)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/cors_config.py", line 137, in add
routing_entity, parsed_config)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/urldispatcher_router_adapter.py", line 240, in set_config_for_routing_entity
resource))
ValueError: CORS is already configured for <PlainResource /auth/token> resource.
|
ValueError
|
async def cors_startup(app):
"""Initialize cors when app starts up."""
for route in list(app.router.routes()):
_allow_cors(route)
|
async def cors_startup(app):
"""Initialize cors when app starts up."""
cors_added = set()
for route in list(app.router.routes()):
if hasattr(route, "resource"):
route = route.resource
if route in cors_added:
continue
cors.add(route)
cors_added.add(route)
|
https://github.com/home-assistant/core/issues/15659
|
2018-07-24 20:02:02 ERROR (MainThread) [homeassistant.core] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/app/homeassistant/components/http/__init__.py", line 132, in start_server
await server.start()
File "/usr/src/app/homeassistant/components/http/__init__.py", line 284, in start
await self.app.startup()
File "/usr/local/lib/python3.6/site-packages/aiohttp/web_app.py", line 278, in startup
await self.on_startup.send(self)
File "/usr/local/lib/python3.6/site-packages/aiohttp/signals.py", line 35, in send
await receiver(*args, **kwargs)
File "/usr/src/app/homeassistant/components/http/cors.py", line 53, in cors_startup
cors.add(route)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/cors_config.py", line 263, in add
return self._cors_impl.add(routing_entity, config)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/cors_config.py", line 137, in add
routing_entity, parsed_config)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/urldispatcher_router_adapter.py", line 240, in set_config_for_routing_entity
resource))
ValueError: CORS is already configured for <PlainResource /auth/token> resource.
|
ValueError
|
def register(self, app, router):
"""Register the view with a router."""
assert self.url is not None, "No url set for view"
urls = [self.url] + self.extra_urls
routes = []
for method in ("get", "post", "delete", "put"):
handler = getattr(self, method, None)
if not handler:
continue
handler = request_handler_factory(self, handler)
for url in urls:
routes.append(router.add_route(method, url, handler))
if not self.cors_allowed:
return
for route in routes:
app["allow_cors"](route)
|
def register(self, app, router):
"""Register the view with a router."""
assert self.url is not None, "No url set for view"
urls = [self.url] + self.extra_urls
routes = []
for method in ("get", "post", "delete", "put"):
handler = getattr(self, method, None)
if not handler:
continue
handler = request_handler_factory(self, handler)
for url in urls:
routes.append((method, router.add_route(method, url, handler)))
if not self.cors_allowed:
return
for method, route in routes:
app["allow_cors"](route, [method.upper()])
|
https://github.com/home-assistant/core/issues/15659
|
2018-07-24 20:02:02 ERROR (MainThread) [homeassistant.core] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/src/app/homeassistant/components/http/__init__.py", line 132, in start_server
await server.start()
File "/usr/src/app/homeassistant/components/http/__init__.py", line 284, in start
await self.app.startup()
File "/usr/local/lib/python3.6/site-packages/aiohttp/web_app.py", line 278, in startup
await self.on_startup.send(self)
File "/usr/local/lib/python3.6/site-packages/aiohttp/signals.py", line 35, in send
await receiver(*args, **kwargs)
File "/usr/src/app/homeassistant/components/http/cors.py", line 53, in cors_startup
cors.add(route)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/cors_config.py", line 263, in add
return self._cors_impl.add(routing_entity, config)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/cors_config.py", line 137, in add
routing_entity, parsed_config)
File "/usr/local/lib/python3.6/site-packages/aiohttp_cors/urldispatcher_router_adapter.py", line 240, in set_config_for_routing_entity
resource))
ValueError: CORS is already configured for <PlainResource /auth/token> resource.
|
ValueError
|
def __init__(self, hass, config):
"""Initialize."""
super().__init__()
self._extra_arguments = config.get(CONF_FFMPEG_ARGUMENTS)
self._last_image = None
self._last_url = None
self._manager = hass.data[DATA_FFMPEG]
self._name = config[CONF_NAME]
self.host = config[CONF_HOST]
self.port = config[CONF_PORT]
self.path = config[CONF_PATH]
self.user = config[CONF_USERNAME]
self.passwd = config[CONF_PASSWORD]
|
def __init__(self, hass, config):
"""Initialize."""
super().__init__()
self._extra_arguments = config.get(CONF_FFMPEG_ARGUMENTS)
self._ftp = None
self._last_image = None
self._last_url = None
self._manager = hass.data[DATA_FFMPEG]
self._name = config[CONF_NAME]
self.host = config[CONF_HOST]
self.port = config[CONF_PORT]
self.path = config[CONF_PATH]
self.user = config[CONF_USERNAME]
self.passwd = config[CONF_PASSWORD]
hass.async_add_job(self._connect_to_client)
|
https://github.com/home-assistant/core/issues/15108
|
Traceback (most recent call last):
File "/usr/lib/python3.6/asyncio/tasks.py", line 179, in _step
result = coro.send(None)
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 474, in send_camera_still
image = await async_get_image(hass, msg['entity_id'])
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 124, in async_get_image
image = await camera.async_camera_image()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 122, in async_camera_image
url = await self._get_latest_video_url()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 96, in _get_latest_video_url
await self._ftp.change_directory(self.path)
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 555, in change_directory
await self.command(cmd, "2xx")
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 242, in command
await self.stream.write(message.encode(encoding=self.encoding))
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 549, in write
await super().write(data)
File "/usr/lib/python3.6/asyncio/tasks.py", line 333, in wait_for
return (yield from fut)
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 301, in write
await self.writer.drain()
File "/usr/lib/python3.6/asyncio/streams.py", line 323, in drain
raise exc
File "/usr/lib/python3.6/asyncio/tasks.py", line 179, in _step
result = coro.send(None)
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 474, in send_camera_still
image = await async_get_image(hass, msg['entity_id'])
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 124, in async_get_image
image = await camera.async_camera_image()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 122, in async_camera_image
url = await self._get_latest_video_url()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 96, in _get_latest_video_url
await self._ftp.change_directory(self.path)
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 555, in change_directory
await self.command(cmd, "2xx")
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 242, in command
await self.stream.write(message.encode(encoding=self.encoding))
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 549, in write
await super().write(data)
File "/usr/lib/python3.6/asyncio/tasks.py", line 333, in wait_for
return (yield from fut)
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 301, in write
await self.writer.drain()
File "/usr/lib/python3.6/asyncio/streams.py", line 323, in drain
raise exc
File "/usr/lib/python3.6/asyncio/tasks.py", line 179, in _step
result = coro.send(None)
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 474, in send_camera_still
image = await async_get_image(hass, msg['entity_id'])
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 124, in async_get_image
image = await camera.async_camera_image()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 122, in async_camera_image
url = await self._get_latest_video_url()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 96, in _get_latest_video_url
await self._ftp.change_directory(self.path)
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 555, in change_directory
await self.command(cmd, "2xx")
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 242, in command
await self.stream.write(message.encode(encoding=self.encoding))
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 549, in write
await super().write(data)
File "/usr/lib/python3.6/asyncio/tasks.py", line 333, in wait_for
return (yield from fut)
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 301, in write
await self.writer.drain()
File "/usr/lib/python3.6/asyncio/streams.py", line 323, in drain
raise exc
File "/usr/lib/python3.6/asyncio/tasks.py", line 179, in _step
result = coro.send(None)
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 474, in send_camera_still
image = await async_get_image(hass, msg['entity_id'])
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 124, in async_get_image
image = await camera.async_camera_image()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 122, in async_camera_image
url = await self._get_latest_video_url()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 96, in _get_latest_video_url
await self._ftp.change_directory(self.path)
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 555, in change_directory
await self.command(cmd, "2xx")
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 242, in command
await self.stream.write(message.encode(encoding=self.encoding))
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 549, in write
await super().write(data)
File "/usr/lib/python3.6/asyncio/tasks.py", line 333, in wait_for
return (yield from fut)
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 301, in write
await self.writer.drain()
File "/usr/lib/python3.6/asyncio/streams.py", line 323, in drain
raise exc
File "/usr/lib/python3.6/asyncio/tasks.py", line 179, in _step
result = coro.send(None)
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 474, in send_camera_still
image = await async_get_image(hass, msg['entity_id'])
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/__init__.py", line 124, in async_get_image
image = await camera.async_camera_image()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 122, in async_camera_image
url = await self._get_latest_video_url()
File "/usr/lib/python3.6/site-packages/homeassistant/components/camera/yi.py", line 96, in _get_latest_video_url
await self._ftp.change_directory(self.path)
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 555, in change_directory
await self.command(cmd, "2xx")
File "/config/deps/lib/python3.6/site-packages/aioftp/client.py", line 242, in command
await self.stream.write(message.encode(encoding=self.encoding))
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 549, in write
await super().write(data)
File "/usr/lib/python3.6/asyncio/tasks.py", line 333, in wait_for
return (yield from fut)
File "/config/deps/lib/python3.6/site-packages/aioftp/common.py", line 301, in write
await self.writer.drain()
File "/usr/lib/python3.6/asyncio/streams.py", line 323, in drain
raise exc
File "/usr/lib/python3.6/asyncio/selector_events.py", line 762, in write
n = self._sock.send(data)
BrokenPipeError: [Errno 32] Broken pipe
|
BrokenPipeError
|
def _recursive_merge(conf, package):
"""Merge package into conf, recursively."""
error = False
for key, pack_conf in package.items():
if isinstance(pack_conf, dict):
if not pack_conf:
continue
conf[key] = conf.get(key, OrderedDict())
error = _recursive_merge(conf=conf[key], package=pack_conf)
elif isinstance(pack_conf, list):
if not pack_conf:
continue
conf[key] = cv.ensure_list(conf.get(key))
conf[key].extend(cv.ensure_list(pack_conf))
else:
if conf.get(key) is not None:
return key
else:
conf[key] = pack_conf
return error
|
def _recursive_merge(pack_name, comp_name, config, conf, package):
"""Merge package into conf, recursively."""
for key, pack_conf in package.items():
if isinstance(pack_conf, dict):
if not pack_conf:
continue
conf[key] = conf.get(key, OrderedDict())
_recursive_merge(
pack_name, comp_name, config, conf=conf[key], package=pack_conf
)
elif isinstance(pack_conf, list):
if not pack_conf:
continue
conf[key] = cv.ensure_list(conf.get(key))
conf[key].extend(cv.ensure_list(pack_conf))
else:
if conf.get(key) is not None:
_log_pkg_error(
pack_name,
comp_name,
config,
"has keys that are defined multiple times",
)
else:
conf[key] = pack_conf
|
https://github.com/home-assistant/core/issues/14906
|
2018-06-10 00:27:15 INFO (SyncWorker_0) [homeassistant.config] Upgrading configuration directory from 0.70.1 to 0.71.0
2018-06-10 00:27:15 INFO (SyncWorker_0) [homeassistant.config] Migrating old system configuration files to new locations
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded device_tracker from homeassistant.components.device_tracker
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded sensor from homeassistant.components.sensor
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded automation from homeassistant.components.automation
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded map from homeassistant.components.map
Traceback (most recent call last):
File "/usr/local/lib/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/src/app/homeassistant/__main__.py", line 377, in <module>
sys.exit(main())
File "/usr/src/app/homeassistant/__main__.py", line 369, in main
exit_code = setup_and_run_hass(config_dir, args)
File "/usr/src/app/homeassistant/__main__.py", line 274, in setup_and_run_hass
log_no_color=args.log_no_color)
File "/usr/src/app/homeassistant/bootstrap.py", line 179, in from_config_file
log_rotate_days, log_file, log_no_color)
File "uvloop/loop.pyx", line 1422, in uvloop.loop.Loop.run_until_complete
File "/usr/src/app/homeassistant/bootstrap.py", line 215, in async_from_config_file
config_dict, hass, enable_log=False, skip_pip=skip_pip)
File "/usr/src/app/homeassistant/bootstrap.py", line 110, in async_from_config_dict
hass, config, core_config.get(conf_util.CONF_PACKAGES, {}))
File "/usr/src/app/homeassistant/config.py", line 638, in merge_packages_config
conf=config[comp_name], package=comp_conf)
File "/usr/src/app/homeassistant/config.py", line 553, in _recursive_merge
for key, pack_conf in package.items():
AttributeError: 'NoneType' object has no attribute 'items'
|
AttributeError
|
def merge_packages_config(hass, config, packages, _log_pkg_error=_log_pkg_error):
"""Merge packages into the top-level configuration. Mutate config."""
# pylint: disable=too-many-nested-blocks
PACKAGES_CONFIG_SCHEMA(packages)
for pack_name, pack_conf in packages.items():
for comp_name, comp_conf in pack_conf.items():
if comp_name == CONF_CORE:
continue
component = get_component(hass, comp_name)
if component is None:
_log_pkg_error(pack_name, comp_name, config, "does not exist")
continue
if hasattr(component, "PLATFORM_SCHEMA"):
if not comp_conf:
continue # Ensure we dont add Falsy items to list
config[comp_name] = cv.ensure_list(config.get(comp_name))
config[comp_name].extend(cv.ensure_list(comp_conf))
continue
if hasattr(component, "CONFIG_SCHEMA"):
merge_type, _ = _identify_config_schema(component)
if merge_type == "list":
if not comp_conf:
continue # Ensure we dont add Falsy items to list
config[comp_name] = cv.ensure_list(config.get(comp_name))
config[comp_name].extend(cv.ensure_list(comp_conf))
continue
if comp_conf is None:
comp_conf = OrderedDict()
if not isinstance(comp_conf, dict):
_log_pkg_error(
pack_name, comp_name, config, "cannot be merged. Expected a dict."
)
continue
if comp_name not in config or config[comp_name] is None:
config[comp_name] = OrderedDict()
if not isinstance(config[comp_name], dict):
_log_pkg_error(
pack_name,
comp_name,
config,
"cannot be merged. Dict expected in main config.",
)
continue
if not isinstance(comp_conf, dict):
_log_pkg_error(
pack_name,
comp_name,
config,
"cannot be merged. Dict expected in package.",
)
continue
error = _recursive_merge(conf=config[comp_name], package=comp_conf)
if error:
_log_pkg_error(
pack_name, comp_name, config, "has duplicate key '{}'".format(error)
)
return config
|
def merge_packages_config(hass, config, packages, _log_pkg_error=_log_pkg_error):
"""Merge packages into the top-level configuration. Mutate config."""
# pylint: disable=too-many-nested-blocks
PACKAGES_CONFIG_SCHEMA(packages)
for pack_name, pack_conf in packages.items():
for comp_name, comp_conf in pack_conf.items():
if comp_name == CONF_CORE:
continue
component = get_component(hass, comp_name)
if component is None:
_log_pkg_error(pack_name, comp_name, config, "does not exist")
continue
if hasattr(component, "PLATFORM_SCHEMA"):
if not comp_conf:
continue # Ensure we dont add Falsy items to list
config[comp_name] = cv.ensure_list(config.get(comp_name))
config[comp_name].extend(cv.ensure_list(comp_conf))
continue
if hasattr(component, "CONFIG_SCHEMA"):
merge_type, _ = _identify_config_schema(component)
if merge_type == "list":
if not comp_conf:
continue # Ensure we dont add Falsy items to list
config[comp_name] = cv.ensure_list(config.get(comp_name))
config[comp_name].extend(cv.ensure_list(comp_conf))
continue
if merge_type == "dict":
if comp_conf is None:
comp_conf = OrderedDict()
if not isinstance(comp_conf, dict):
_log_pkg_error(
pack_name,
comp_name,
config,
"cannot be merged. Expected a dict.",
)
continue
if comp_name not in config:
config[comp_name] = OrderedDict()
if not isinstance(config[comp_name], dict):
_log_pkg_error(
pack_name,
comp_name,
config,
"cannot be merged. Dict expected in main config.",
)
continue
for key, val in comp_conf.items():
if key in config[comp_name]:
_log_pkg_error(
pack_name,
comp_name,
config,
"duplicate key '{}'".format(key),
)
continue
config[comp_name][key] = val
continue
# The last merge type are sections that require recursive merging
if comp_name in config:
_recursive_merge(
pack_name,
comp_name,
config,
conf=config[comp_name],
package=comp_conf,
)
continue
config[comp_name] = comp_conf
return config
|
https://github.com/home-assistant/core/issues/14906
|
2018-06-10 00:27:15 INFO (SyncWorker_0) [homeassistant.config] Upgrading configuration directory from 0.70.1 to 0.71.0
2018-06-10 00:27:15 INFO (SyncWorker_0) [homeassistant.config] Migrating old system configuration files to new locations
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded device_tracker from homeassistant.components.device_tracker
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded sensor from homeassistant.components.sensor
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded automation from homeassistant.components.automation
2018-06-10 00:27:15 INFO (MainThread) [homeassistant.loader] Loaded map from homeassistant.components.map
Traceback (most recent call last):
File "/usr/local/lib/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/src/app/homeassistant/__main__.py", line 377, in <module>
sys.exit(main())
File "/usr/src/app/homeassistant/__main__.py", line 369, in main
exit_code = setup_and_run_hass(config_dir, args)
File "/usr/src/app/homeassistant/__main__.py", line 274, in setup_and_run_hass
log_no_color=args.log_no_color)
File "/usr/src/app/homeassistant/bootstrap.py", line 179, in from_config_file
log_rotate_days, log_file, log_no_color)
File "uvloop/loop.pyx", line 1422, in uvloop.loop.Loop.run_until_complete
File "/usr/src/app/homeassistant/bootstrap.py", line 215, in async_from_config_file
config_dict, hass, enable_log=False, skip_pip=skip_pip)
File "/usr/src/app/homeassistant/bootstrap.py", line 110, in async_from_config_dict
hass, config, core_config.get(conf_util.CONF_PACKAGES, {}))
File "/usr/src/app/homeassistant/config.py", line 638, in merge_packages_config
conf=config[comp_name], package=comp_conf)
File "/usr/src/app/homeassistant/config.py", line 553, in _recursive_merge
for key, pack_conf in package.items():
AttributeError: 'NoneType' object has no attribute 'items'
|
AttributeError
|
async def async_setup(hass, config):
"""Setup configured zones as well as home assistant zone if necessary."""
hass.data[DOMAIN] = {}
entities = set()
zone_entries = configured_zones(hass)
for _, entry in config_per_platform(config, DOMAIN):
if slugify(entry[CONF_NAME]) not in zone_entries:
zone = Zone(
hass,
entry[CONF_NAME],
entry[CONF_LATITUDE],
entry[CONF_LONGITUDE],
entry.get(CONF_RADIUS),
entry.get(CONF_ICON),
entry.get(CONF_PASSIVE),
)
zone.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, entry[CONF_NAME], entities
)
hass.async_add_job(zone.async_update_ha_state())
entities.add(zone.entity_id)
if ENTITY_ID_HOME not in entities and HOME_ZONE not in zone_entries:
zone = Zone(
hass,
hass.config.location_name,
hass.config.latitude,
hass.config.longitude,
DEFAULT_RADIUS,
ICON_HOME,
False,
)
zone.entity_id = ENTITY_ID_HOME
hass.async_add_job(zone.async_update_ha_state())
return True
|
async def async_setup(hass, config):
"""Setup configured zones as well as home assistant zone if necessary."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
zone_entries = configured_zones(hass)
for _, entry in config_per_platform(config, DOMAIN):
name = slugify(entry[CONF_NAME])
if name not in zone_entries:
zone = Zone(
hass,
entry[CONF_NAME],
entry[CONF_LATITUDE],
entry[CONF_LONGITUDE],
entry.get(CONF_RADIUS),
entry.get(CONF_ICON),
entry.get(CONF_PASSIVE),
)
zone.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, entry[CONF_NAME], None, hass
)
hass.async_add_job(zone.async_update_ha_state())
hass.data[DOMAIN][name] = zone
if HOME_ZONE not in hass.data[DOMAIN] and HOME_ZONE not in zone_entries:
name = hass.config.location_name
zone = Zone(
hass,
name,
hass.config.latitude,
hass.config.longitude,
DEFAULT_RADIUS,
ICON_HOME,
False,
)
zone.entity_id = ENTITY_ID_HOME
hass.async_add_job(zone.async_update_ha_state())
hass.data[DOMAIN][slugify(name)] = zone
return True
|
https://github.com/home-assistant/core/issues/14396
|
Error during setup of component zone
Traceback (most recent call last):
File "/srv/homeassistant/lib/python3.6/site-packages/homeassistant/setup.py", line 142, in _async_setup_component
result = await component.async_setup(hass, processed_config)
File "/srv/homeassistant/lib/python3.6/site-packages/homeassistant/components/zone/__init__.py", line 68, in async_setup
hass.data[DOMAIN][slugify(name)] = zone
File "/srv/homeassistant/lib/python3.6/site-packages/homeassistant/util/__init__.py", line 43, in slugify
text = normalize('NFKD', text)
TypeError: normalize() argument 2 must be str, not None
|
TypeError
|
def update_as_of(self, utc_point_in_time):
"""Calculate sun state at a point in UTC time."""
import astral
mod = -1
while True:
try:
next_rising_dt = self.location.sunrise(
utc_point_in_time + timedelta(days=mod), local=False
)
if next_rising_dt > utc_point_in_time:
break
except astral.AstralError:
pass
mod += 1
mod = -1
while True:
try:
next_setting_dt = self.location.sunset(
utc_point_in_time + timedelta(days=mod), local=False
)
if next_setting_dt > utc_point_in_time:
break
except astral.AstralError:
pass
mod += 1
self.next_rising = next_rising_dt
self.next_setting = next_setting_dt
|
def update_as_of(self, utc_point_in_time):
"""Calculate sun state at a point in UTC time."""
mod = -1
while True:
next_rising_dt = self.location.sunrise(
utc_point_in_time + timedelta(days=mod), local=False
)
if next_rising_dt > utc_point_in_time:
break
mod += 1
mod = -1
while True:
next_setting_dt = self.location.sunset(
utc_point_in_time + timedelta(days=mod), local=False
)
if next_setting_dt > utc_point_in_time:
break
mod += 1
self.next_rising = next_rising_dt
self.next_setting = next_setting_dt
|
https://github.com/home-assistant/core/issues/2090
|
16-05-16 15:41:28 homeassistant.bootstrap: Error during setup of component sun
Traceback (most recent call last):
File "/home/vidar/.homeassistant/deps/astral.py", line 1464, in sunrise_utc
return self._calc_time(90 + 0.833, SUN_RISING, date, latitude, longitude)
File "/home/vidar/.homeassistant/deps/astral.py", line 2120, in _calc_time
hourangle = self._hour_angle(latitude, solarDec, depression)
File "/home/vidar/.homeassistant/deps/astral.py", line 2093, in _hour_angle
HA = acos(h)
ValueError: math domain error
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.4/dist-packages/homeassistant/bootstrap.py", line 157, in _setup_component
if not component.setup(hass, config):
File "/usr/local/lib/python3.4/dist-packages/homeassistant/components/sun.py", line 118, in setup
sun.point_in_time_listener(dt_util.utcnow())
File "/usr/local/lib/python3.4/dist-packages/homeassistant/components/sun.py", line 194, in point_in_time_listener
self.update_as_of(now)
File "/usr/local/lib/python3.4/dist-packages/homeassistant/components/sun.py", line 176, in update_as_of
utc_point_in_time + timedelta(days=mod), local=False)
File "/home/vidar/.homeassistant/deps/astral.py", line 769, in sunrise
sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude)
File "/home/vidar/.homeassistant/deps/astral.py", line 1466, in sunrise_utc
raise AstralError(('Sun remains below the horizon on this day, '
astral.AstralError: Sun remains below the horizon on this day, at this location.
|
ValueError
|
def detect_location_info():
"""Detect location information."""
try:
raw_info = requests.get("https://freegeoip.net/json/", timeout=5).json()
except (requests.RequestException, ValueError):
return None
data = {key: raw_info.get(key) for key in LocationInfo._fields}
# From Wikipedia: Fahrenheit is used in the Bahamas, Belize,
# the Cayman Islands, Palau, and the United States and associated
# territories of American Samoa and the U.S. Virgin Islands
data["use_fahrenheit"] = data["country_code"] in (
"BS",
"BZ",
"KY",
"PW",
"US",
"AS",
"VI",
)
return LocationInfo(**data)
|
def detect_location_info():
"""Detect location information."""
try:
raw_info = requests.get("https://freegeoip.net/json/", timeout=5).json()
except requests.RequestException:
return
data = {key: raw_info.get(key) for key in LocationInfo._fields}
# From Wikipedia: Fahrenheit is used in the Bahamas, Belize,
# the Cayman Islands, Palau, and the United States and associated
# territories of American Samoa and the U.S. Virgin Islands
data["use_fahrenheit"] = data["country_code"] in (
"BS",
"BZ",
"KY",
"PW",
"US",
"AS",
"VI",
)
return LocationInfo(**data)
|
https://github.com/home-assistant/core/issues/1378
|
C:\Users\fredr>py -m homeassistant --open-ui
Traceback (most recent call last):
File "", line 1, in
File "C:\Users\fredr\AppData\Local\Programs\Python\Python35\lib\multiprocessing\spawn.py", line 106, in spawn_main
exitcode = main(fd)
File "C:\Users\fredr\AppData\Local\Programs\Python\Python35\lib\multiprocessing\spawn.py", line 116, in main
self = pickle.load(from_parent)
AttributeError: Can't get attribute 'setup_and_run_hass' on <module '__main' (built-in)>
|
AttributeError
|
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup the RFXtrx platform."""
import RFXtrx as rfxtrxmod
lights = []
devices = config.get("devices", None)
if devices:
for entity_id, entity_info in devices.items():
if entity_id not in rfxtrx.RFX_DEVICES:
_LOGGER.info("Add %s rfxtrx.light", entity_info[ATTR_NAME])
# Check if i must fire event
fire_event = entity_info.get(ATTR_FIREEVENT, False)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event}
rfxobject = rfxtrx.get_rfx_object(entity_info[ATTR_PACKETID])
if not isinstance(rfxobject.device, rfxtrxmod.LightDevice):
_LOGGER.exception("%s is not a light", entity_info[ATTR_NAME])
return
new_light = RfxtrxLight(entity_info[ATTR_NAME], rfxobject, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_light
lights.append(new_light)
add_devices_callback(lights)
def light_update(event):
"""Callback for light updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightDevice):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.light (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_light = RfxtrxLight(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_light
add_devices_callback([new_light])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES:
_LOGGER.debug(
"EntityID: %s light_update. Command: %s",
entity_id,
event.values["Command"],
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
elif event.values["Command"] == "Set level":
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._brightness = (
event.values["Dim level"] * 255 // 100
)
# Update the rfxtrx device state
is_on = rfxtrx.RFX_DEVICES[entity_id]._brightness > 0
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
else:
return
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
# Subscribe to main rfxtrx events
if light_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(light_update)
|
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup the RFXtrx platform."""
import RFXtrx as rfxtrxmod
lights = []
devices = config.get("devices", None)
if devices:
for entity_id, entity_info in devices.items():
if entity_id not in rfxtrx.RFX_DEVICES:
_LOGGER.info("Add %s rfxtrx.light", entity_info[ATTR_NAME])
# Check if i must fire event
fire_event = entity_info.get(ATTR_FIREEVENT, False)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event}
rfxobject = rfxtrx.get_rfx_object(entity_info[ATTR_PACKETID])
new_light = RfxtrxLight(entity_info[ATTR_NAME], rfxobject, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_light
lights.append(new_light)
add_devices_callback(lights)
def light_update(event):
"""Callback for light updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightingDevice):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.light (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_light = RfxtrxLight(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_light
add_devices_callback([new_light])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES and isinstance(
rfxtrx.RFX_DEVICES[entity_id], RfxtrxLight
):
_LOGGER.debug(
"EntityID: %s light_update. Command: %s",
entity_id,
event.values["Command"],
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
# Subscribe to main rfxtrx events
if light_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(light_update)
|
https://github.com/home-assistant/core/issues/1116
|
INFO:homeassistant.components.rfxtrx:Receive RFXCOM event from <class 'RFXtrx.LightingDevice'> type='LightwaveRF, Siemens' id='f11043:16' => f1104316 : 0a140005f1104310050050
INFO:homeassistant.components.switch.rfxtrx:Automatic add f1104316 rfxtrx.switch (Class: LightingDevice Sub: 0)
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state switch.f1104316__0a140005f1104310050050=off; friendly_name=f1104316 : 0a140005f1104310050050 @ 20:34:47 03-02-2016>, entity_id=switch.f1104316__0a140005f1104310050050>
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state group.all_switches=off; hidden=True, order=0, entity_id=('switch.f1104316__0a140005f1104310050050',), auto=True, friendly_name=all switches @ 20:34:47 03-02-2016>, entity_id=group.all_switches>
Traceback (most recent call last):
File "/usr/local/lib/python3.4/threading.py", line 911, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.4/threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/site-packages/RFXtrx/__init__.py", line 55, in _connect
self._event_callback(event)
File "/usr/src/app/homeassistant/components/rfxtrx.py", line 48, in handle_receive
subscriber(event)
File "/usr/src/app/homeassistant/components/switch/rfxtrx.py", line 81, in switch_update
event.values['Command']
KeyError: 'Command'
|
KeyError
|
def light_update(event):
"""Callback for light updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightDevice):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.light (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_light = RfxtrxLight(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_light
add_devices_callback([new_light])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES:
_LOGGER.debug(
"EntityID: %s light_update. Command: %s", entity_id, event.values["Command"]
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
elif event.values["Command"] == "Set level":
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._brightness = (
event.values["Dim level"] * 255 // 100
)
# Update the rfxtrx device state
is_on = rfxtrx.RFX_DEVICES[entity_id]._brightness > 0
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
else:
return
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
|
def light_update(event):
"""Callback for light updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightingDevice):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.light (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_light = RfxtrxLight(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_light
add_devices_callback([new_light])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES and isinstance(
rfxtrx.RFX_DEVICES[entity_id], RfxtrxLight
):
_LOGGER.debug(
"EntityID: %s light_update. Command: %s", entity_id, event.values["Command"]
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
|
https://github.com/home-assistant/core/issues/1116
|
INFO:homeassistant.components.rfxtrx:Receive RFXCOM event from <class 'RFXtrx.LightingDevice'> type='LightwaveRF, Siemens' id='f11043:16' => f1104316 : 0a140005f1104310050050
INFO:homeassistant.components.switch.rfxtrx:Automatic add f1104316 rfxtrx.switch (Class: LightingDevice Sub: 0)
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state switch.f1104316__0a140005f1104310050050=off; friendly_name=f1104316 : 0a140005f1104310050050 @ 20:34:47 03-02-2016>, entity_id=switch.f1104316__0a140005f1104310050050>
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state group.all_switches=off; hidden=True, order=0, entity_id=('switch.f1104316__0a140005f1104310050050',), auto=True, friendly_name=all switches @ 20:34:47 03-02-2016>, entity_id=group.all_switches>
Traceback (most recent call last):
File "/usr/local/lib/python3.4/threading.py", line 911, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.4/threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/site-packages/RFXtrx/__init__.py", line 55, in _connect
self._event_callback(event)
File "/usr/src/app/homeassistant/components/rfxtrx.py", line 48, in handle_receive
subscriber(event)
File "/usr/src/app/homeassistant/components/switch/rfxtrx.py", line 81, in switch_update
event.values['Command']
KeyError: 'Command'
|
KeyError
|
def setup(hass, config):
"""Setup the RFXtrx component."""
# Declare the Handle event
def handle_receive(event):
"""Callback all subscribers for RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
entity_id = slugify(event.device.id_string.lower())
packet_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, packet_id)
_LOGGER.info("Receive RFXCOM event from %s => %s", event.device, entity_name)
# Callback to HA registered components
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
# Try to load the RFXtrx module
import RFXtrx as rfxtrxmod
# Init the rfxtrx module
global RFXOBJECT
if ATTR_DEVICE not in config[DOMAIN]:
_LOGGER.exception(
"can found device parameter in %s YAML configuration section", DOMAIN
)
return False
device = config[DOMAIN][ATTR_DEVICE]
debug = config[DOMAIN].get(ATTR_DEBUG, False)
RFXOBJECT = rfxtrxmod.Core(device, handle_receive, debug=debug)
return True
|
def setup(hass, config):
"""Setup the RFXtrx component."""
# Declare the Handle event
def handle_receive(event):
"""Callback all subscribers for RFXtrx gateway."""
# Log RFXCOM event
entity_id = slugify(event.device.id_string.lower())
packet_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, packet_id)
_LOGGER.info("Receive RFXCOM event from %s => %s", event.device, entity_name)
# Callback to HA registered components
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
# Try to load the RFXtrx module
import RFXtrx as rfxtrxmod
# Init the rfxtrx module
global RFXOBJECT
if ATTR_DEVICE not in config[DOMAIN]:
_LOGGER.exception(
"can found device parameter in %s YAML configuration section", DOMAIN
)
return False
device = config[DOMAIN][ATTR_DEVICE]
debug = config[DOMAIN].get(ATTR_DEBUG, False)
RFXOBJECT = rfxtrxmod.Core(device, handle_receive, debug=debug)
return True
|
https://github.com/home-assistant/core/issues/1116
|
INFO:homeassistant.components.rfxtrx:Receive RFXCOM event from <class 'RFXtrx.LightingDevice'> type='LightwaveRF, Siemens' id='f11043:16' => f1104316 : 0a140005f1104310050050
INFO:homeassistant.components.switch.rfxtrx:Automatic add f1104316 rfxtrx.switch (Class: LightingDevice Sub: 0)
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state switch.f1104316__0a140005f1104310050050=off; friendly_name=f1104316 : 0a140005f1104310050050 @ 20:34:47 03-02-2016>, entity_id=switch.f1104316__0a140005f1104310050050>
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state group.all_switches=off; hidden=True, order=0, entity_id=('switch.f1104316__0a140005f1104310050050',), auto=True, friendly_name=all switches @ 20:34:47 03-02-2016>, entity_id=group.all_switches>
Traceback (most recent call last):
File "/usr/local/lib/python3.4/threading.py", line 911, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.4/threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/site-packages/RFXtrx/__init__.py", line 55, in _connect
self._event_callback(event)
File "/usr/src/app/homeassistant/components/rfxtrx.py", line 48, in handle_receive
subscriber(event)
File "/usr/src/app/homeassistant/components/switch/rfxtrx.py", line 81, in switch_update
event.values['Command']
KeyError: 'Command'
|
KeyError
|
def handle_receive(event):
"""Callback all subscribers for RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
entity_id = slugify(event.device.id_string.lower())
packet_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, packet_id)
_LOGGER.info("Receive RFXCOM event from %s => %s", event.device, entity_name)
# Callback to HA registered components
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
|
def handle_receive(event):
"""Callback all subscribers for RFXtrx gateway."""
# Log RFXCOM event
entity_id = slugify(event.device.id_string.lower())
packet_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, packet_id)
_LOGGER.info("Receive RFXCOM event from %s => %s", event.device, entity_name)
# Callback to HA registered components
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
|
https://github.com/home-assistant/core/issues/1116
|
INFO:homeassistant.components.rfxtrx:Receive RFXCOM event from <class 'RFXtrx.LightingDevice'> type='LightwaveRF, Siemens' id='f11043:16' => f1104316 : 0a140005f1104310050050
INFO:homeassistant.components.switch.rfxtrx:Automatic add f1104316 rfxtrx.switch (Class: LightingDevice Sub: 0)
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state switch.f1104316__0a140005f1104310050050=off; friendly_name=f1104316 : 0a140005f1104310050050 @ 20:34:47 03-02-2016>, entity_id=switch.f1104316__0a140005f1104310050050>
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state group.all_switches=off; hidden=True, order=0, entity_id=('switch.f1104316__0a140005f1104310050050',), auto=True, friendly_name=all switches @ 20:34:47 03-02-2016>, entity_id=group.all_switches>
Traceback (most recent call last):
File "/usr/local/lib/python3.4/threading.py", line 911, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.4/threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/site-packages/RFXtrx/__init__.py", line 55, in _connect
self._event_callback(event)
File "/usr/src/app/homeassistant/components/rfxtrx.py", line 48, in handle_receive
subscriber(event)
File "/usr/src/app/homeassistant/components/switch/rfxtrx.py", line 81, in switch_update
event.values['Command']
KeyError: 'Command'
|
KeyError
|
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup the RFXtrx platform."""
import RFXtrx as rfxtrxmod
# Add switch from config file
switchs = []
devices = config.get("devices")
if devices:
for entity_id, entity_info in devices.items():
if entity_id not in rfxtrx.RFX_DEVICES:
_LOGGER.info("Add %s rfxtrx.switch", entity_info[ATTR_NAME])
# Check if i must fire event
fire_event = entity_info.get(ATTR_FIREEVENT, False)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event}
rfxobject = rfxtrx.get_rfx_object(entity_info[ATTR_PACKETID])
newswitch = RfxtrxSwitch(entity_info[ATTR_NAME], rfxobject, datas)
rfxtrx.RFX_DEVICES[entity_id] = newswitch
switchs.append(newswitch)
add_devices_callback(switchs)
def switch_update(event):
"""Callback for sensor updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.SwitchDevice) or isinstance(
event.device, rfxtrxmod.LightDevice
):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.switch (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_switch = RfxtrxSwitch(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_switch
add_devices_callback([new_switch])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES:
_LOGGER.debug(
"EntityID: %s switch_update. Command: %s",
entity_id,
event.values["Command"],
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
# Subscribe to main rfxtrx events
if switch_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(switch_update)
|
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup the RFXtrx platform."""
import RFXtrx as rfxtrxmod
# Add switch from config file
switchs = []
devices = config.get("devices")
if devices:
for entity_id, entity_info in devices.items():
if entity_id not in rfxtrx.RFX_DEVICES:
_LOGGER.info("Add %s rfxtrx.switch", entity_info[ATTR_NAME])
# Check if i must fire event
fire_event = entity_info.get(ATTR_FIREEVENT, False)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event}
rfxobject = rfxtrx.get_rfx_object(entity_info[ATTR_PACKETID])
newswitch = RfxtrxSwitch(entity_info[ATTR_NAME], rfxobject, datas)
rfxtrx.RFX_DEVICES[entity_id] = newswitch
switchs.append(newswitch)
add_devices_callback(switchs)
def switch_update(event):
"""Callback for sensor updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightingDevice):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.switch (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_switch = RfxtrxSwitch(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_switch
add_devices_callback([new_switch])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES and isinstance(
rfxtrx.RFX_DEVICES[entity_id], RfxtrxSwitch
):
_LOGGER.debug(
"EntityID: %s switch_update. Command: %s",
entity_id,
event.values["Command"],
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
# Subscribe to main rfxtrx events
if switch_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(switch_update)
|
https://github.com/home-assistant/core/issues/1116
|
INFO:homeassistant.components.rfxtrx:Receive RFXCOM event from <class 'RFXtrx.LightingDevice'> type='LightwaveRF, Siemens' id='f11043:16' => f1104316 : 0a140005f1104310050050
INFO:homeassistant.components.switch.rfxtrx:Automatic add f1104316 rfxtrx.switch (Class: LightingDevice Sub: 0)
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state switch.f1104316__0a140005f1104310050050=off; friendly_name=f1104316 : 0a140005f1104310050050 @ 20:34:47 03-02-2016>, entity_id=switch.f1104316__0a140005f1104310050050>
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state group.all_switches=off; hidden=True, order=0, entity_id=('switch.f1104316__0a140005f1104310050050',), auto=True, friendly_name=all switches @ 20:34:47 03-02-2016>, entity_id=group.all_switches>
Traceback (most recent call last):
File "/usr/local/lib/python3.4/threading.py", line 911, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.4/threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/site-packages/RFXtrx/__init__.py", line 55, in _connect
self._event_callback(event)
File "/usr/src/app/homeassistant/components/rfxtrx.py", line 48, in handle_receive
subscriber(event)
File "/usr/src/app/homeassistant/components/switch/rfxtrx.py", line 81, in switch_update
event.values['Command']
KeyError: 'Command'
|
KeyError
|
def switch_update(event):
"""Callback for sensor updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.SwitchDevice) or isinstance(
event.device, rfxtrxmod.LightDevice
):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.switch (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_switch = RfxtrxSwitch(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_switch
add_devices_callback([new_switch])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES:
_LOGGER.debug(
"EntityID: %s switch_update. Command: %s",
entity_id,
event.values["Command"],
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
|
def switch_update(event):
"""Callback for sensor updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightingDevice):
return
# Add entity if not exist and the automatic_add is True
entity_id = slugify(event.device.id_string.lower())
if entity_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get("automatic_add", False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.switch (Class: %s Sub: %s)",
entity_id,
event.device.__class__.__name__,
event.device.subtype,
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
new_switch = RfxtrxSwitch(entity_name, event, datas)
rfxtrx.RFX_DEVICES[entity_id] = new_switch
add_devices_callback([new_switch])
# Check if entity exists or previously added automatically
if entity_id in rfxtrx.RFX_DEVICES and isinstance(
rfxtrx.RFX_DEVICES[entity_id], RfxtrxSwitch
):
_LOGGER.debug(
"EntityID: %s switch_update. Command: %s",
entity_id,
event.values["Command"],
)
if event.values["Command"] == "On" or event.values["Command"] == "Off":
# Update the rfxtrx device state
is_on = event.values["Command"] == "On"
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[entity_id]._state = is_on
rfxtrx.RFX_DEVICES[entity_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[entity_id].should_fire_event:
rfxtrx.RFX_DEVICES[entity_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[entity_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
|
https://github.com/home-assistant/core/issues/1116
|
INFO:homeassistant.components.rfxtrx:Receive RFXCOM event from <class 'RFXtrx.LightingDevice'> type='LightwaveRF, Siemens' id='f11043:16' => f1104316 : 0a140005f1104310050050
INFO:homeassistant.components.switch.rfxtrx:Automatic add f1104316 rfxtrx.switch (Class: LightingDevice Sub: 0)
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state switch.f1104316__0a140005f1104310050050=off; friendly_name=f1104316 : 0a140005f1104310050050 @ 20:34:47 03-02-2016>, entity_id=switch.f1104316__0a140005f1104310050050>
INFO:homeassistant.core:Bus:Handling <Event state_changed[L]: new_state=<state group.all_switches=off; hidden=True, order=0, entity_id=('switch.f1104316__0a140005f1104310050050',), auto=True, friendly_name=all switches @ 20:34:47 03-02-2016>, entity_id=group.all_switches>
Traceback (most recent call last):
File "/usr/local/lib/python3.4/threading.py", line 911, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.4/threading.py", line 859, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/site-packages/RFXtrx/__init__.py", line 55, in _connect
self._event_callback(event)
File "/usr/src/app/homeassistant/components/rfxtrx.py", line 48, in handle_receive
subscriber(event)
File "/usr/src/app/homeassistant/components/switch/rfxtrx.py", line 81, in switch_update
event.values['Command']
KeyError: 'Command'
|
KeyError
|
def run(self):
"""Intentionally terminating the process with an error code."""
sys.exit(-1)
|
def run(self):
"""Do nothing so the command intentionally fails."""
pass
|
https://github.com/Qiskit/qiskit/issues/78
|
Building wheels for collected packages: qiskit
Building wheel for qiskit (setup.py) ... done
Exception:
Traceback (most recent call last):
File "/home/ima/envs/test/lib/python3.6/site-packages/pip/_internal/cli/base_command.py", line 179, in main
status = self.run(options, args)
File "/home/ima/envs/test/lib/python3.6/site-packages/pip/_internal/commands/install.py", line 355, in run
session=session, autobuilding=True
File "/home/ima/envs/test/lib/python3.6/site-packages/pip/_internal/wheel.py", line 980, in build
python_tag=python_tag,
File "/home/ima/envs/test/lib/python3.6/site-packages/pip/_internal/wheel.py", line 813, in _build_one
python_tag=python_tag)
File "/home/ima/envs/test/lib/python3.6/site-packages/pip/_internal/wheel.py", line 821, in _build_one_inside_env
wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
File "/home/ima/envs/test/lib/python3.6/site-packages/pip/_internal/wheel.py", line 898, in _build_one_legacy
return os.path.join(tempd, sorted(os.listdir(tempd))[0])
IndexError: list index out of range
|
IndexError
|
async def write(self, uuid):
p = f"{self.path}/{uuid}.{self.count[uuid]}"
async with AIOFile(p, mode="a") as fp:
r = await fp.write("\n".join(self.data[uuid]) + "\n", offset=self.pointer[uuid])
self.pointer[uuid] += r
self.data[uuid] = []
if self.pointer[uuid] >= self.rotate:
self.count[uuid] += 1
self.pointer[uuid] = 0
|
async def write(self, uuid):
p = f"{self.path}/{uuid}.{self.count[uuid]}"
async with AIOFile(p, mode="a") as fp:
r = await fp.write("\n".join(self.data[uuid]) + "\n", offset=self.pointer[uuid])
self.pointer[uuid] += len(r)
self.data[uuid] = []
if self.pointer[uuid] >= self.rotate:
self.count[uuid] += 1
self.pointer[uuid] = 0
|
https://github.com/bmoscon/cryptofeed/issues/282
|
2020-08-23 14:18:45,985 : ERROR : COINBASE: encountered an exception, reconnecting
Traceback (most recent call last):
File "/home/g/.local/lib/python3.8/site-packages/cryptofeed/feedhandler.py", line 211, in _connect
await self._handler(websocket, feed.message_handler, feed.uuid)
File "/home/g/.local/lib/python3.8/site-packages/cryptofeed/feedhandler.py", line 231, in _handler
await self.raw_message_capture(message, self.last_msg[feed_id], feed_id)
File "/home/g/.local/lib/python3.8/site-packages/cryptofeed/util/async_file.py", line 42, in __call__
await self.write(uuid)
File "/home/g/.local/lib/python3.8/site-packages/cryptofeed/util/async_file.py", line 32, in write
self.pointer[uuid] += len(r)
TypeError: object of type 'int' has no len()
|
TypeError
|
def run_module(self):
import runpy
code = "run_module(modname, alter_sys=True)"
global_dict = {"run_module": runpy.run_module, "modname": self.options.module}
sys.argv = [self.options.module] + self.command[:]
sys.path.append(os.getcwd())
return self.run_code(code, global_dict)
|
def run_module(self):
import runpy
code = "run_module(modname, run_name='__main__')"
global_dict = {"run_module": runpy.run_module, "modname": self.options.module}
sys.argv = [self.options.module] + self.command[:]
sys.path.append(os.getcwd())
return self.run_code(code, global_dict)
|
https://github.com/gaogaotiantian/viztracer/issues/58
|
viztracer --log_multiprocess -m sushy.indexer
date=2020-12-06T12:49:12.350 pid=8953 level=WARNING filename=models.py:129:init_db msg="virtual tables may not be indexed"
date=2020-12-06T12:49:12.360 pid=8953 level=INFO filename=indexer.py:258:<module> msg="Scanning /home/rcarmo/Sites/taoofmac.com/space, static rendering is False"
Loading finish
Loading finish
Saving report to /home/rcarmo/Sync/Development/taoofmac-engine/viztracer_multiprocess_tmp/result_8988.json ...
Saving report to /home/rcarmo/Sync/Development/taoofmac-engine/viztracer_multiprocess_tmp/result_8989.json ...
Dumping trace data to json, total entries: 17, estimated json file size: 2.0KiB
Dumping trace data to json, total entries: 17, estimated json file size: 2.0KiB
Report saved.
Report saved.
Traceback (most recent call last):
File "/home/rcarmo/.pyenv/versions/3.6.9/bin/viztracer", line 8, in <module>
sys.exit(main())
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/site-packages/viztracer/main.py", line 378, in main
success, err_msg = ui.run()
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/site-packages/viztracer/main.py", line 205, in run
return self.run_module()
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/site-packages/viztracer/main.py", line 270, in run_module
return self.run_code(code, global_dict)
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/site-packages/viztracer/main.py", line 256, in run_code
exec(code, global_dict)
File "<string>", line 1, in <module>
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/runpy.py", line 208, in run_module
return _run_code(code, {}, init_globals, run_name, mod_spec)
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/rcarmo/Sync/Development/taoofmac-engine/sushy/indexer.py", line 260, in <module>
pool_indexer(STORE_PATH)
File "/home/rcarmo/Sync/Development/taoofmac-engine/sushy/indexer.py", line 240, in pool_indexer
for res in pool.imap(pool_worker, gen_pages(path), chunksize=10):
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/multiprocessing/pool.py", line 320, in <genexpr>
return (item for chunk in result for item in chunk)
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/multiprocessing/pool.py", line 735, in next
raise value
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/multiprocessing/pool.py", line 424, in _handle_tasks
put(task)
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/multiprocessing/connection.py", line 206, in send
self._send_bytes(_ForkingPickler.dumps(obj))
File "/home/rcarmo/.pyenv/versions/3.6.9/lib/python3.6/multiprocessing/reduction.py", line 51, in dumps
cls(buf, protocol).dump(obj)
_pickle.PicklingError: Can't pickle <function pool_worker at 0x7f39b6d6a488>: attribute lookup pool_worker on __main__ failed
Loading finish
Saving report to /home/rcarmo/Sync/Development/taoofmac-engine/viztracer_multiprocess_tmp/result_8953.json ...
Dumping trace data to json, total entries: 503870, estimated json file size: 57.7MiB
Report saved.
Saving report to /home/rcarmo/Sync/Development/taoofmac-engine/result.html ...
Dumping trace data to json, total entries: 503904, estimated json file size: 57.7MiB
Generating HTML report
Report saved.
|
_pickle.PicklingError
|
def ignore_function(method=None, tracer=None):
def inner(func):
@functools.wraps(func)
def ignore_wrapper(*args, **kwargs):
# We need this to keep trace a local variable
t = tracer
if not t:
t = get_tracer()
if not t:
raise NameError("ignore_function only works with global tracer")
t.pause()
ret = func(*args, **kwargs)
t.resume()
return ret
return ignore_wrapper
if method:
return inner(method)
return inner
|
def ignore_function(method=None, tracer=None):
if not tracer:
tracer = get_tracer()
def inner(func):
@functools.wraps(func)
def ignore_wrapper(*args, **kwargs):
tracer.pause()
ret = func(*args, **kwargs)
tracer.resume()
return ret
return ignore_wrapper
if method:
return inner(method)
return inner
|
https://github.com/gaogaotiantian/viztracer/issues/42
|
Traceback (most recent call last):
File "C:/Users/PycharmProjects/oneway_test/test.py", line 12, in <module>
f()
File "D:\virtual\Envs\venv37\lib\site-packages\viztracer\decorator.py", line 17, in ignore_wrapper
tracer.pause()
AttributeError: 'NoneType' object has no attribute 'pause'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.