fname stringlengths 63 176 | rel_fname stringclasses 706
values | line int64 -1 4.5k | name stringlengths 1 81 | kind stringclasses 2
values | category stringclasses 2
values | info stringlengths 0 77.9k ⌀ |
|---|---|---|---|---|---|---|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,064 | _check_iterable | ref | function | self._check_iterable(node.iter)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,066 | check_messages | ref | function | @check_messages("not-an-iterable")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,067 | visit_asyncfor | def | function | def visit_asyncfor(self, node: nodes.AsyncFor) -> None:
self._check_iterable(node.iter, check_async=_True)
@check_messages("not-an-iterable")
def visit_yieldfrom(self, node: nodes.YieldFrom) -> None:
if self._is_asyncio_coroutine(node.value):
return
self._check_iterable(node.value)
@check_messages("not-an-iterable", "not-a-mapping")
def visit_call(self, node: nodes.Call) -> None:
for stararg in node.starargs:
self._check_iterable(stararg.value)
for kwarg in node.kwargs:
self._check_mapping(kwarg.value)
@check_messages("not-an-iterable")
def visit_listcomp(self, node: nodes.ListComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_dictcomp(self, node: nodes.DictComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_setcomp(self, node: nodes.SetComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("await-outside-async")
def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,068 | _check_iterable | ref | function | self._check_iterable(node.iter, check_async=True)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,070 | check_messages | ref | function | @check_messages("not-an-iterable")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,071 | visit_yieldfrom | def | function | def visit_yieldfrom(self, node: nodes.YieldFrom) -> None:
if self._is_asyncio_coroutine(node.value):
return
self._check_iterable(node.value)
@check_messages("not-an-iterable", "not-a-mapping")
def visit_call(self, node: nodes.Call) -> None:
for stararg in node.starargs:
self._check_iterable(stararg.value)
for kwarg in node.kwargs:
self._check_mapping(kwarg.value)
@check_messages("not-an-iterable")
def visit_listcomp(self, node: nodes.ListComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_dictcomp(self, node: nodes.DictComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_setcomp(self, node: nodes.SetComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("await-outside-async")
def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,072 | _is_asyncio_coroutine | ref | function | if self._is_asyncio_coroutine(node.value):
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,074 | _check_iterable | ref | function | self._check_iterable(node.value)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,076 | check_messages | ref | function | @check_messages("not-an-iterable", "not-a-mapping")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,077 | visit_call | def | function | def visit_call(self, node: nodes.Call) -> None:
"""Check that called functions/methods are inferred to callable objects,
and that the arguments passed to the function match the parameters in
the inferred function's definition
"""
called = safe_infer(node.func)
self._check_not_callable(node, called)
try:
called, implicit_args, callable_name = _determine_callable(called)
except ValueError:
# Any error occurred during determining the function type, most of
# those errors are handled by different warnings.
return
if called.args.args is None:
if called.name == "isinstance":
# Verify whether second argument of isinstance is a valid type
self._check_isinstance_args(node)
# Built-in functions have no argument information.
return
if len(called.argnames()) != len(set(called.argnames())):
# Duplicate parameter name (see duplicate-argument). We can't really
# make sense of the function call in this case, so just return.
return
# Build the set of keyword arguments, checking for duplicate keywords,
# and count the positional arguments.
call_site = astroid.arguments.CallSite.from_call(node)
# Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}`
for keyword in call_site.duplicated_keywords:
self.add_message("repeated-keyword", node=node, args=(keyword,))
if call_site.has_invalid_arguments() or call_site.has_invalid_keywords():
# Can't make sense of this.
return
# Has the function signature changed in ways we cannot reliably detect?
if hasattr(called, "decorators") and decorated_with(
called, self.config.signature_mutators
):
return
num_positional_args = len(call_site.positional_arguments)
keyword_args = list(call_site.keyword_arguments.keys())
overload_function = is_overload_stub(called)
# Determine if we don't have a context for our call and we use variadics.
node_scope = node.scope()
if isinstance(node_scope, (nodes.Lambda, nodes.FunctionDef)):
has_no_context_positional_variadic = _no_context_variadic_positional(
node, node_scope
)
has_no_context_keywords_variadic = _no_context_variadic_keywords(
node, node_scope
)
else:
has_no_context_positional_variadic = (
has_no_context_keywords_variadic
) = _False
# These are coming from the functools.partial implementation in astroid
already_filled_positionals = getattr(called, "filled_positionals", 0)
already_filled_keywords = getattr(called, "filled_keywords", {})
keyword_args += list(already_filled_keywords)
num_positional_args += implicit_args + already_filled_positionals
# Analyze the list of formal parameters.
args = list(itertools.chain(called.args.posonlyargs or (), called.args.args))
num_mandatory_parameters = len(args) - len(called.args.defaults)
parameters: List[List[Any]] = []
parameter_name_to_index = {}
for i, arg in enumerate(args):
if isinstance(arg, nodes.Tuple):
name = None
# Don't store any parameter names within the tuple, since those
# are not assignable from keyword arguments.
else:
assert isinstance(arg, nodes.AssignName)
# This occurs with:
# def f( (a), (b) ): pass
name = arg.name
parameter_name_to_index[name] = i
if i >= num_mandatory_parameters:
defval = called.args.defaults[i - num_mandatory_parameters]
else:
defval = None
parameters.append([(name, defval), _False])
kwparams = {}
for i, arg in enumerate(called.args.kwonlyargs):
if isinstance(arg, nodes.Keyword):
name = arg.arg
else:
assert isinstance(arg, nodes.AssignName)
name = arg.name
kwparams[name] = [called.args.kw_defaults[i], _False]
self._check_argument_order(
node, call_site, called, [p[0][0] for p in parameters]
)
# 1. Match the positional arguments.
for i in range(num_positional_args):
if i < len(parameters):
parameters[i][1] = _True
elif called.args.vararg is not None:
# The remaining positional arguments get assigned to the *args
# parameter.
break
elif not overload_function:
# Too many positional arguments.
self.add_message(
"too-many-function-args", node=node, args=(callable_name,)
)
break
# 2. Match the keyword arguments.
for keyword in keyword_args:
if keyword in parameter_name_to_index:
i = parameter_name_to_index[keyword]
if parameters[i][1]:
# Duplicate definition of function parameter.
# Might be too hardcoded, but this can actually
# happen when using str.format and `self` is passed
# by keyword argument, as in `.format(self=self)`.
# It's perfectly valid to so, so we're just skipping
# it if that's the case.
if not (keyword == "self" and called.qname() in STR_FORMAT):
self.add_message(
"redundant-keyword-arg",
node=node,
args=(keyword, callable_name),
)
else:
parameters[i][1] = _True
elif keyword in kwparams:
if kwparams[keyword][1]:
# Duplicate definition of function parameter.
self.add_message(
"redundant-keyword-arg",
node=node,
args=(keyword, callable_name),
)
else:
kwparams[keyword][1] = _True
elif called.args.kwarg is not None:
# The keyword argument gets assigned to the **kwargs parameter.
pass
elif isinstance(
called, nodes.FunctionDef
) and self._keyword_argument_is_in_all_decorator_returns(called, keyword):
pass
elif not overload_function:
# Unexpected keyword argument.
self.add_message(
"unexpected-keyword-arg", node=node, args=(keyword, callable_name)
)
# 3. Match the **kwargs, if any.
if node.kwargs:
for i, [(name, defval), assigned] in enumerate(parameters):
# Assume that *kwargs provides values for all remaining
# unassigned named parameters.
if name is not None:
parameters[i][1] = _True
else:
# **kwargs can't assign to tuples.
pass
# Check that any parameters without a default have been assigned
# values.
for [(name, defval), assigned] in parameters:
if (defval is None) and not assigned:
display_name = "<tuple>" if name is None else repr(name)
if not has_no_context_positional_variadic and not overload_function:
self.add_message(
"no-value-for-parameter",
node=node,
args=(display_name, callable_name),
)
for name, val in kwparams.items():
defval, assigned = val
if (
defval is None
and not assigned
and not has_no_context_keywords_variadic
and not overload_function
):
self.add_message("missing-kwoa", node=node, args=(name, callable_name))
@staticmethod
def _keyword_argument_is_in_all_decorator_returns(
func: nodes.FunctionDef, keyword: str
) -> bool:
"""Check if the keyword argument exists in all signatures of the
return values of all decorators of the function.
"""
if not func.decorators:
return _False
for decorator in func.decorators.nodes:
inferred = safe_infer(decorator)
# If we can't infer the decorator we assume it satisfies consumes
# the keyword, so we don't raise false positives
if not inferred:
return _True
# We only check arguments of function decorators
if not isinstance(inferred, nodes.FunctionDef):
return _False
for return_value in inferred.infer_call_result():
# infer_call_result() returns nodes.Const.None for None return values
# so this also catches non-returning decorators
if not isinstance(return_value, nodes.FunctionDef):
return _False
# If the return value uses a kwarg the keyword will be consumed
if return_value.args.kwarg:
continue
# Check if the keyword is another type of argument
if return_value.args.is_argument(keyword):
continue
return _False
return _True
def _check_invalid_sequence_index(self, subscript: nodes.Subscript):
# Look for index operations where the parent is a sequence type.
# If the types can be determined, only allow indices to be int,
# slice or instances with __index__.
parent_type = safe_infer(subscript.value)
if not isinstance(
parent_type, (nodes.ClassDef, astroid.Instance)
) or not has_known_bases(parent_type):
return None
# Determine what method on the parent this index will use
# The parent of this node will be a Subscript, and the parent of that
# node determines if the Subscript is a get, set, or delete operation.
if subscript.ctx is astroid.Store:
methodname = "__setitem__"
elif subscript.ctx is astroid.Del:
methodname = "__delitem__"
else:
methodname = "__getitem__"
# Check if this instance's __getitem__, __setitem__, or __delitem__, as
# appropriate to the statement, is implemented in a builtin sequence
# type. This way we catch subclasses of sequence types but skip classes
# that override __getitem__ and which may allow non-integer indices.
try:
methods = astroid.interpreter.dunder_lookup.lookup(parent_type, methodname)
if methods is astroid.Uninferable:
return None
itemmethod = methods[0]
except (
astroid.AttributeInferenceError,
IndexError,
):
return None
if (
not isinstance(itemmethod, nodes.FunctionDef)
or itemmethod.root().name != "builtins"
or not itemmethod.parent
or itemmethod.parent.frame().name not in SEQUENCE_TYPES
):
return None
# For ExtSlice objects coming from visit_extslice, no further
# inference is necessary, since if we got this far the ExtSlice
# is an error.
if isinstance(subscript.value, nodes.ExtSlice):
index_type = subscript.value
else:
index_type = safe_infer(subscript.slice)
if index_type is None or index_type is astroid.Uninferable:
return None
# Constants must be of type int
if isinstance(index_type, nodes.Const):
if isinstance(index_type.value, int):
return None
# Instance values must be int, slice, or have an __index__ method
elif isinstance(index_type, astroid.Instance):
if index_type.pytype() in {"builtins.int", "builtins.slice"}:
return None
try:
index_type.getattr("__index__")
return None
except astroid.NotFoundError:
pass
elif isinstance(index_type, nodes.Slice):
# A slice can be present
# here after inferring the index node, which could
# be a `slice(...)` call for instance.
return self._check_invalid_slice_index(index_type)
# Anything else is an error
self.add_message("invalid-sequence-index", node=subscript)
return None
def _check_not_callable(
self, node: nodes.Call, inferred_call: Optional[nodes.NodeNG]
) -> None:
"""Checks to see if the not-callable message should be emitted.
Only functions, generators and objects defining __call__ are "callable"
We ignore instances of descriptors since astroid cannot properly handle them yet
"""
# Handle uninferable calls
if not inferred_call or inferred_call.callable():
self._check_uninferable_call(node)
return
if not isinstance(inferred_call, astroid.Instance):
self.add_message("not-callable", node=node, args=node.func.as_string())
return
# Don't emit if we can't make sure this object is callable.
if not has_known_bases(inferred_call):
return
if inferred_call.parent and isinstance(inferred_call.scope(), nodes.ClassDef):
# Ignore descriptor instances
if "__get__" in inferred_call.locals:
return
# NamedTuple instances are callable
if inferred_call.qname() == "typing.NamedTuple":
return
self.add_message("not-callable", node=node, args=node.func.as_string())
@check_messages("invalid-sequence-index")
def visit_extslice(self, node: nodes.ExtSlice) -> None:
if not node.parent or not hasattr(node.parent, "value"):
return None
# Check extended slice objects as if they were used as a sequence
# index to check if the object being sliced can support them
return self._check_invalid_sequence_index(node.parent)
def _check_invalid_slice_index(self, node: nodes.Slice) -> None:
# Check the type of each part of the slice
invalid_slices_nodes: List[nodes.NodeNG] = []
for index in (node.lower, node.upper, node.step):
if index is None:
continue
index_type = safe_infer(index)
if index_type is None or index_type is astroid.Uninferable:
continue
# Constants must be of type int or None
if isinstance(index_type, nodes.Const):
if isinstance(index_type.value, (int, type(None))):
continue
# Instance values must be of type int, None or an object
# with __index__
elif isinstance(index_type, astroid.Instance):
if index_type.pytype() in {"builtins.int", "builtins.NoneType"}:
continue
try:
index_type.getattr("__index__")
return
except astroid.NotFoundError:
pass
invalid_slices_nodes.append(index)
if not invalid_slices_nodes:
return
# Anything else is an error, unless the object that is indexed
# is a custom object, which knows how to handle this kind of slices
parent = node.parent
if isinstance(parent, nodes.ExtSlice):
parent = parent.parent
if isinstance(parent, nodes.Subscript):
inferred = safe_infer(parent.value)
if inferred is None or inferred is astroid.Uninferable:
# Don't know what this is
return
known_objects = (
nodes.List,
nodes.Dict,
nodes.Tuple,
astroid.objects.FrozenSet,
nodes.Set,
)
if not isinstance(inferred, known_objects):
# Might be an instance that knows how to handle this slice object
return
for snode in invalid_slices_nodes:
self.add_message("invalid-slice-index", node=snode)
@check_messages("not-context-manager")
def visit_with(self, node: nodes.With) -> None:
for ctx_mgr, _ in node.items:
context = astroid.context.InferenceContext()
inferred = safe_infer(ctx_mgr, context=context)
if inferred is None or inferred is astroid.Uninferable:
continue
if isinstance(inferred, astroid.bases.Generator):
# Check if we are dealing with a function decorated
# with contextlib.contextmanager.
if decorated_with(
inferred.parent, self.config.contextmanager_decorators
):
continue
# If the parent of the generator is not the context manager itself,
# that means that it could have been returned from another
# function which was the real context manager.
# The following approach is more of a hack rather than a real
# solution: walk all the inferred statements for the
# given *ctx_mgr* and if you find one function scope
# which is decorated, consider it to be the real
# manager and give up, otherwise emit not-context-manager.
# See the test file for not_context_manager for a couple
# of self explaining tests.
# Retrieve node from all previously visited nodes in the
# inference history
context_path_names: Iterator[Any] = filter(
None, _unflatten(context.path)
)
inferred_paths = _flatten_container(
safe_infer(path) for path in context_path_names
)
for inferred_path in inferred_paths:
if not inferred_path:
continue
scope = inferred_path.scope()
if not isinstance(scope, nodes.FunctionDef):
continue
if decorated_with(scope, self.config.contextmanager_decorators):
break
else:
self.add_message(
"not-context-manager", node=node, args=(inferred.name,)
)
else:
try:
inferred.getattr("__enter__")
inferred.getattr("__exit__")
except astroid.NotFoundError:
if isinstance(inferred, astroid.Instance):
# If we do not know the bases of this class,
# just skip it.
if not has_known_bases(inferred):
continue
# Just ignore mixin classes.
if self.config.ignore_mixin_members:
if inferred.name[-5:].lower() == "mixin":
continue
self.add_message(
"not-context-manager", node=node, args=(inferred.name,)
)
@check_messages("invalid-unary-operand-type")
def visit_unaryop(self, node: nodes.UnaryOp) -> None:
"""Detect TypeErrors for unary operands."""
for error in node.type_errors():
# Let the error customize its output.
self.add_message("invalid-unary-operand-type", args=str(error), node=node)
@check_messages("unsupported-binary-operation")
def visit_binop(self, node: nodes.BinOp) -> None:
if node.op == "|":
self._detect_unsupported_alternative_union_syntax(node)
def _detect_unsupported_alternative_union_syntax(self, node: nodes.BinOp) -> None:
"""Detect if unsupported alternative Union syntax (PEP 604) was used."""
if self._py310_plus: # 310+ supports the new syntax
return
if isinstance(
node.parent, TYPE_ANNOTATION_NODES_TYPES
) and not is_postponed_evaluation_enabled(node):
# Use in type annotations only allowed if
# postponed evaluation is enabled.
self._check_unsupported_alternative_union_syntax(node)
if isinstance(
node.parent,
(
nodes.Assign,
nodes.Call,
nodes.Keyword,
nodes.Dict,
nodes.Tuple,
nodes.Set,
nodes.List,
nodes.BinOp,
),
):
# Check other contexts the syntax might appear, but are invalid.
# Make sure to filter context if postponed evaluation is enabled
# and parent is allowed node type.
allowed_nested_syntax = _False
if is_postponed_evaluation_enabled(node):
parent_node = node.parent
while _True:
if isinstance(parent_node, TYPE_ANNOTATION_NODES_TYPES):
allowed_nested_syntax = _True
break
parent_node = parent_node.parent
if isinstance(parent_node, nodes.Module):
break
if not allowed_nested_syntax:
self._check_unsupported_alternative_union_syntax(node)
def _check_unsupported_alternative_union_syntax(self, node: nodes.BinOp) -> None:
"""Check if left or right node is of type `type`."""
msg = "unsupported operand type(s) for |"
for n in (node.left, node.right):
n = astroid.helpers.object_type(n)
if isinstance(n, nodes.ClassDef) and is_classdef_type(n):
self.add_message("unsupported-binary-operation", args=msg, node=node)
break
@check_messages("unsupported-binary-operation")
def _visit_binop(self, node: nodes.BinOp) -> None:
"""Detect TypeErrors for binary arithmetic operands."""
self._check_binop_errors(node)
@check_messages("unsupported-binary-operation")
def _visit_augassign(self, node: nodes.AugAssign) -> None:
"""Detect TypeErrors for augmented binary arithmetic operands."""
self._check_binop_errors(node)
def _check_binop_errors(self, node):
for error in node.type_errors():
# Let the error customize its output.
if any(
isinstance(obj, nodes.ClassDef) and not has_known_bases(obj)
for obj in (error.left_type, error.right_type)
):
continue
self.add_message("unsupported-binary-operation", args=str(error), node=node)
def _check_membership_test(self, node):
if is_inside_abstract_class(node):
return
if is_comprehension(node):
return
inferred = safe_infer(node)
if inferred is None or inferred is astroid.Uninferable:
return
if not supports_membership_test(inferred):
self.add_message(
"unsupported-membership-test", args=node.as_string(), node=node
)
@check_messages("unsupported-membership-test")
def visit_compare(self, node: nodes.Compare) -> None:
if len(node.ops) != 1:
return
op, right = node.ops[0]
if op in {"in", "not in"}:
self._check_membership_test(right)
@check_messages(
"unsubscriptable-object",
"unsupported-assignment-operation",
"unsupported-delete-operation",
"unhashable-dict-key",
"invalid-sequence-index",
"invalid-slice-index",
)
def visit_subscript(self, node: nodes.Subscript) -> None:
self._check_invalid_sequence_index(node)
supported_protocol: Optional[Callable[[Any, Any], bool]] = None
if isinstance(node.value, (nodes.ListComp, nodes.DictComp)):
return
if isinstance(node.value, nodes.Dict):
# Assert dict key is hashable
inferred = safe_infer(node.slice)
if inferred and inferred != astroid.Uninferable:
try:
hash_fn = next(inferred.igetattr("__hash__"))
except astroid.InferenceError:
pass
else:
if getattr(hash_fn, "value", _True) is None:
self.add_message("unhashable-dict-key", node=node.value)
if node.ctx == astroid.Load:
supported_protocol = supports_getitem
msg = "unsubscriptable-object"
elif node.ctx == astroid.Store:
supported_protocol = supports_setitem
msg = "unsupported-assignment-operation"
elif node.ctx == astroid.Del:
supported_protocol = supports_delitem
msg = "unsupported-delete-operation"
if isinstance(node.value, nodes.SetComp):
self.add_message(msg, args=node.value.as_string(), node=node.value)
return
if is_inside_abstract_class(node):
return
inferred = safe_infer(node.value)
if inferred is None or inferred is astroid.Uninferable:
return
if getattr(inferred, "decorators", None):
first_decorator = astroid.helpers.safe_infer(inferred.decorators.nodes[0])
if isinstance(first_decorator, nodes.ClassDef):
inferred = first_decorator.instantiate_class()
else:
return # It would be better to handle function
# decorators, but let's start slow.
if supported_protocol and not supported_protocol(inferred, node):
self.add_message(msg, args=node.value.as_string(), node=node.value)
@check_messages("dict-items-missing-iter")
def visit_for(self, node: nodes.For) -> None:
if not isinstance(node.target, nodes.Tuple):
# target is not a tuple
return
if not len(node.target.elts) == 2:
# target is not a tuple of two elements
return
iterable = node.iter
if not isinstance(iterable, nodes.Name):
# it's not a bare variable
return
inferred = safe_infer(iterable)
if not inferred:
return
if not isinstance(inferred, nodes.Dict):
# the iterable is not a dict
return
if all(isinstance(i[0], nodes.Tuple) for i in inferred.items):
# if all keys are tuples
return
self.add_message("dict-iter-missing-items", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,079 | _check_iterable | ref | function | self._check_iterable(stararg.value)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,081 | _check_mapping | ref | function | self._check_mapping(kwarg.value)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,083 | check_messages | ref | function | @check_messages("not-an-iterable")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,084 | visit_listcomp | def | function | def visit_listcomp(self, node: nodes.ListComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_dictcomp(self, node: nodes.DictComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_setcomp(self, node: nodes.SetComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("await-outside-async")
def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,086 | _check_iterable | ref | function | self._check_iterable(gen.iter, check_async=gen.is_async)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,088 | check_messages | ref | function | @check_messages("not-an-iterable")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,089 | visit_dictcomp | def | function | def visit_dictcomp(self, node: nodes.DictComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_setcomp(self, node: nodes.SetComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("await-outside-async")
def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,091 | _check_iterable | ref | function | self._check_iterable(gen.iter, check_async=gen.is_async)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,093 | check_messages | ref | function | @check_messages("not-an-iterable")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,094 | visit_setcomp | def | function | def visit_setcomp(self, node: nodes.SetComp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("not-an-iterable")
def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("await-outside-async")
def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,096 | _check_iterable | ref | function | self._check_iterable(gen.iter, check_async=gen.is_async)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,098 | check_messages | ref | function | @check_messages("not-an-iterable")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,099 | visit_generatorexp | def | function | def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
for gen in node.generators:
self._check_iterable(gen.iter, check_async=gen.is_async)
@check_messages("await-outside-async")
def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,101 | _check_iterable | ref | function | self._check_iterable(gen.iter, check_async=gen.is_async)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,103 | check_messages | ref | function | @check_messages("await-outside-async")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,104 | visit_await | def | function | def visit_await(self, node: nodes.Await) -> None:
self._check_await_outside_coroutine(node)
def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,105 | _check_await_outside_coroutine | ref | function | self._check_await_outside_coroutine(node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,107 | _check_await_outside_coroutine | def | function | def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
node_scope = node.scope()
while not isinstance(node_scope, nodes.Module):
if isinstance(node_scope, nodes.AsyncFunctionDef):
return
if isinstance(node_scope, nodes.FunctionDef):
break
node_scope = node_scope.parent.scope()
self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,108 | scope | ref | function | node_scope = node.scope()
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,114 | scope | ref | function | node_scope = node_scope.parent.scope()
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,115 | add_message | ref | function | self.add_message("await-outside-async", node=node)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,118 | register | def | function | def register(linter: "PyLinter") -> None:
linter.register_checker(TypeChecker(linter))
linter.register_checker(IterableChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,119 | register_checker | ref | function | linter.register_checker(TypeChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,119 | TypeChecker | ref | function | linter.register_checker(TypeChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,120 | register_checker | ref | function | linter.register_checker(IterableChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/typecheck.py | pylint/checkers/typecheck.py | 2,120 | IterableChecker | ref | function | linter.register_checker(IterableChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 53 | _BadChar | def | class | description human_code |
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 62 | description | def | function | def description(self) -> str:
"""Used for the detailed error message description."""
return (
f"Invalid unescaped character {self.name}, "
f'use "{self.escaped}" instead.'
)
def human_code(self) -> str:
"""Used to generate the human readable error message."""
return f"invalid-character-{self.name}"
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 69 | human_code | def | function | def human_code(self) -> str:
"""Used to generate the human readable error message."""
return f"invalid-character-{self.name}"
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 76 | _BadChar | ref | function | _BadChar(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 86 | _BadChar | ref | function | _BadChar(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 96 | _BadChar | ref | function | _BadChar(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 106 | _BadChar | ref | function | _BadChar(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 116 | _BadChar | ref | function | _BadChar(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 123 | _BadChar | ref | function | _BadChar(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 136 | _line_length | def | function | def _line_length(line: _StrLike, codec: str) -> int:
"""Get the length of a string like line as displayed in an editor."""
if isinstance(line, bytes):
decoded = _remove_bom(line, codec).decode(codec, "replace")
else:
decoded = line
stripped = decoded.rstrip("\n")
if stripped != decoded:
stripped = stripped.rstrip("\r")
return len(stripped)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 139 | _remove_bom | ref | function | decoded = _remove_bom(line, codec).decode(codec, "replace")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 151 | _map_positions_to_result | def | function | def _map_positions_to_result(
line: _StrLike,
search_dict: Dict[_StrLike, _BadChar],
new_line: _StrLike,
byte_str_length: int = 1,
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 209 | _normalize_codec_name | def | function | def _normalize_codec_name(codec: str) -> str:
"""Make sure the codec name is always given as defined in the BOM dict."""
return UTF_NAME_REGEX_COMPILED.sub(r"utf-\1\2", codec).lower()
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 214 | _remove_bom | def | function | def _remove_bom(encoded: bytes, encoding: str) -> bytes:
"""Remove the bom if given from a line."""
if not encoding.startswith("utf"):
return encoded
bom = UNICODE_BOMS[encoding]
if encoded.startswith(bom):
return encoded[len(bom) :]
return encoded
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 224 | _encode_without_bom | def | function | def _encode_without_bom(string: str, encoding: str) -> bytes:
"""Encode a string but remove the BOM."""
return _remove_bom(string.encode(encoding), encoding)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 226 | _remove_bom | ref | function | return _remove_bom(string.encode(encoding), encoding)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 229 | _byte_to_str_length | def | function | def _byte_to_str_length(codec: str) -> int:
"""Return how many byte are usually(!) a character point."""
if codec.startswith("utf-32"):
return 4
if codec.startswith("utf-16"):
return 2
return 1
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 240 | _cached_encode_search | def | function | def _cached_encode_search(string: str, encoding: str) -> bytes:
"""A cached version of encode used for search pattern."""
return _encode_without_bom(string, encoding)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 242 | _encode_without_bom | ref | function | return _encode_without_bom(string, encoding)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 245 | _fix_utf16_32_line_stream | def | function | def _fix_utf16_32_line_stream(steam: Iterable[bytes], codec: str) -> Iterable[bytes]:
"""Handle line ending for UTF16 and UTF32 correctly.
Currently Python simply strips the required zeros after \n after the
line ending. Leading to lines that can't be decoded propery
"""
if not codec.startswith("utf-16") and not codec.startswith("utf-32"):
yield from steam
else:
# First we get all the bytes in memory
content = b"".join(line for line in steam)
new_line = _cached_encode_search("\n", codec)
# Now we split the line by the real new line in the correct encoding
# we can't use split as it would strip the \n that we need
start = 0
while _True:
pos = content.find(new_line, start)
if pos >= 0:
yield content[start : pos + len(new_line)]
else:
# Yield the rest and finish
if content[start:]:
yield content[start:]
break
start = pos + len(new_line)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 257 | _cached_encode_search | ref | function | new_line = _cached_encode_search("\n", codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 275 | extract_codec_from_bom | def | function | def extract_codec_from_bom(first_line: bytes) -> str:
"""Try to extract the codec (unicode only) by checking for the BOM.
For details about BOM see https://unicode.org/faq/utf_bom.html#BOM
Args:
first_line: the first line of a file
Returns:
a codec name
Raises:
ValueError: if no codec was found
"""
for bom, codec in BOM_SORTED_TO_CODEC.items():
if first_line.startswith(bom):
return codec
raise ValueError("No BOM found. Could not detect Unicode codec.")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 296 | UnicodeChecker | def | class | _is_invalid_codec _is_unicode _find_line_matches _determine_codec _check_codec _check_invalid_chars _check_bidi_chars process_module |
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 365 | description | ref | function | bad_char.description(),
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 366 | human_code | ref | function | bad_char.human_code(),
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 374 | _is_invalid_codec | def | function | def _is_invalid_codec(codec: str) -> bool:
return codec.startswith("utf-16") or codec.startswith("utf-32")
@staticmethod
def _is_unicode(codec: str) -> bool:
return codec.startswith("utf")
@classmethod
def _find_line_matches(cls, line: bytes, codec: str) -> Dict[int, _BadChar]:
"""Find all matches of BAD_CHARS within line.
Args:
line: the input
codec: that will be used to convert line/or search string into
Return:
A dictionary with the column offset and the BadASCIIChar
"""
# We try to decode in Unicode to get the correct column offset
# if we would use bytes, it could be off because UTF-8 has no fixed length
try:
line_search = line.decode(codec, errors="strict")
search_dict = BAD_ASCII_SEARCH_DICT
return _map_positions_to_result(line_search, search_dict, "\n")
except UnicodeDecodeError:
# If we can't decode properly, we simply use bytes, even so the column offsets
# might be wrong a bit, but it is still better then nothing
line_search_byte = line
search_dict_byte: Dict[bytes, _BadChar] = {}
for char in BAD_CHARS:
# Some characters might not exist in all encodings
with contextlib.suppress(UnicodeDecodeError):
search_dict_byte[
_cached_encode_search(char.unescaped, codec)
] = char
return _map_positions_to_result(
line_search_byte,
search_dict_byte,
_cached_encode_search("\n", codec),
byte_str_length=_byte_to_str_length(codec),
)
@staticmethod
def _determine_codec(stream: io.BytesIO) -> Tuple[str, int]:
"""Determine the codec from the given stream.
first tries https://www.python.org/dev/peps/pep-0263/
and if this fails also checks for BOMs of UTF-16 and UTF-32
to be future-proof.
Args:
stream: The byte stream to analyse
Returns: A tuple consisting of:
- normalized codec name
- the line in which the codec was found
Raises:
SyntaxError: if failing to detect codec
"""
try:
# First try to detect encoding with PEP 263
# Doesn't work with UTF-16/32 at the time of writing
# see https://bugs.python.org/issue1503789
codec, lines = detect_encoding(stream.readline)
# lines are empty if UTF-8 BOM is found
codec_definition_line = len(lines) or 1
except SyntaxError as e:
# Codec could not be detected by Python, we try manually to check for
# UTF 16/32 BOMs, which aren't supported by Python at the time of writing.
# This is only included to be future save and handle these codecs as well
stream.seek(0)
try:
codec = extract_codec_from_bom(stream.readline())
codec_definition_line = 1
except ValueError as ve:
# Failed to detect codec, so the syntax error originated not from
# UTF16/32 codec usage. So simply raise the error again.
raise e from ve
return _normalize_codec_name(codec), codec_definition_line
def _check_codec(self, codec: str, codec_definition_line: int) -> None:
"""Check validity of the codec."""
if codec != "utf-8":
msg = "bad-file-encoding"
if self._is_invalid_codec(codec):
msg = "invalid-unicode-codec"
self.add_message(
msg,
# Currently Nodes will lead to crashes of pylint
# node=node,
line=codec_definition_line,
end_lineno=codec_definition_line,
confidence=pylint.interfaces.HIGH,
col_offset=None,
end_col_offset=None,
)
def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for chars considered bad."""
matches = self._find_line_matches(line, codec)
for col, char in matches.items():
self.add_message(
char.human_code(),
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
confidence=pylint.interfaces.HIGH,
col_offset=col + 1,
end_col_offset=col + len(char.unescaped) + 1,
)
def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 378 | _is_unicode | def | function | def _is_unicode(codec: str) -> bool:
return codec.startswith("utf")
@classmethod
def _find_line_matches(cls, line: bytes, codec: str) -> Dict[int, _BadChar]:
"""Find all matches of BAD_CHARS within line.
Args:
line: the input
codec: that will be used to convert line/or search string into
Return:
A dictionary with the column offset and the BadASCIIChar
"""
# We try to decode in Unicode to get the correct column offset
# if we would use bytes, it could be off because UTF-8 has no fixed length
try:
line_search = line.decode(codec, errors="strict")
search_dict = BAD_ASCII_SEARCH_DICT
return _map_positions_to_result(line_search, search_dict, "\n")
except UnicodeDecodeError:
# If we can't decode properly, we simply use bytes, even so the column offsets
# might be wrong a bit, but it is still better then nothing
line_search_byte = line
search_dict_byte: Dict[bytes, _BadChar] = {}
for char in BAD_CHARS:
# Some characters might not exist in all encodings
with contextlib.suppress(UnicodeDecodeError):
search_dict_byte[
_cached_encode_search(char.unescaped, codec)
] = char
return _map_positions_to_result(
line_search_byte,
search_dict_byte,
_cached_encode_search("\n", codec),
byte_str_length=_byte_to_str_length(codec),
)
@staticmethod
def _determine_codec(stream: io.BytesIO) -> Tuple[str, int]:
"""Determine the codec from the given stream.
first tries https://www.python.org/dev/peps/pep-0263/
and if this fails also checks for BOMs of UTF-16 and UTF-32
to be future-proof.
Args:
stream: The byte stream to analyse
Returns: A tuple consisting of:
- normalized codec name
- the line in which the codec was found
Raises:
SyntaxError: if failing to detect codec
"""
try:
# First try to detect encoding with PEP 263
# Doesn't work with UTF-16/32 at the time of writing
# see https://bugs.python.org/issue1503789
codec, lines = detect_encoding(stream.readline)
# lines are empty if UTF-8 BOM is found
codec_definition_line = len(lines) or 1
except SyntaxError as e:
# Codec could not be detected by Python, we try manually to check for
# UTF 16/32 BOMs, which aren't supported by Python at the time of writing.
# This is only included to be future save and handle these codecs as well
stream.seek(0)
try:
codec = extract_codec_from_bom(stream.readline())
codec_definition_line = 1
except ValueError as ve:
# Failed to detect codec, so the syntax error originated not from
# UTF16/32 codec usage. So simply raise the error again.
raise e from ve
return _normalize_codec_name(codec), codec_definition_line
def _check_codec(self, codec: str, codec_definition_line: int) -> None:
"""Check validity of the codec."""
if codec != "utf-8":
msg = "bad-file-encoding"
if self._is_invalid_codec(codec):
msg = "invalid-unicode-codec"
self.add_message(
msg,
# Currently Nodes will lead to crashes of pylint
# node=node,
line=codec_definition_line,
end_lineno=codec_definition_line,
confidence=pylint.interfaces.HIGH,
col_offset=None,
end_col_offset=None,
)
def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for chars considered bad."""
matches = self._find_line_matches(line, codec)
for col, char in matches.items():
self.add_message(
char.human_code(),
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
confidence=pylint.interfaces.HIGH,
col_offset=col + 1,
end_col_offset=col + len(char.unescaped) + 1,
)
def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 382 | _find_line_matches | def | function | def _find_line_matches(cls, line: bytes, codec: str) -> Dict[int, _BadChar]:
"""Find all matches of BAD_CHARS within line.
Args:
line: the input
codec: that will be used to convert line/or search string into
Return:
A dictionary with the column offset and the BadASCIIChar
"""
# We try to decode in Unicode to get the correct column offset
# if we would use bytes, it could be off because UTF-8 has no fixed length
try:
line_search = line.decode(codec, errors="strict")
search_dict = BAD_ASCII_SEARCH_DICT
return _map_positions_to_result(line_search, search_dict, "\n")
except UnicodeDecodeError:
# If we can't decode properly, we simply use bytes, even so the column offsets
# might be wrong a bit, but it is still better then nothing
line_search_byte = line
search_dict_byte: Dict[bytes, _BadChar] = {}
for char in BAD_CHARS:
# Some characters might not exist in all encodings
with contextlib.suppress(UnicodeDecodeError):
search_dict_byte[
_cached_encode_search(char.unescaped, codec)
] = char
return _map_positions_to_result(
line_search_byte,
search_dict_byte,
_cached_encode_search("\n", codec),
byte_str_length=_byte_to_str_length(codec),
)
@staticmethod
def _determine_codec(stream: io.BytesIO) -> Tuple[str, int]:
"""Determine the codec from the given stream.
first tries https://www.python.org/dev/peps/pep-0263/
and if this fails also checks for BOMs of UTF-16 and UTF-32
to be future-proof.
Args:
stream: The byte stream to analyse
Returns: A tuple consisting of:
- normalized codec name
- the line in which the codec was found
Raises:
SyntaxError: if failing to detect codec
"""
try:
# First try to detect encoding with PEP 263
# Doesn't work with UTF-16/32 at the time of writing
# see https://bugs.python.org/issue1503789
codec, lines = detect_encoding(stream.readline)
# lines are empty if UTF-8 BOM is found
codec_definition_line = len(lines) or 1
except SyntaxError as e:
# Codec could not be detected by Python, we try manually to check for
# UTF 16/32 BOMs, which aren't supported by Python at the time of writing.
# This is only included to be future save and handle these codecs as well
stream.seek(0)
try:
codec = extract_codec_from_bom(stream.readline())
codec_definition_line = 1
except ValueError as ve:
# Failed to detect codec, so the syntax error originated not from
# UTF16/32 codec usage. So simply raise the error again.
raise e from ve
return _normalize_codec_name(codec), codec_definition_line
def _check_codec(self, codec: str, codec_definition_line: int) -> None:
"""Check validity of the codec."""
if codec != "utf-8":
msg = "bad-file-encoding"
if self._is_invalid_codec(codec):
msg = "invalid-unicode-codec"
self.add_message(
msg,
# Currently Nodes will lead to crashes of pylint
# node=node,
line=codec_definition_line,
end_lineno=codec_definition_line,
confidence=pylint.interfaces.HIGH,
col_offset=None,
end_col_offset=None,
)
def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for chars considered bad."""
matches = self._find_line_matches(line, codec)
for col, char in matches.items():
self.add_message(
char.human_code(),
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
confidence=pylint.interfaces.HIGH,
col_offset=col + 1,
end_col_offset=col + len(char.unescaped) + 1,
)
def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 397 | _map_positions_to_result | ref | function | return _map_positions_to_result(line_search, search_dict, "\n")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 407 | _cached_encode_search | ref | function | _cached_encode_search(char.unescaped, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 410 | _map_positions_to_result | ref | function | return _map_positions_to_result(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 413 | _cached_encode_search | ref | function | _cached_encode_search("\n", codec),
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 414 | _byte_to_str_length | ref | function | byte_str_length=_byte_to_str_length(codec),
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 418 | _determine_codec | def | function | def _determine_codec(stream: io.BytesIO) -> Tuple[str, int]:
"""Determine the codec from the given stream.
first tries https://www.python.org/dev/peps/pep-0263/
and if this fails also checks for BOMs of UTF-16 and UTF-32
to be future-proof.
Args:
stream: The byte stream to analyse
Returns: A tuple consisting of:
- normalized codec name
- the line in which the codec was found
Raises:
SyntaxError: if failing to detect codec
"""
try:
# First try to detect encoding with PEP 263
# Doesn't work with UTF-16/32 at the time of writing
# see https://bugs.python.org/issue1503789
codec, lines = detect_encoding(stream.readline)
# lines are empty if UTF-8 BOM is found
codec_definition_line = len(lines) or 1
except SyntaxError as e:
# Codec could not be detected by Python, we try manually to check for
# UTF 16/32 BOMs, which aren't supported by Python at the time of writing.
# This is only included to be future save and handle these codecs as well
stream.seek(0)
try:
codec = extract_codec_from_bom(stream.readline())
codec_definition_line = 1
except ValueError as ve:
# Failed to detect codec, so the syntax error originated not from
# UTF16/32 codec usage. So simply raise the error again.
raise e from ve
return _normalize_codec_name(codec), codec_definition_line
def _check_codec(self, codec: str, codec_definition_line: int) -> None:
"""Check validity of the codec."""
if codec != "utf-8":
msg = "bad-file-encoding"
if self._is_invalid_codec(codec):
msg = "invalid-unicode-codec"
self.add_message(
msg,
# Currently Nodes will lead to crashes of pylint
# node=node,
line=codec_definition_line,
end_lineno=codec_definition_line,
confidence=pylint.interfaces.HIGH,
col_offset=None,
end_col_offset=None,
)
def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for chars considered bad."""
matches = self._find_line_matches(line, codec)
for col, char in matches.items():
self.add_message(
char.human_code(),
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
confidence=pylint.interfaces.HIGH,
col_offset=col + 1,
end_col_offset=col + len(char.unescaped) + 1,
)
def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 447 | seek | ref | function | stream.seek(0)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 449 | extract_codec_from_bom | ref | function | codec = extract_codec_from_bom(stream.readline())
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 449 | readline | ref | function | codec = extract_codec_from_bom(stream.readline())
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 456 | _normalize_codec_name | ref | function | return _normalize_codec_name(codec), codec_definition_line
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 458 | _check_codec | def | function | def _check_codec(self, codec: str, codec_definition_line: int) -> None:
"""Check validity of the codec."""
if codec != "utf-8":
msg = "bad-file-encoding"
if self._is_invalid_codec(codec):
msg = "invalid-unicode-codec"
self.add_message(
msg,
# Currently Nodes will lead to crashes of pylint
# node=node,
line=codec_definition_line,
end_lineno=codec_definition_line,
confidence=pylint.interfaces.HIGH,
col_offset=None,
end_col_offset=None,
)
def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for chars considered bad."""
matches = self._find_line_matches(line, codec)
for col, char in matches.items():
self.add_message(
char.human_code(),
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
confidence=pylint.interfaces.HIGH,
col_offset=col + 1,
end_col_offset=col + len(char.unescaped) + 1,
)
def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 462 | _is_invalid_codec | ref | function | if self._is_invalid_codec(codec):
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 464 | add_message | ref | function | self.add_message(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 475 | _check_invalid_chars | def | function | def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for chars considered bad."""
matches = self._find_line_matches(line, codec)
for col, char in matches.items():
self.add_message(
char.human_code(),
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
confidence=pylint.interfaces.HIGH,
col_offset=col + 1,
end_col_offset=col + len(char.unescaped) + 1,
)
def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 477 | _find_line_matches | ref | function | matches = self._find_line_matches(line, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 479 | add_message | ref | function | self.add_message(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 480 | human_code | ref | function | char.human_code(),
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 490 | _check_bidi_chars | def | function | def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
"""Look for Bidirectional Unicode, if we use unicode."""
if not self._is_unicode(codec):
return
for dangerous in BIDI_UNICODE:
if _cached_encode_search(dangerous, codec) in line:
# Note that we don't add a col_offset on purpose:
# Using these unicode characters it depends on the editor
# how it displays the location of characters in the line.
# So we mark the complete line.
self.add_message(
"bidirectional-unicode",
# Currently Nodes will lead to crashes of pylint
# node=node,
line=lineno,
end_lineno=lineno,
# We mark the complete line, as bidi controls make it hard
# to determine the correct cursor position within an editor
col_offset=0,
end_col_offset=_line_length(line, codec),
confidence=pylint.interfaces.HIGH,
)
# We look for bidirectional unicode only once per line
# as we mark the complete line anyway
break
def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 492 | _is_unicode | ref | function | if not self._is_unicode(codec):
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 495 | _cached_encode_search | ref | function | if _cached_encode_search(dangerous, codec) in line:
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 500 | add_message | ref | function | self.add_message(
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 509 | _line_length | ref | function | end_col_offset=_line_length(line, codec),
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 516 | process_module | def | function | def process_module(self, node: nodes.Module) -> None:
"""Perform the actual check by checking module stream."""
with node.stream() as stream:
codec, codec_line = self._determine_codec(stream)
self._check_codec(codec, codec_line)
stream.seek(0)
# Check for invalid content (controls/chars)
for (lineno, line) in enumerate(
_fix_utf16_32_line_stream(stream, codec), start=1
):
if lineno == 1:
line = _remove_bom(line, codec)
self._check_bidi_chars(line, lineno, codec)
self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 518 | stream | ref | function | with node.stream() as stream:
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 519 | _determine_codec | ref | function | codec, codec_line = self._determine_codec(stream)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 520 | _check_codec | ref | function | self._check_codec(codec, codec_line)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 522 | seek | ref | function | stream.seek(0)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 526 | _fix_utf16_32_line_stream | ref | function | _fix_utf16_32_line_stream(stream, codec), start=1
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 529 | _remove_bom | ref | function | line = _remove_bom(line, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 530 | _check_bidi_chars | ref | function | self._check_bidi_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 531 | _check_invalid_chars | ref | function | self._check_invalid_chars(line, lineno, codec)
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 535 | register_checker | ref | function | linter.register_checker(UnicodeChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unicode.py | pylint/checkers/unicode.py | 535 | UnicodeChecker | ref | function | linter.register_checker(UnicodeChecker(linter))
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unsupported_version.py | pylint/checkers/unsupported_version.py | 29 | UnsupportedVersionChecker | def | class | open visit_joinedstr visit_decorators _check_typing_final |
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unsupported_version.py | pylint/checkers/unsupported_version.py | 53 | get_global_option | ref | function | py_version = get_global_option(self, "py-version")
|
playground/e9b22a58-260b-483f-88d7-7a5fe9f8b1d4/pylint/pylint/checkers/unsupported_version.py | pylint/checkers/unsupported_version.py | 57 | check_messages | ref | function | @check_messages("using-f-string-in-unsupported-version")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.