language stringclasses 1
value | repo stringclasses 346
values | path stringlengths 6 201 | class_span dict | source stringlengths 21 2.38M | target stringlengths 1 96 |
|---|---|---|---|---|---|
python | ray-project__ray | doc/source/serve/doc_code/monitoring/custom_metric_snippet.py | {
"start": 113,
"end": 819
} | class ____:
def __init__(self):
self.num_requests = 0
self.my_counter = metrics.Counter(
"my_counter",
description=("The number of odd-numbered requests to this deployment."),
tag_keys=("model",),
)
self.my_counter.set_default_tags({"model": "123"})
def __call__(self):
self.num_requests += 1
if self.num_requests % 2 == 1:
self.my_counter.inc()
my_deployment = MyDeployment.bind()
serve.run(my_deployment)
while True:
requests.get("http://localhost:8000/")
time.sleep(1)
# __end__
break
response = requests.get("http://localhost:8000/")
assert response.status_code == 200
| MyDeployment |
python | python__mypy | mypy/plugins/functools.py | {
"start": 934,
"end": 15282
} | class ____(NamedTuple):
is_static: bool
type: CallableType
def functools_total_ordering_maker_callback(
ctx: mypy.plugin.ClassDefContext, auto_attribs_default: bool = False
) -> bool:
"""Add dunder methods to classes decorated with functools.total_ordering."""
comparison_methods = _analyze_class(ctx)
if not comparison_methods:
ctx.api.fail(
'No ordering operation defined when using "functools.total_ordering": < > <= >=',
ctx.reason,
)
return True
# prefer __lt__ to __le__ to __gt__ to __ge__
root = max(comparison_methods, key=lambda k: (comparison_methods[k] is None, k))
root_method = comparison_methods[root]
if not root_method:
# None of the defined comparison methods can be analysed
return True
other_type = _find_other_type(root_method)
bool_type = ctx.api.named_type("builtins.bool")
ret_type: Type = bool_type
if root_method.type.ret_type != ctx.api.named_type("builtins.bool"):
proper_ret_type = get_proper_type(root_method.type.ret_type)
if not (
isinstance(proper_ret_type, UnboundType)
and proper_ret_type.name.split(".")[-1] == "bool"
):
ret_type = AnyType(TypeOfAny.implementation_artifact)
for additional_op in _ORDERING_METHODS:
# Either the method is not implemented
# or has an unknown signature that we can now extrapolate.
if not comparison_methods.get(additional_op):
args = [Argument(Var("other", other_type), other_type, None, ARG_POS)]
add_method_to_class(ctx.api, ctx.cls, additional_op, args, ret_type)
return True
def _find_other_type(method: _MethodInfo) -> Type:
"""Find the type of the ``other`` argument in a comparison method."""
first_arg_pos = 0 if method.is_static else 1
cur_pos_arg = 0
other_arg = None
for arg_kind, arg_type in zip(method.type.arg_kinds, method.type.arg_types):
if arg_kind.is_positional():
if cur_pos_arg == first_arg_pos:
other_arg = arg_type
break
cur_pos_arg += 1
elif arg_kind != ARG_STAR2:
other_arg = arg_type
break
if other_arg is None:
return AnyType(TypeOfAny.implementation_artifact)
return other_arg
def _analyze_class(ctx: mypy.plugin.ClassDefContext) -> dict[str, _MethodInfo | None]:
"""Analyze the class body, its parents, and return the comparison methods found."""
# Traverse the MRO and collect ordering methods.
comparison_methods: dict[str, _MethodInfo | None] = {}
# Skip object because total_ordering does not use methods from object
for cls in ctx.cls.info.mro[:-1]:
for name in _ORDERING_METHODS:
if name in cls.names and name not in comparison_methods:
node = cls.names[name].node
if isinstance(node, SYMBOL_FUNCBASE_TYPES) and isinstance(node.type, CallableType):
comparison_methods[name] = _MethodInfo(node.is_static, node.type)
continue
if isinstance(node, Var):
proper_type = get_proper_type(node.type)
if isinstance(proper_type, CallableType):
comparison_methods[name] = _MethodInfo(node.is_staticmethod, proper_type)
continue
comparison_methods[name] = None
return comparison_methods
def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type:
"""Infer a more precise return type for functools.partial"""
if not isinstance(ctx.api, mypy.checker.TypeChecker): # use internals
return ctx.default_return_type
if len(ctx.arg_types) != 3: # fn, *args, **kwargs
return ctx.default_return_type
if len(ctx.arg_types[0]) != 1:
return ctx.default_return_type
if isinstance(get_proper_type(ctx.arg_types[0][0]), Overloaded):
# TODO: handle overloads, just fall back to whatever the non-plugin code does
return ctx.default_return_type
return handle_partial_with_callee(ctx, callee=ctx.arg_types[0][0])
def handle_partial_with_callee(ctx: mypy.plugin.FunctionContext, callee: Type) -> Type:
if not isinstance(ctx.api, mypy.checker.TypeChecker): # use internals
return ctx.default_return_type
if isinstance(callee_proper := get_proper_type(callee), UnionType):
return UnionType.make_union(
[handle_partial_with_callee(ctx, item) for item in callee_proper.items]
)
fn_type = ctx.api.extract_callable_type(callee, ctx=ctx.default_return_type)
if fn_type is None:
return ctx.default_return_type
# We must normalize from the start to have coherent view together with TypeChecker.
fn_type = fn_type.with_unpacked_kwargs().with_normalized_var_args()
last_context = ctx.api.type_context[-1]
if not fn_type.is_type_obj():
# We wrap the return type to get use of a possible type context provided by caller.
# We cannot do this in case of class objects, since otherwise the plugin may get
# falsely triggered when evaluating the constructed call itself.
ret_type: Type = ctx.api.named_generic_type(PARTIAL, [fn_type.ret_type])
wrapped_return = True
else:
ret_type = fn_type.ret_type
# Instead, for class objects we ignore any type context to avoid spurious errors,
# since the type context will be partial[X] etc., not X.
ctx.api.type_context[-1] = None
wrapped_return = False
# Flatten actual to formal mapping, since this is what check_call() expects.
actual_args = []
actual_arg_kinds = []
actual_arg_names = []
actual_types = []
seen_args = set()
for i, param in enumerate(ctx.args[1:], start=1):
for j, a in enumerate(param):
if a in seen_args:
# Same actual arg can map to multiple formals, but we need to include
# each one only once.
continue
# Here we rely on the fact that expressions are essentially immutable, so
# they can be compared by identity.
seen_args.add(a)
actual_args.append(a)
actual_arg_kinds.append(ctx.arg_kinds[i][j])
actual_arg_names.append(ctx.arg_names[i][j])
actual_types.append(ctx.arg_types[i][j])
formal_to_actual = map_actuals_to_formals(
actual_kinds=actual_arg_kinds,
actual_names=actual_arg_names,
formal_kinds=fn_type.arg_kinds,
formal_names=fn_type.arg_names,
actual_arg_type=lambda i: actual_types[i],
)
# We need to remove any type variables that appear only in formals that have
# no actuals, to avoid eagerly binding them in check_call() below.
can_infer_ids = set()
for i, arg_type in enumerate(fn_type.arg_types):
if not formal_to_actual[i]:
continue
can_infer_ids.update({tv.id for tv in get_all_type_vars(arg_type)})
# special_sig="partial" allows omission of args/kwargs typed with ParamSpec
defaulted = fn_type.copy_modified(
arg_kinds=[
(
ArgKind.ARG_OPT
if k == ArgKind.ARG_POS
else (ArgKind.ARG_NAMED_OPT if k == ArgKind.ARG_NAMED else k)
)
for k in fn_type.arg_kinds
],
ret_type=ret_type,
variables=[
tv
for tv in fn_type.variables
# Keep TypeVarTuple/ParamSpec to avoid spurious errors on empty args.
if tv.id in can_infer_ids or not isinstance(tv, TypeVarType)
],
special_sig="partial",
)
if defaulted.line < 0:
# Make up a line number if we don't have one
defaulted.set_line(ctx.default_return_type)
# Create a valid context for various ad-hoc inspections in check_call().
call_expr = CallExpr(
callee=ctx.args[0][0],
args=actual_args,
arg_kinds=actual_arg_kinds,
arg_names=actual_arg_names,
analyzed=ctx.context.analyzed if isinstance(ctx.context, CallExpr) else None,
)
call_expr.set_line(ctx.context)
_, bound = ctx.api.expr_checker.check_call(
callee=defaulted,
args=actual_args,
arg_kinds=actual_arg_kinds,
arg_names=actual_arg_names,
context=call_expr,
)
if not wrapped_return:
# Restore previously ignored context.
ctx.api.type_context[-1] = last_context
bound = get_proper_type(bound)
if not isinstance(bound, CallableType):
return ctx.default_return_type
if wrapped_return:
# Reverse the wrapping we did above.
ret_type = get_proper_type(bound.ret_type)
if not isinstance(ret_type, Instance) or ret_type.type.fullname != PARTIAL:
return ctx.default_return_type
bound = bound.copy_modified(ret_type=ret_type.args[0])
partial_kinds = []
partial_types = []
partial_names = []
# We need to fully apply any positional arguments (they cannot be respecified)
# However, keyword arguments can be respecified, so just give them a default
for i, actuals in enumerate(formal_to_actual):
if len(bound.arg_types) == len(fn_type.arg_types):
arg_type = bound.arg_types[i]
if not mypy.checker.is_valid_inferred_type(arg_type, ctx.api.options):
arg_type = fn_type.arg_types[i] # bit of a hack
else:
# TODO: I assume that bound and fn_type have the same arguments. It appears this isn't
# true when PEP 646 things are happening. See testFunctoolsPartialTypeVarTuple
arg_type = fn_type.arg_types[i]
if not actuals or fn_type.arg_kinds[i] in (ArgKind.ARG_STAR, ArgKind.ARG_STAR2):
partial_kinds.append(fn_type.arg_kinds[i])
partial_types.append(arg_type)
partial_names.append(fn_type.arg_names[i])
else:
assert actuals
if any(actual_arg_kinds[j] in (ArgKind.ARG_POS, ArgKind.ARG_STAR) for j in actuals):
# Don't add params for arguments passed positionally
continue
# Add defaulted params for arguments passed via keyword
kind = actual_arg_kinds[actuals[0]]
if kind == ArgKind.ARG_NAMED or kind == ArgKind.ARG_STAR2:
kind = ArgKind.ARG_NAMED_OPT
partial_kinds.append(kind)
partial_types.append(arg_type)
partial_names.append(fn_type.arg_names[i])
ret_type = bound.ret_type
if not mypy.checker.is_valid_inferred_type(ret_type, ctx.api.options):
ret_type = fn_type.ret_type # same kind of hack as above
partially_applied = fn_type.copy_modified(
arg_types=partial_types,
arg_kinds=partial_kinds,
arg_names=partial_names,
ret_type=ret_type,
special_sig="partial",
)
# Do not leak typevars from generic functions - they cannot be usable.
# Keep them in the wrapped callable, but avoid `partial[SomeStrayTypeVar]`
erased_ret_type = erase_typevars(ret_type, [tv.id for tv in fn_type.variables])
ret = ctx.api.named_generic_type(PARTIAL, [erased_ret_type])
ret = ret.copy_with_extra_attr("__mypy_partial", partially_applied)
if partially_applied.param_spec():
assert ret.extra_attrs is not None # copy_with_extra_attr above ensures this
attrs = ret.extra_attrs.copy()
if ArgKind.ARG_STAR in actual_arg_kinds:
attrs.immutable.add("__mypy_partial_paramspec_args_bound")
if ArgKind.ARG_STAR2 in actual_arg_kinds:
attrs.immutable.add("__mypy_partial_paramspec_kwargs_bound")
ret.extra_attrs = attrs
return ret
def partial_call_callback(ctx: mypy.plugin.MethodContext) -> Type:
"""Infer a more precise return type for functools.partial.__call__."""
if (
not isinstance(ctx.api, mypy.checker.TypeChecker) # use internals
or not isinstance(ctx.type, Instance)
or ctx.type.type.fullname != PARTIAL
or not ctx.type.extra_attrs
or "__mypy_partial" not in ctx.type.extra_attrs.attrs
):
return ctx.default_return_type
extra_attrs = ctx.type.extra_attrs
partial_type = get_proper_type(extra_attrs.attrs["__mypy_partial"])
if len(ctx.arg_types) != 2: # *args, **kwargs
return ctx.default_return_type
# See comments for similar actual to formal code above
actual_args = []
actual_arg_kinds = []
actual_arg_names = []
seen_args = set()
for i, param in enumerate(ctx.args):
for j, a in enumerate(param):
if a in seen_args:
continue
seen_args.add(a)
actual_args.append(a)
actual_arg_kinds.append(ctx.arg_kinds[i][j])
actual_arg_names.append(ctx.arg_names[i][j])
result, _ = ctx.api.expr_checker.check_call(
callee=partial_type,
args=actual_args,
arg_kinds=actual_arg_kinds,
arg_names=actual_arg_names,
context=ctx.context,
)
if not isinstance(partial_type, CallableType) or partial_type.param_spec() is None:
return result
args_bound = "__mypy_partial_paramspec_args_bound" in extra_attrs.immutable
kwargs_bound = "__mypy_partial_paramspec_kwargs_bound" in extra_attrs.immutable
passed_paramspec_parts = [
arg.node.type
for arg in actual_args
if isinstance(arg, NameExpr)
and isinstance(arg.node, Var)
and isinstance(arg.node.type, ParamSpecType)
]
# ensure *args: P.args
args_passed = any(part.flavor == ParamSpecFlavor.ARGS for part in passed_paramspec_parts)
if not args_bound and not args_passed:
ctx.api.expr_checker.msg.too_few_arguments(partial_type, ctx.context, actual_arg_names)
elif args_bound and args_passed:
ctx.api.expr_checker.msg.too_many_arguments(partial_type, ctx.context)
# ensure **kwargs: P.kwargs
kwargs_passed = any(part.flavor == ParamSpecFlavor.KWARGS for part in passed_paramspec_parts)
if not kwargs_bound and not kwargs_passed:
ctx.api.expr_checker.msg.too_few_arguments(partial_type, ctx.context, actual_arg_names)
return result
| _MethodInfo |
python | tensorflow__tensorflow | tensorflow/python/autograph/pyct/templates.py | {
"start": 3260,
"end": 9510
} | class ____(gast.NodeTransformer):
"""Replace AST nodes."""
def __init__(self, replacements):
"""Create a new ReplaceTransformer.
Args:
replacements: A mapping from placeholder names to (lists of) AST nodes
that these placeholders will be replaced by.
"""
self.replacements = replacements
self.in_replacements = False
self.preserved_annos = {
anno.Basic.DIRECTIVES,
anno.Basic.EXTRA_LOOP_TEST,
anno.Basic.ORIGIN,
anno.Basic.SKIP_PROCESSING,
anno.Static.ORIG_DEFINITIONS,
'function_context_name',
}
def _prepare_replacement(self, replaced, key):
"""Prepares a replacement AST that's safe to swap in for a node.
Args:
replaced: ast.AST, the node being replaced
key: Hashable, the key of the replacement AST
Returns:
ast.AST, the replacement AST
"""
repl = self.replacements[key]
new_nodes = ast_util.copy_clean(repl, preserve_annos=self.preserved_annos)
if isinstance(new_nodes, gast.AST):
new_nodes = [new_nodes]
return new_nodes
def visit_Expr(self, node):
# When replacing a placeholder with an entire statement, the replacement
# must stand on its own and not be wrapped in an Expr.
new_value = self.visit(node.value)
if new_value is node.value:
return node
return new_value
def visit_keyword(self, node):
if node.arg not in self.replacements:
return self.generic_visit(node)
repl = self._prepare_replacement(node, node.arg)
if isinstance(repl, gast.keyword):
return repl
elif (repl and isinstance(repl, (list, tuple)) and
all(isinstance(r, gast.keyword) for r in repl)):
return repl
# TODO(mdan): We may allow replacing with a string as well.
# For example, if one wanted to replace foo with bar in foo=baz, then
# we could allow changing just node arg, so that we end up with bar=baz.
raise ValueError(
'a keyword argument may only be replaced by another keyword or a '
'non-empty list of keywords. Found: {} for keyword {}'.format(
repl, node.arg))
def visit_FunctionDef(self, node):
node = self.generic_visit(node)
if node.name not in self.replacements:
return node
repl = self.replacements[node.name]
if not isinstance(repl, (gast.Name, ast.Name)):
raise ValueError(
'a function name can only be replaced by a Name node. Found: %s' %
repl)
node.name = repl.id
return node
def visit_Attribute(self, node):
node = self.generic_visit(node)
if node.attr not in self.replacements:
return node
repl = self.replacements[node.attr]
if not isinstance(repl, gast.Name):
raise ValueError(
'An attribute can only be replaced by a Name node. Found: %s' % repl)
node.attr = repl.id
return node
def visit_Name(self, node):
if node.id not in self.replacements:
return node
new_nodes = self._prepare_replacement(node, node.id)
if not new_nodes:
return new_nodes
# Preserve the target context.
adjuster = ContextAdjuster(type(node.ctx))
for n in new_nodes:
if hasattr(n, 'ctx'):
adjuster.visit(n)
if len(new_nodes) == 1:
new_nodes, = new_nodes
return new_nodes
def _convert_to_ast(n):
"""Converts from a known data type to AST."""
# Note: When generating AST nodes from strings/QNs in isolation, ctx is
# unknown. ctx must be filled in according to the template being used.
# See ReplaceTransformer.visit_Name.
if isinstance(n, str):
return gast.Name(id=n, ctx=None, annotation=None, type_comment=None)
if isinstance(n, qual_names.QN):
return n.ast()
if isinstance(n, list):
return [_convert_to_ast(e) for e in n]
if isinstance(n, tuple):
return tuple(_convert_to_ast(e) for e in n)
return n
def replace(template, **replacements):
"""Replaces placeholders in a Python template.
AST Name and Tuple nodes always receive the context that inferred from
the template. However, when replacing more complex nodes (that can potentially
contain Name children), then the caller is responsible for setting the
appropriate context.
Args:
template: A string representing Python code. Any symbol name can be used
that appears in the template code can be used as placeholder.
**replacements: A mapping from placeholder names to (lists of) AST nodes
that these placeholders will be replaced by. String values are also
supported as a shorthand for AST Name nodes with the respective ID.
Returns:
An AST node or list of AST nodes with the replacements made. If the
template was a function, a list will be returned. If the template was a
node, the same node will be returned. If the template was a string, an
AST node will be returned (a `Module` node in the case of a multi-line
string, an `Expr` node otherwise).
Raises:
ValueError: if the arguments are incorrect.
"""
if not isinstance(template, str):
raise ValueError('Expected string template, got %s' % type(template))
for k in replacements:
replacements[k] = _convert_to_ast(replacements[k])
template_str = parser.STANDARD_PREAMBLE + textwrap.dedent(template)
nodes = parser.parse(
template_str,
preamble_len=parser.STANDARD_PREAMBLE_LEN,
single_node=False)
results = []
for node in nodes:
node = ReplaceTransformer(replacements).visit(node)
if isinstance(node, (list, tuple)):
results.extend(node)
else:
results.append(node)
results = [qual_names.resolve(r) for r in results]
return results
def replace_as_expression(template, **replacements):
"""Variant of replace that generates expressions, instead of code blocks."""
replacement = replace(template, **replacements)
if len(replacement) != 1:
raise ValueError(
'single expression expected; for more general templates use replace')
node, = replacement
if isinstance(node, gast.Expr):
return node.value
elif isinstance(node, gast.Name):
return node
raise ValueError(
'the template is expected to generate an expression or a name node;'
' instead found %s' % node)
| ReplaceTransformer |
python | tensorflow__tensorflow | tensorflow/python/module/module_test.py | {
"start": 19564,
"end": 19929
} | class ____(module.Module):
def __init__(self, create_child=True, container_type=list):
super().__init__()
self.z = MemberType()
self.a = container_type([MemberType(), MemberType()])
if create_child:
self.c = SimpleModule(create_child=False)
is_member = lambda v: isinstance(v, MemberType)
if __name__ == "__main__":
test.main()
| SimpleModule |
python | tiangolo__fastapi | scripts/contributors.py | {
"start": 1634,
"end": 1712
} | class ____(BaseModel):
cursor: str
node: PullRequestNode
| PullRequestEdge |
python | ray-project__ray | python/ray/_private/thirdparty/pynvml/pynvml.py | {
"start": 99879,
"end": 100603
} | class ____(_PrintableStructure):
_fields_ = [("version", c_uint),
("id", c_uint),
("sliceCount", c_uint),
("instanceCount", c_uint),
("multiprocessorCount", c_uint),
("sharedCopyEngineCount", c_uint),
("sharedDecoderCount", c_uint),
("sharedEncoderCount", c_uint),
("sharedJpegCount", c_uint),
("sharedOfaCount", c_uint),
("name", c_char * NVML_DEVICE_NAME_V2_BUFFER_SIZE)
]
def __init__(self):
super(c_nvmlComputeInstanceProfileInfo_v2_t, self).__init__(version=nvmlComputeInstanceProfileInfo_v2)
| c_nvmlComputeInstanceProfileInfo_v2_t |
python | tensorflow__tensorflow | tensorflow/python/training/optimizer.py | {
"start": 4176,
"end": 5200
} | class ____(_OptimizableVariable):
"""Processor for Variable."""
def __init__(self, v):
self._v = v
def __str__(self):
return "<_RefVariableProcessor(%s)>" % self._v
def target(self):
return self._v._ref() # pylint: disable=protected-access
def update_op(self, optimizer, g):
if isinstance(g, tensor.Tensor):
update_op = optimizer._apply_dense(g, self._v) # pylint: disable=protected-access
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
else:
assert isinstance(g, indexed_slices.IndexedSlices), (
"Gradient ", g, " is neither a tensor nor IndexedSlices.")
if self._v.constraint is not None:
raise RuntimeError(
"Cannot use a constraint function on a sparse variable.")
# pylint: disable=protected-access
return optimizer._apply_sparse_duplicate_indices(g, self._v)
| _RefVariableProcessor |
python | sphinx-doc__sphinx | sphinx/ext/graphviz.py | {
"start": 1306,
"end": 1374
} | class ____(SphinxError):
category = 'Graphviz error'
| GraphvizError |
python | anthropics__anthropic-sdk-python | src/anthropic/types/beta/beta_code_execution_tool_result_error.py | {
"start": 302,
"end": 461
} | class ____(BaseModel):
error_code: BetaCodeExecutionToolResultErrorCode
type: Literal["code_execution_tool_result_error"]
| BetaCodeExecutionToolResultError |
python | joke2k__faker | faker/providers/phone_number/no_NO/__init__.py | {
"start": 49,
"end": 328
} | class ____(PhoneNumberProvider):
formats = (
"+47########",
"+47 ## ## ## ##",
"## ## ## ##",
"## ## ## ##",
"########",
"########",
"9## ## ###",
"4## ## ###",
"9#######",
"4#######",
)
| Provider |
python | langchain-ai__langchain | libs/core/langchain_core/callbacks/manager.py | {
"start": 32660,
"end": 34489
} | class ____(AsyncParentRunManager, ToolManagerMixin):
"""Async callback manager for tool run."""
def get_sync(self) -> CallbackManagerForToolRun:
"""Get the equivalent sync RunManager.
Returns:
The sync RunManager.
"""
return CallbackManagerForToolRun(
run_id=self.run_id,
handlers=self.handlers,
inheritable_handlers=self.inheritable_handlers,
parent_run_id=self.parent_run_id,
tags=self.tags,
inheritable_tags=self.inheritable_tags,
metadata=self.metadata,
inheritable_metadata=self.inheritable_metadata,
)
async def on_tool_end(self, output: Any, **kwargs: Any) -> None:
"""Async run when the tool ends running.
Args:
output: The output of the tool.
**kwargs: Additional keyword arguments.
"""
if not self.handlers:
return
await ahandle_event(
self.handlers,
"on_tool_end",
"ignore_agent",
output,
run_id=self.run_id,
parent_run_id=self.parent_run_id,
tags=self.tags,
**kwargs,
)
async def on_tool_error(
self,
error: BaseException,
**kwargs: Any,
) -> None:
"""Run when tool errors.
Args:
error: The error.
**kwargs: Additional keyword arguments.
"""
if not self.handlers:
return
await ahandle_event(
self.handlers,
"on_tool_error",
"ignore_agent",
error,
run_id=self.run_id,
parent_run_id=self.parent_run_id,
tags=self.tags,
**kwargs,
)
| AsyncCallbackManagerForToolRun |
python | has2k1__plotnine | plotnine/themes/themeable.py | {
"start": 65481,
"end": 65544
} | class ____(legend_key_spacing_y):
pass
| legend_entry_spacing_y |
python | PyCQA__bandit | tests/unit/formatters/test_screen.py | {
"start": 383,
"end": 8477
} | class ____(testtools.TestCase):
def setUp(self):
super().setUp()
@mock.patch("bandit.core.issue.Issue.get_code")
def test_output_issue(self, get_code):
issue = _get_issue_instance()
get_code.return_value = "DDDDDDD"
indent_val = "CCCCCCC"
def _template(_issue, _indent_val, _code, _color):
return_val = [
"{}{}>> Issue: [{}:{}] {}".format(
_indent_val,
_color,
_issue.test_id,
_issue.test,
_issue.text,
),
"{} Severity: {} Confidence: {}".format(
_indent_val,
_issue.severity.capitalize(),
_issue.confidence.capitalize(),
),
f"{_indent_val} CWE: {_issue.cwe}",
f"{_indent_val} More Info: "
f"{docs_utils.get_url(_issue.test_id)}",
"{} Location: {}:{}:{}{}".format(
_indent_val,
_issue.fname,
_issue.lineno,
_issue.col_offset,
screen.COLOR["DEFAULT"],
),
]
if _code:
return_val.append(f"{_indent_val}{_code}")
return "\n".join(return_val)
issue_text = screen._output_issue_str(issue, indent_val)
expected_return = _template(
issue, indent_val, "DDDDDDD", screen.COLOR["MEDIUM"]
)
self.assertEqual(expected_return, issue_text)
issue_text = screen._output_issue_str(
issue, indent_val, show_code=False
)
expected_return = _template(
issue, indent_val, "", screen.COLOR["MEDIUM"]
)
self.assertEqual(expected_return, issue_text)
issue.lineno = ""
issue.col_offset = ""
issue_text = screen._output_issue_str(
issue, indent_val, show_lineno=False
)
expected_return = _template(
issue, indent_val, "DDDDDDD", screen.COLOR["MEDIUM"]
)
self.assertEqual(expected_return, issue_text)
@mock.patch("bandit.core.manager.BanditManager.get_issue_list")
def test_no_issues(self, get_issue_list):
conf = config.BanditConfig()
self.manager = manager.BanditManager(conf, "file")
(tmp_fd, self.tmp_fname) = tempfile.mkstemp()
self.manager.out_file = self.tmp_fname
get_issue_list.return_value = collections.OrderedDict()
with mock.patch("bandit.formatters.screen.do_print") as m:
with open(self.tmp_fname, "w") as tmp_file:
screen.report(
self.manager, tmp_file, bandit.LOW, bandit.LOW, lines=5
)
self.assertIn(
"No issues identified.",
"\n".join([str(a) for a in m.call_args]),
)
@mock.patch("bandit.core.manager.BanditManager.get_issue_list")
def test_report_nobaseline(self, get_issue_list):
conf = config.BanditConfig()
self.manager = manager.BanditManager(conf, "file")
(tmp_fd, self.tmp_fname) = tempfile.mkstemp()
self.manager.out_file = self.tmp_fname
self.manager.verbose = True
self.manager.files_list = ["binding.py"]
self.manager.scores = [
{"SEVERITY": [0, 0, 0, 1], "CONFIDENCE": [0, 0, 0, 1]}
]
self.manager.skipped = [("abc.py", "File is bad")]
self.manager.excluded_files = ["def.py"]
issue_a = _get_issue_instance()
issue_b = _get_issue_instance()
get_issue_list.return_value = [issue_a, issue_b]
self.manager.metrics.data["_totals"] = {"loc": 1000, "nosec": 50}
for category in ["SEVERITY", "CONFIDENCE"]:
for level in ["UNDEFINED", "LOW", "MEDIUM", "HIGH"]:
self.manager.metrics.data["_totals"][f"{category}.{level}"] = 1
# Validate that we're outputting the correct issues
output_str_fn = "bandit.formatters.screen._output_issue_str"
with mock.patch(output_str_fn) as output_str:
output_str.return_value = "ISSUE_OUTPUT_TEXT"
with open(self.tmp_fname, "w") as tmp_file:
screen.report(
self.manager, tmp_file, bandit.LOW, bandit.LOW, lines=5
)
calls = [
mock.call(issue_a, "", lines=5),
mock.call(issue_b, "", lines=5),
]
output_str.assert_has_calls(calls, any_order=True)
# Validate that we're outputting all of the expected fields and the
# correct values
with mock.patch("bandit.formatters.screen.do_print") as m:
with open(self.tmp_fname, "w") as tmp_file:
screen.report(
self.manager, tmp_file, bandit.LOW, bandit.LOW, lines=5
)
data = "\n".join([str(a) for a in m.call_args[0][0]])
expected = "Run started"
self.assertIn(expected, data)
expected_items = [
screen.header("Files in scope (1):"),
"\n\tbinding.py (score: {SEVERITY: 1, CONFIDENCE: 1})",
]
for item in expected_items:
self.assertIn(item, data)
expected = screen.header("Files excluded (1):") + "\n\tdef.py"
self.assertIn(expected, data)
expected = (
"Total lines of code: 1000\n\tTotal lines skipped "
"(#nosec): 50"
)
self.assertIn(expected, data)
expected = (
"Total issues (by severity):\n\t\tUndefined: 1\n\t\t"
"Low: 1\n\t\tMedium: 1\n\t\tHigh: 1"
)
self.assertIn(expected, data)
expected = (
"Total issues (by confidence):\n\t\tUndefined: 1\n\t\t"
"Low: 1\n\t\tMedium: 1\n\t\tHigh: 1"
)
self.assertIn(expected, data)
expected = (
screen.header("Files skipped (1):")
+ "\n\tabc.py (File is bad)"
)
self.assertIn(expected, data)
@mock.patch("bandit.core.manager.BanditManager.get_issue_list")
def test_report_baseline(self, get_issue_list):
conf = config.BanditConfig()
self.manager = manager.BanditManager(conf, "file")
(tmp_fd, self.tmp_fname) = tempfile.mkstemp()
self.manager.out_file = self.tmp_fname
issue_a = _get_issue_instance()
issue_b = _get_issue_instance()
issue_x = _get_issue_instance()
issue_x.fname = "x"
issue_y = _get_issue_instance()
issue_y.fname = "y"
issue_z = _get_issue_instance()
issue_z.fname = "z"
get_issue_list.return_value = collections.OrderedDict(
[(issue_a, [issue_x]), (issue_b, [issue_y, issue_z])]
)
# Validate that we're outputting the correct issues
indent_val = " " * 10
output_str_fn = "bandit.formatters.screen._output_issue_str"
with mock.patch(output_str_fn) as output_str:
output_str.return_value = "ISSUE_OUTPUT_TEXT"
with open(self.tmp_fname, "w") as tmp_file:
screen.report(
self.manager, tmp_file, bandit.LOW, bandit.LOW, lines=5
)
calls = [
mock.call(issue_a, "", lines=5),
mock.call(issue_b, "", show_code=False, show_lineno=False),
mock.call(issue_y, indent_val, lines=5),
mock.call(issue_z, indent_val, lines=5),
]
output_str.assert_has_calls(calls, any_order=True)
def _get_issue_instance(
severity=bandit.MEDIUM, cwe=123, confidence=bandit.MEDIUM
):
new_issue = issue.Issue(severity, cwe, confidence, "Test issue")
new_issue.fname = "code.py"
new_issue.test = "bandit_plugin"
new_issue.lineno = 1
return new_issue
| ScreenFormatterTests |
python | allegroai__clearml | clearml/backend_api/services/v2_23/tasks.py | {
"start": 15312,
"end": 15750
} | class ____(StringEnum):
dataset_import = "dataset_import"
annotation = "annotation"
annotation_manual = "annotation_manual"
training = "training"
testing = "testing"
inference = "inference"
data_processing = "data_processing"
application = "application"
monitor = "monitor"
controller = "controller"
optimizer = "optimizer"
service = "service"
qc = "qc"
custom = "custom"
| TaskTypeEnum |
python | huggingface__transformers | src/transformers/models/metaclip_2/configuration_metaclip_2.py | {
"start": 10160,
"end": 17722
} | class ____(PreTrainedConfig):
r"""
[`MetaClip2Config`] is the configuration class to store the configuration of a [`MetaClip2Model`]. It is used to
instantiate a MetaClip2 model according to the specified arguments, defining the text model and vision model configs.
Instantiating a configuration with the defaults will yield a similar configuration to that of the MetaClip2
[facebook/metaclip-2-worldwide-huge-quickgelu](https://huggingface.co/facebook/metaclip-2-worldwide-huge-quickgelu) architecture.
Configuration objects inherit from [`PreTrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PreTrainedConfig`] for more information.
Args:
text_config (`dict`, *optional*):
Dictionary of configuration options used to initialize [`MetaClip2TextConfig`].
vision_config (`dict`, *optional*):
Dictionary of configuration options used to initialize [`MetaClip2VisionConfig`].
projection_dim (`int`, *optional*, defaults to 512):
Dimensionality of text and vision projection layers.
logit_scale_init_value (`float`, *optional*, defaults to 2.6592):
The initial value of the *logit_scale* parameter. Default is used as per the original MetaClip2 implementation.
kwargs (*optional*):
Dictionary of keyword arguments.
Example:
```python
>>> from transformers import MetaClip2Config, MetaClip2Model
>>> # Initializing a MetaClip2Config with facebook/metaclip-2-worldwide-huge-quickgelu style configuration
>>> configuration = MetaClip2Config()
>>> # Initializing a MetaClip2Model (with random weights) from the facebook/metaclip-2-worldwide-huge-quickgelu style configuration
>>> model = MetaClip2Model(configuration)
>>> # Accessing the model configuration
>>> configuration = model.config
>>> # We can also initialize a MetaClip2Config from a MetaClip2TextConfig and a MetaClip2VisionConfig
>>> from transformers import MetaClip2TextConfig, MetaClip2VisionConfig
>>> # Initializing a MetaClip2Text and MetaClip2Vision configuration
>>> config_text = MetaClip2TextConfig()
>>> config_vision = MetaClip2VisionConfig()
>>> config = MetaClip2Config(text_config=config_text, vision_config=config_vision)
```"""
model_type = "metaclip_2"
sub_configs = {"text_config": MetaClip2TextConfig, "vision_config": MetaClip2VisionConfig}
def __init__(
self, text_config=None, vision_config=None, projection_dim=512, logit_scale_init_value=2.6592, **kwargs
):
# If `_config_dict` exist, we use them for the backward compatibility.
# We pop out these 2 attributes before calling `super().__init__` to avoid them being saved (which causes a lot
# of confusion!).
text_config_dict = kwargs.pop("text_config_dict", None)
vision_config_dict = kwargs.pop("vision_config_dict", None)
# Instead of simply assigning `[text|vision]_config_dict` to `[text|vision]_config`, we use the values in
# `[text|vision]_config_dict` to update the values in `[text|vision]_config`. The values should be same in most
# cases, but we don't want to break anything regarding `_config_dict` that existed before commit `8827e1b2`.
if text_config_dict is not None:
if text_config is None:
text_config = {}
# This is the complete result when using `text_config_dict`.
_text_config_dict = MetaClip2TextConfig(**text_config_dict).to_dict()
# Give a warning if the values exist in both `_text_config_dict` and `text_config` but being different.
for key, value in _text_config_dict.items():
if key in text_config and value != text_config[key] and key != "transformers_version":
# If specified in `text_config_dict`
if key in text_config_dict:
message = (
f"`{key}` is found in both `text_config_dict` and `text_config` but with different values. "
f'The value `text_config_dict["{key}"]` will be used instead.'
)
# If inferred from default argument values (just to be super careful)
else:
message = (
f"`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The "
f'value `text_config["{key}"]` will be overridden.'
)
logger.info(message)
# Update all values in `text_config` with the ones in `_text_config_dict`.
text_config.update(_text_config_dict)
if vision_config_dict is not None:
if vision_config is None:
vision_config = {}
# This is the complete result when using `vision_config_dict`.
_vision_config_dict = MetaClip2VisionConfig(**vision_config_dict).to_dict()
# convert keys to string instead of integer
if "id2label" in _vision_config_dict:
_vision_config_dict["id2label"] = {
str(key): value for key, value in _vision_config_dict["id2label"].items()
}
# Give a warning if the values exist in both `_vision_config_dict` and `vision_config` but being different.
for key, value in _vision_config_dict.items():
if key in vision_config and value != vision_config[key] and key != "transformers_version":
# If specified in `vision_config_dict`
if key in vision_config_dict:
message = (
f"`{key}` is found in both `vision_config_dict` and `vision_config` but with different "
f'values. The value `vision_config_dict["{key}"]` will be used instead.'
)
# If inferred from default argument values (just to be super careful)
else:
message = (
f"`vision_config_dict` is provided which will be used to initialize `CLIPVisionConfig`. "
f'The value `vision_config["{key}"]` will be overridden.'
)
logger.info(message)
# Update all values in `vision_config` with the ones in `_vision_config_dict`.
vision_config.update(_vision_config_dict)
if text_config is None:
text_config = MetaClip2TextConfig()
logger.info("`text_config` is `None`. initializing the `MetaClip2TextConfig` with default values.")
elif isinstance(text_config, dict):
text_config = MetaClip2TextConfig(**text_config)
if vision_config is None:
vision_config = MetaClip2VisionConfig()
logger.info("`vision_config` is `None`. initializing the `MetaClip2VisionConfig` with default values.")
elif isinstance(vision_config, dict):
vision_config = MetaClip2VisionConfig(**vision_config)
self.text_config = text_config
self.vision_config = vision_config
self.projection_dim = projection_dim
self.logit_scale_init_value = logit_scale_init_value
self.initializer_factor = 1.0
super().__init__(**kwargs)
__all__ = ["MetaClip2Config", "MetaClip2TextConfig", "MetaClip2VisionConfig"]
| MetaClip2Config |
python | dagster-io__dagster | python_modules/dagster-graphql/dagster_graphql/schema/roots/mutation.py | {
"start": 10571,
"end": 11249
} | class ____(graphene.Mutation):
"""Launches a job run."""
Output = graphene.NonNull(GrapheneLaunchRunResult)
class Arguments:
executionParams = graphene.NonNull(GrapheneExecutionParams)
class Meta:
name = "LaunchRunMutation"
@capture_error
@require_permission_check(Permissions.LAUNCH_PIPELINE_EXECUTION)
async def mutate(
self, graphene_info: ResolveInfo, executionParams: GrapheneExecutionParams
) -> Union[GrapheneLaunchRunSuccess, GrapheneError, GraphenePythonError]:
return await create_execution_params_and_launch_pipeline_exec(
graphene_info, executionParams
)
| GrapheneLaunchRunMutation |
python | kamyu104__LeetCode-Solutions | Python/divide-array-into-arrays-with-max-difference.py | {
"start": 40,
"end": 361
} | class ____(object):
def divideArray(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[List[int]]
"""
nums.sort()
return [nums[i:i+3] for i in xrange(0, len(nums), 3)] if all(nums[i+2]-nums[i] <= k for i in xrange(0, len(nums), 3)) else []
| Solution |
python | huggingface__transformers | tests/models/mobilevit/test_modeling_mobilevit.py | {
"start": 10472,
"end": 14368
} | class ____(unittest.TestCase):
@cached_property
def default_image_processor(self):
return MobileViTImageProcessor.from_pretrained("apple/mobilevit-xx-small") if is_vision_available() else None
@slow
def test_inference_image_classification_head(self):
model = MobileViTForImageClassification.from_pretrained("apple/mobilevit-xx-small").to(torch_device)
image_processor = self.default_image_processor
image = prepare_img()
inputs = image_processor(images=image, return_tensors="pt").to(torch_device)
# forward pass
with torch.no_grad():
outputs = model(**inputs)
# verify the logits
expected_shape = torch.Size((1, 1000))
self.assertEqual(outputs.logits.shape, expected_shape)
expectations = Expectations(
{
(None, None): [-1.9364, -1.2327, -0.4653],
("cuda", 8): [-1.9364, -1.2327, -0.4653],
}
)
expected_slice = torch.tensor(expectations.get_expectation()).to(torch_device)
torch.testing.assert_close(outputs.logits[0, :3], expected_slice, rtol=2e-4, atol=2e-4)
@slow
def test_inference_semantic_segmentation(self):
model = MobileViTForSemanticSegmentation.from_pretrained("apple/deeplabv3-mobilevit-xx-small")
model = model.to(torch_device)
image_processor = MobileViTImageProcessor.from_pretrained("apple/deeplabv3-mobilevit-xx-small")
image = prepare_img()
inputs = image_processor(images=image, return_tensors="pt").to(torch_device)
# forward pass
with torch.no_grad():
outputs = model(**inputs)
logits = outputs.logits
# verify the logits
expected_shape = torch.Size((1, 21, 32, 32))
self.assertEqual(logits.shape, expected_shape)
expectations = Expectations(
{
(None, None): [
[[6.9713, 6.9786, 7.2422], [7.2893, 7.2825, 7.4446], [7.6580, 7.8797, 7.9420]],
[[-10.6869, -10.3250, -10.3471], [-10.4228, -9.9868, -9.7132], [-11.0405, -11.0221, -10.7318]],
[[-3.3089, -2.8539, -2.6740], [-3.2706, -2.5621, -2.5108], [-3.2534, -2.6615, -2.6651]],
],
("cuda", 8): [
[[6.9713, 6.9786, 7.2422], [7.2893, 7.2825, 7.4446], [7.6580, 7.8797, 7.9420]],
[[-10.6869, -10.3250, -10.3471], [-10.4229, -9.9868, -9.7132], [-11.0405, -11.0221, -10.7318]],
[[-3.3089, -2.8539, -2.6739], [-3.2706, -2.5621, -2.5108], [-3.2534, -2.6615, -2.6651]],
],
}
)
expected_slice = torch.tensor(expectations.get_expectation()).to(torch_device)
torch.testing.assert_close(logits[0, :3, :3, :3], expected_slice, rtol=2e-4, atol=2e-4)
@slow
def test_post_processing_semantic_segmentation(self):
model = MobileViTForSemanticSegmentation.from_pretrained("apple/deeplabv3-mobilevit-xx-small")
model = model.to(torch_device)
image_processor = MobileViTImageProcessor.from_pretrained("apple/deeplabv3-mobilevit-xx-small")
image = prepare_img()
inputs = image_processor(images=image, return_tensors="pt").to(torch_device)
# forward pass
with torch.no_grad():
outputs = model(**inputs)
outputs.logits = outputs.logits.detach().cpu()
segmentation = image_processor.post_process_semantic_segmentation(outputs=outputs, target_sizes=[(50, 60)])
expected_shape = torch.Size((50, 60))
self.assertEqual(segmentation[0].shape, expected_shape)
segmentation = image_processor.post_process_semantic_segmentation(outputs=outputs)
expected_shape = torch.Size((32, 32))
self.assertEqual(segmentation[0].shape, expected_shape)
| MobileViTModelIntegrationTest |
python | PrefectHQ__prefect | src/prefect/cache_policies.py | {
"start": 4996,
"end": 5649
} | class ____(CachePolicy):
"""
This policy accepts a custom function with signature f(task_run_context, task_parameters, flow_parameters) -> str
and uses it to compute a task run cache key.
"""
# making it optional for tests
cache_key_fn: Optional[
Callable[["TaskRunContext", dict[str, Any]], Optional[str]]
] = None
def compute_key(
self,
task_ctx: TaskRunContext,
inputs: dict[str, Any],
flow_parameters: dict[str, Any],
**kwargs: Any,
) -> Optional[str]:
if self.cache_key_fn:
return self.cache_key_fn(task_ctx, inputs)
@dataclass
| CacheKeyFnPolicy |
python | django__django | tests/admin_views/admin.py | {
"start": 16291,
"end": 16367
} | class ____(PostAdmin):
form = FieldOverridePostForm
| FieldOverridePostAdmin |
python | django__django | tests/test_utils/tests.py | {
"start": 54871,
"end": 55635
} | class ____(formset_factory(TestForm)):
@classmethod
def _get_cleaned_formset(cls, field_value):
formset = cls(
{
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "0",
"form-0-field": field_value,
}
)
formset.full_clean()
return formset
@classmethod
def valid(cls):
return cls._get_cleaned_formset("valid")
@classmethod
def invalid(cls, nonfield=False, nonform=False):
if nonform:
formset = cls({}, error_messages={"missing_management_form": "error"})
formset.full_clean()
return formset
return cls._get_cleaned_formset("invalid_non_field" if nonfield else "invalid")
| TestFormset |
python | wandb__wandb | wandb/vendor/pygments/lexers/templates.py | {
"start": 57393,
"end": 59819
} | class ____(RegexLexer):
"""
Generic `handlebars <http://handlebarsjs.com/>` template lexer.
Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
Everything else is left for a delegating lexer.
.. versionadded:: 2.0
"""
name = "Handlebars"
aliases = ['handlebars']
tokens = {
'root': [
(r'[^{]+', Other),
(r'\{\{!.*\}\}', Comment),
(r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
(r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
],
'tag': [
(r'\s+', Text),
(r'\}\}\}', Comment.Special, '#pop'),
(r'\}\}', Comment.Preproc, '#pop'),
# Handlebars
(r'([#/]*)(each|if|unless|else|with|log|in(line)?)', bygroups(Keyword,
Keyword)),
(r'#\*inline', Keyword),
# General {{#block}}
(r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)),
# {{opt=something}}
(r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),
# Partials {{> ...}}
(r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
(r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
(r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
'dynamic-partial'),
include('generic'),
],
'dynamic-partial': [
(r'\s+', Text),
(r'\)', Punctuation, '#pop'),
(r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
Name.Variable, Text)),
(r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
using(this, state='variable'))),
(r'[\w-]+', Name.Function),
include('generic'),
],
'variable': [
(r'[a-zA-Z][\w-]*', Name.Variable),
(r'\.[\w-]+', Name.Variable),
(r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
],
'generic': [
include('variable'),
# borrowed from DjangoLexer
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
]
}
| HandlebarsLexer |
python | getsentry__sentry | src/sentry/net/http.py | {
"start": 7672,
"end": 8130
} | class ____(Session):
def __init__(
self, is_ipaddress_permitted: IsIpAddressPermitted = None, max_retries: Retry | None = None
) -> None:
Session.__init__(self)
self.headers.update({"User-Agent": USER_AGENT})
adapter = BlacklistAdapter(
is_ipaddress_permitted=is_ipaddress_permitted, max_retries=max_retries
)
self.mount("https://", adapter)
self.mount("http://", adapter)
| SafeSession |
python | facebook__pyre-check | scripts/callgraph_utilities.py | {
"start": 2355,
"end": 2739
} | class ____(InputFormat):
def extract_callee(self, callee: JSON) -> str:
if not isinstance(callee, str):
raise ValueError(
f"Expected value for individual callee to be a string, got {type(callee)}: {callee}"
)
return callee
def extract_caller(self, qualifier: str) -> str:
return qualifier
| PysaCallGraphInputFormat |
python | xlwings__xlwings | xlwings/_xlwindows.py | {
"start": 61978,
"end": 63254
} | class ____(base_classes.Picture):
def __init__(self, xl):
self.xl = xl
@property
def api(self):
return self.xl
@property
def name(self):
return self.xl.Name
@name.setter
def name(self, value):
self.xl.Name = value
@property
def parent(self):
return Sheet(xl=self.xl.Parent)
@property
def left(self):
return self.xl.Left
@left.setter
def left(self, value):
self.xl.Left = value
@property
def top(self):
return self.xl.Top
@top.setter
def top(self, value):
self.xl.Top = value
@property
def width(self):
return self.xl.Width
@width.setter
def width(self, value):
self.xl.Width = value
@property
def height(self):
return self.xl.Height
@height.setter
def height(self, value):
self.xl.Height = value
def delete(self):
self.xl.Delete()
@property
def lock_aspect_ratio(self):
return self.xl.ShapeRange.LockAspectRatio
@lock_aspect_ratio.setter
def lock_aspect_ratio(self, value):
self.xl.ShapeRange.LockAspectRatio = value
def update(self, filename):
return utils.excel_update_picture(self, filename)
| Picture |
python | sqlalchemy__sqlalchemy | test/orm/test_versioning.py | {
"start": 22813,
"end": 24061
} | class ____(fixtures.DeclarativeMappedTest):
"""test #12748"""
run_setup_classes = "each"
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class Parent(Base):
__tablename__ = "parent"
id = mapped_column(Integer, primary_key=True)
related = relationship(
"Related", post_update=True, cascade="all, delete-orphan"
)
class Related(Base):
__tablename__ = "related"
id = mapped_column(Integer, primary_key=True)
parent_id = mapped_column(ForeignKey("parent.id"))
version = mapped_column(Uuid)
__mapper_args__ = {
"version_id_col": version,
"version_id_generator": lambda v: uuid.uuid4(),
}
def test_random_versionids(self, connection):
Parent, Related = self.classes("Parent", "Related")
p1 = Parent(related=[Related(), Related(), Related()])
with Session(connection, expire_on_commit=False) as sess:
sess.add(p1)
sess.commit()
with Session(connection, expire_on_commit=False) as sess:
sess.delete(p1)
sess.commit()
| PostUpdatePrefetchTest |
python | sqlalchemy__sqlalchemy | test/orm/test_query.py | {
"start": 254495,
"end": 258017
} | class ____(QueryTest):
def _fn_fixture(self):
def query(*arg, **kw):
return Query(*arg, **kw)
return query
def _subclass_fixture(self):
class MyQuery(Query):
pass
return MyQuery
def _callable_fixture(self):
class MyQueryFactory:
def __call__(self, *arg, **kw):
return Query(*arg, **kw)
return MyQueryFactory()
def _plain_fixture(self):
return Query
def _test_get(self, fixture):
User = self.classes.User
s = fixture_session(query_cls=fixture())
assert s.get(User, 19) is None
u = s.get(User, 7)
u2 = s.get(User, 7)
assert u is u2
def _test_o2m_lazyload(self, fixture):
User, Address = self.classes("User", "Address")
s = fixture_session(query_cls=fixture())
u1 = s.query(User).filter(User.id == 7).first()
eq_(u1.addresses, [Address(id=1)])
def _test_m2o_lazyload(self, fixture):
User, Address = self.classes("User", "Address")
s = fixture_session(query_cls=fixture())
a1 = s.query(Address).filter(Address.id == 1).first()
eq_(a1.user, User(id=7))
def _test_expr(self, fixture):
User, Address = self.classes("User", "Address")
s = fixture_session(query_cls=fixture())
q = s.query(func.max(User.id).label("max"))
eq_(q.scalar(), 10)
def _test_expr_undocumented_query_constructor(self, fixture):
# see #4269. not documented but already out there.
User, Address = self.classes("User", "Address")
s = fixture_session(query_cls=fixture())
q = Query(func.max(User.id).label("max")).with_session(s)
eq_(q.scalar(), 10)
def test_plain_get(self):
self._test_get(self._plain_fixture)
def test_callable_get(self):
self._test_get(self._callable_fixture)
def test_subclass_get(self):
self._test_get(self._subclass_fixture)
def test_fn_get(self):
self._test_get(self._fn_fixture)
def test_plain_expr(self):
self._test_expr(self._plain_fixture)
def test_callable_expr(self):
self._test_expr(self._callable_fixture)
def test_subclass_expr(self):
self._test_expr(self._subclass_fixture)
def test_fn_expr(self):
self._test_expr(self._fn_fixture)
def test_plain_expr_undocumented_query_constructor(self):
self._test_expr_undocumented_query_constructor(self._plain_fixture)
def test_callable_expr_undocumented_query_constructor(self):
self._test_expr_undocumented_query_constructor(self._callable_fixture)
def test_subclass_expr_undocumented_query_constructor(self):
self._test_expr_undocumented_query_constructor(self._subclass_fixture)
def test_fn_expr_undocumented_query_constructor(self):
self._test_expr_undocumented_query_constructor(self._fn_fixture)
def test_callable_o2m_lazyload(self):
self._test_o2m_lazyload(self._callable_fixture)
def test_subclass_o2m_lazyload(self):
self._test_o2m_lazyload(self._subclass_fixture)
def test_fn_o2m_lazyload(self):
self._test_o2m_lazyload(self._fn_fixture)
def test_callable_m2o_lazyload(self):
self._test_m2o_lazyload(self._callable_fixture)
def test_subclass_m2o_lazyload(self):
self._test_m2o_lazyload(self._subclass_fixture)
def test_fn_m2o_lazyload(self):
self._test_m2o_lazyload(self._fn_fixture)
| QueryClsTest |
python | sympy__sympy | sympy/core/power.py | {
"start": 729,
"end": 73089
} | class ____(Expr):
"""
Defines the expression x**y as "x raised to a power y"
.. deprecated:: 1.7
Using arguments that aren't subclasses of :class:`~.Expr` in core
operators (:class:`~.Mul`, :class:`~.Add`, and :class:`~.Pow`) is
deprecated. See :ref:`non-expr-args-deprecated` for details.
Singleton definitions involving (0, 1, -1, oo, -oo, I, -I):
+--------------+---------+-----------------------------------------------+
| expr | value | reason |
+==============+=========+===============================================+
| z**0 | 1 | Although arguments over 0**0 exist, see [2]. |
+--------------+---------+-----------------------------------------------+
| z**1 | z | |
+--------------+---------+-----------------------------------------------+
| (-oo)**(-1) | 0 | |
+--------------+---------+-----------------------------------------------+
| (-1)**-1 | -1 | |
+--------------+---------+-----------------------------------------------+
| S.Zero**-1 | zoo | This is not strictly true, as 0**-1 may be |
| | | undefined, but is convenient in some contexts |
| | | where the base is assumed to be positive. |
+--------------+---------+-----------------------------------------------+
| 1**-1 | 1 | |
+--------------+---------+-----------------------------------------------+
| oo**-1 | 0 | |
+--------------+---------+-----------------------------------------------+
| 0**oo | 0 | Because for all complex numbers z near |
| | | 0, z**oo -> 0. |
+--------------+---------+-----------------------------------------------+
| 0**-oo | zoo | This is not strictly true, as 0**oo may be |
| | | oscillating between positive and negative |
| | | values or rotating in the complex plane. |
| | | It is convenient, however, when the base |
| | | is positive. |
+--------------+---------+-----------------------------------------------+
| 1**oo | nan | Because there are various cases where |
| 1**-oo | | lim(x(t),t)=1, lim(y(t),t)=oo (or -oo), |
| | | but lim( x(t)**y(t), t) != 1. See [3]. |
+--------------+---------+-----------------------------------------------+
| b**zoo | nan | Because b**z has no limit as z -> zoo |
+--------------+---------+-----------------------------------------------+
| (-1)**oo | nan | Because of oscillations in the limit. |
| (-1)**(-oo) | | |
+--------------+---------+-----------------------------------------------+
| oo**oo | oo | |
+--------------+---------+-----------------------------------------------+
| oo**-oo | 0 | |
+--------------+---------+-----------------------------------------------+
| (-oo)**oo | nan | |
| (-oo)**-oo | | |
+--------------+---------+-----------------------------------------------+
| oo**I | nan | oo**e could probably be best thought of as |
| (-oo)**I | | the limit of x**e for real x as x tends to |
| | | oo. If e is I, then the limit does not exist |
| | | and nan is used to indicate that. |
+--------------+---------+-----------------------------------------------+
| oo**(1+I) | zoo | If the real part of e is positive, then the |
| (-oo)**(1+I) | | limit of abs(x**e) is oo. So the limit value |
| | | is zoo. |
+--------------+---------+-----------------------------------------------+
| oo**(-1+I) | 0 | If the real part of e is negative, then the |
| -oo**(-1+I) | | limit is 0. |
+--------------+---------+-----------------------------------------------+
Because symbolic computations are more flexible than floating point
calculations and we prefer to never return an incorrect answer,
we choose not to conform to all IEEE 754 conventions. This helps
us avoid extra test-case code in the calculation of limits.
See Also
========
sympy.core.numbers.Infinity
sympy.core.numbers.NegativeInfinity
sympy.core.numbers.NaN
References
==========
.. [1] https://en.wikipedia.org/wiki/Exponentiation
.. [2] https://en.wikipedia.org/wiki/Zero_to_the_power_of_zero
.. [3] https://en.wikipedia.org/wiki/Indeterminate_forms
"""
is_Pow = True
__slots__ = ('is_commutative',)
if TYPE_CHECKING:
@property
def args(self) -> tuple[Expr, Expr]:
...
@property
def base(self) -> Expr:
return self.args[0]
@property
def exp(self) -> Expr:
return self.args[1]
@property
def kind(self):
if self.exp.kind is NumberKind:
return self.base.kind
else:
return UndefinedKind
@cacheit
def __new__(cls, b: Expr | complex, e: Expr | complex, evaluate=None) -> Expr: # type: ignore
if evaluate is None:
evaluate = global_parameters.evaluate
base = _sympify(b)
exp = _sympify(e)
# XXX: This can be removed when non-Expr args are disallowed rather
# than deprecated.
from .relational import Relational
if isinstance(base, Relational) or isinstance(exp, Relational):
raise TypeError('Relational cannot be used in Pow')
# XXX: This should raise TypeError once deprecation period is over:
for arg in [base, exp]:
if not isinstance(arg, Expr):
sympy_deprecation_warning(
f"""
Using non-Expr arguments in Pow is deprecated (in this case, one of the
arguments is of type {type(arg).__name__!r}).
If you really did intend to construct a power with this base, use the **
operator instead.""",
deprecated_since_version="1.7",
active_deprecations_target="non-expr-args-deprecated",
stacklevel=4,
)
if evaluate:
if exp is S.ComplexInfinity:
return S.NaN
if exp is S.Infinity:
if is_gt(base, S.One):
return S.Infinity
if is_gt(base, S.NegativeOne) and is_lt(base, S.One):
return S.Zero
if is_lt(base, S.NegativeOne):
if base.is_finite:
return S.ComplexInfinity
if base.is_finite is False:
return S.NaN
if exp is S.Zero:
return S.One
elif exp is S.One:
return base
elif exp == -1 and not base:
return S.ComplexInfinity
elif exp.__class__.__name__ == "AccumulationBounds":
if base == S.Exp1:
from sympy.calculus.accumulationbounds import AccumBounds
return AccumBounds(Pow(base, exp.min), Pow(base, exp.max))
# autosimplification if base is a number and exp odd/even
# if base is Number then the base will end up positive; we
# do not do this with arbitrary expressions since symbolic
# cancellation might occur as in (x - 1)/(1 - x) -> -1. If
# we returned Piecewise((-1, Ne(x, 1))) for such cases then
# we could do this...but we don't
elif (exp.is_Symbol and exp.is_integer or exp.is_Integer
) and (base.is_number and base.is_Mul or base.is_Number
) and base.could_extract_minus_sign():
if exp.is_even:
base = -base
elif exp.is_odd:
return -Pow(-base, exp)
if S.NaN in (base, exp): # XXX S.NaN**x -> S.NaN under assumption that x != 0
return S.NaN
elif base is S.One:
if abs(exp).is_infinite:
return S.NaN
return S.One
else:
# recognize base as E
from sympy.functions.elementary.exponential import exp_polar
if not exp.is_Atom and base is not S.Exp1 and not isinstance(base, exp_polar):
from .exprtools import factor_terms
from sympy.functions.elementary.exponential import log
from sympy.simplify.radsimp import fraction
c, ex = factor_terms(exp, sign=False).as_coeff_Mul()
num, den = fraction(ex)
if isinstance(den, log) and den.args[0] == base:
return S.Exp1**(c*num)
elif den.is_Add:
from sympy.functions.elementary.complexes import sign, im
s = sign(im(base))
if s.is_Number and s and den == \
log(-factor_terms(base, sign=False)) + s*S.ImaginaryUnit*S.Pi:
return S.Exp1**(c*num)
obj = base._eval_power(exp)
if obj is not None:
return obj
obj = Expr.__new__(cls, base, exp)
obj = cls._exec_constructor_postprocessors(obj)
if not isinstance(obj, Pow):
return obj
obj.is_commutative = (base.is_commutative and exp.is_commutative)
return obj
def inverse(self, argindex=1):
if self.base == S.Exp1:
from sympy.functions.elementary.exponential import log
return log
return None
@classmethod
def class_key(cls):
return 3, 2, cls.__name__
def _eval_refine(self, assumptions):
from sympy.assumptions.ask import ask, Q
b, e = self.as_base_exp()
if ask(Q.integer(e), assumptions) and b.could_extract_minus_sign():
if ask(Q.even(e), assumptions):
return Pow(-b, e)
elif ask(Q.odd(e), assumptions):
return -Pow(-b, e)
def _eval_power(self, expt):
b, e = self.as_base_exp()
if b is S.NaN:
return (b**e)**expt # let __new__ handle it
s = None
if expt.is_integer:
s = 1
elif b.is_polar: # e.g. exp_polar, besselj, var('p', polar=True)...
s = 1
elif e.is_extended_real is not None:
from sympy.functions.elementary.complexes import arg, im, re, sign
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.integers import floor
# helper functions ===========================
def _half(e):
"""Return True if the exponent has a literal 2 as the
denominator, else None."""
if getattr(e, 'q', None) == 2:
return True
n, d = e.as_numer_denom()
if n.is_integer and d == 2:
return True
def _n2(e):
"""Return ``e`` evaluated to a Number with 2 significant
digits, else None."""
try:
rv = e.evalf(2, strict=True)
if rv.is_Number:
return rv
except PrecisionExhausted:
pass
# ===================================================
if e.is_extended_real:
# we need _half(expt) with constant floor or
# floor(S.Half - e*arg(b)/2/pi) == 0
# handle -1 as special case
if e == -1:
# floor arg. is 1/2 + arg(b)/2/pi
if _half(expt):
if b.is_negative is True:
return S.NegativeOne**expt*Pow(-b, e*expt)
elif b.is_negative is False: # XXX ok if im(b) != 0?
return Pow(b, -expt)
elif e.is_even:
if b.is_extended_real:
b = abs(b)
if b.is_imaginary:
b = abs(im(b))*S.ImaginaryUnit
if (abs(e) < 1) == True or e == 1:
s = 1 # floor = 0
elif b.is_extended_nonnegative:
s = 1 # floor = 0
elif re(b).is_extended_nonnegative and (abs(e) < 2) == True:
s = 1 # floor = 0
elif _half(expt):
s = exp(2*S.Pi*S.ImaginaryUnit*expt*floor(
S.Half - e*arg(b)/(2*S.Pi)))
if s.is_extended_real and _n2(sign(s) - s) == 0:
s = sign(s)
else:
s = None
else:
# e.is_extended_real is False requires:
# _half(expt) with constant floor or
# floor(S.Half - im(e*log(b))/2/pi) == 0
try:
s = exp(2*S.ImaginaryUnit*S.Pi*expt*
floor(S.Half - im(e*log(b))/2/S.Pi))
# be careful to test that s is -1 or 1 b/c sign(I) == I:
# so check that s is real
if s.is_extended_real and _n2(sign(s) - s) == 0:
s = sign(s)
else:
s = None
except PrecisionExhausted:
s = None
if s is not None:
return s*Pow(b, e*expt)
def _eval_Mod(self, q):
r"""A dispatched function to compute `b^e \bmod q`, dispatched
by ``Mod``.
Notes
=====
Algorithms:
1. For unevaluated integer power, use built-in ``pow`` function
with 3 arguments, if powers are not too large wrt base.
2. For very large powers, use totient reduction if $e \ge \log(m)$.
Bound on m, is for safe factorization memory wise i.e. $m^{1/4}$.
For pollard-rho to be faster than built-in pow $\log(e) > m^{1/4}$
check is added.
3. For any unevaluated power found in `b` or `e`, the step 2
will be recursed down to the base and the exponent
such that the $b \bmod q$ becomes the new base and
$\phi(q) + e \bmod \phi(q)$ becomes the new exponent, and then
the computation for the reduced expression can be done.
"""
base, exp = self.base, self.exp
if exp.is_integer and exp.is_positive:
if q.is_integer and base % q == 0:
return S.Zero
from sympy.functions.combinatorial.numbers import totient
if base.is_Integer and exp.is_Integer and q.is_Integer:
b, e, m = int(base), int(exp), int(q)
mb = m.bit_length()
if mb <= 80 and e >= mb and e.bit_length()**4 >= m:
phi = int(totient(m))
return Integer(pow(b, phi + e%phi, m))
return Integer(pow(b, e, m))
from .mod import Mod
if isinstance(base, Pow) and base.is_integer and base.is_number:
base = Mod(base, q)
return Mod(Pow(base, exp, evaluate=False), q)
if isinstance(exp, Pow) and exp.is_integer and exp.is_number:
bit_length = int(q).bit_length()
# XXX Mod-Pow actually attempts to do a hanging evaluation
# if this dispatched function returns None.
# May need some fixes in the dispatcher itself.
if bit_length <= 80:
phi = totient(q)
exp = phi + Mod(exp, phi)
return Mod(Pow(base, exp, evaluate=False), q)
def _eval_is_even(self):
if self.exp.is_integer and self.exp.is_positive:
return self.base.is_even
def _eval_is_negative(self):
ext_neg = Pow._eval_is_extended_negative(self)
if ext_neg is True:
return self.is_finite
return ext_neg
def _eval_is_extended_positive(self):
if self.base == self.exp:
if self.base.is_extended_nonnegative:
return True
elif self.base.is_positive:
if self.exp.is_real:
return True
elif self.base.is_extended_negative:
if self.exp.is_even:
return True
if self.exp.is_odd:
return False
elif self.base.is_zero:
if self.exp.is_extended_real:
return self.exp.is_zero
elif self.base.is_extended_nonpositive:
if self.exp.is_odd:
return False
elif self.base.is_imaginary:
if self.exp.is_integer:
m = self.exp % 4
if m.is_zero:
return True
if m.is_integer and m.is_zero is False:
return False
if self.exp.is_imaginary:
from sympy.functions.elementary.exponential import log
return log(self.base).is_imaginary
def _eval_is_extended_negative(self):
if self.exp is S.Half:
if self.base.is_complex or self.base.is_extended_real:
return False
if self.base.is_extended_negative:
if self.exp.is_odd and self.base.is_finite:
return True
if self.exp.is_even:
return False
elif self.base.is_extended_positive or self.base.is_zero:
if self.exp.is_extended_real:
return False
elif self.base.is_extended_nonnegative:
if self.exp.is_extended_nonnegative:
return False
elif self.base.is_extended_nonpositive or self.base.is_extended_real:
if self.exp.is_even:
return False
def _eval_is_zero(self):
if self.base.is_zero:
if self.exp.is_extended_positive:
return True
elif self.exp.is_extended_nonpositive:
return False
elif self.base == S.Exp1:
return self.exp is S.NegativeInfinity
elif self.base.is_zero is False:
if self.base.is_finite and self.exp.is_finite:
return False
elif self.exp.is_negative:
return self.base.is_infinite
elif self.exp.is_nonnegative:
return False
elif self.exp.is_infinite and self.exp.is_extended_real:
if (1 - abs(self.base)).is_extended_positive:
return self.exp.is_extended_positive
elif (1 - abs(self.base)).is_extended_negative:
return self.exp.is_extended_negative
elif self.base.is_finite and self.exp.is_negative:
# when self.base.is_zero is None
return False
    def _eval_is_integer(self):
        # Three-valued test for b**e being an integer.
        b, e = self.args
        if b.is_rational:
            # non-integer rational to a positive power is never an integer
            if b.is_integer is False and e.is_positive:
                return False  # rat**nonneg
        if b.is_integer and e.is_integer:
            if b is S.NegativeOne:
                return True
            if e.is_nonnegative or e.is_positive:
                return True
        if b.is_integer and e.is_negative and (e.is_finite or e.is_integer):
            # int**negative is a proper fraction unless the base is +-1
            if fuzzy_not((b - 1).is_zero) and fuzzy_not((b + 1).is_zero):
                return False
        if b.is_Number and e.is_Number:
            # both numeric: just evaluate and inspect the result
            check = self.func(*self.args)
            return check.is_Integer
        if e.is_negative and b.is_positive and (b - 1).is_positive:
            return False
        if e.is_negative and b.is_negative and (b + 1).is_negative:
            return False
    def _eval_is_extended_real(self):
        # Three-valued test for b**e being in the extended reals.
        # Handles, in order: base E; undetermined base realness (with a
        # special case for exp(imaginary) bases); real base and exponent;
        # imaginary base; real base with imaginary exponent; and finally
        # non-real base with real exponent via the argument of the base.
        if self.base is S.Exp1:
            if self.exp.is_extended_real:
                return True
            elif self.exp.is_imaginary:
                # exp(I*t) is real iff t is an integer multiple of pi
                return (2*S.ImaginaryUnit*self.exp/S.Pi).is_even
        from sympy.functions.elementary.exponential import log, exp
        real_b = self.base.is_extended_real
        if real_b is None:
            if self.base.func == exp and self.base.exp.is_imaginary:
                return self.exp.is_imaginary
            if self.base.func == Pow and self.base.base is S.Exp1 and self.base.exp.is_imaginary:
                return self.exp.is_imaginary
            return
        real_e = self.exp.is_extended_real
        if real_e is None:
            return
        if real_b and real_e:
            if self.base.is_extended_positive:
                return True
            elif self.base.is_extended_nonnegative and self.exp.is_extended_nonnegative:
                return True
            elif self.exp.is_integer and self.base.is_extended_nonzero:
                return True
            elif self.exp.is_integer and self.exp.is_nonnegative:
                return True
            elif self.base.is_extended_negative:
                # negative base to a non-integer rational power is complex
                if self.exp.is_Rational:
                    return False
        if real_e and self.exp.is_extended_negative and self.base.is_zero is False:
            # delegate to the reciprocal power
            return Pow(self.base, -self.exp).is_extended_real
        im_b = self.base.is_imaginary
        im_e = self.exp.is_imaginary
        if im_b:
            if self.exp.is_integer:
                if self.exp.is_even:
                    return True
                elif self.exp.is_odd:
                    return False
            elif im_e and log(self.base).is_imaginary:
                return True
            elif self.exp.is_Add:
                c, a = self.exp.as_coeff_Add()
                if c and c.is_Integer:
                    return Mul(
                        self.base**c, self.base**a, evaluate=False).is_extended_real
            elif self.base in (-S.ImaginaryUnit, S.ImaginaryUnit):
                if (self.exp/2).is_integer is False:
                    return False
        if real_b and im_e:
            if self.base is S.NegativeOne:
                return True
            c = self.exp.coeff(S.ImaginaryUnit)
            if c:
                if self.base.is_rational and c.is_rational:
                    if self.base.is_nonzero and (self.base - 1).is_nonzero and c.is_nonzero:
                        return False
                ok = (c*log(self.base)/S.Pi).is_integer
                if ok is not None:
                    return ok
        if real_b is False and real_e:  # we already know it's not imag
            from sympy.functions.elementary.complexes import arg
            i = arg(self.base)*self.exp/S.Pi
            if i.is_complex: # finite
                return i.is_integer
def _eval_is_complex(self):
if self.base == S.Exp1:
return fuzzy_or([self.exp.is_complex, self.exp.is_extended_negative])
if all(a.is_complex for a in self.args) and self._eval_is_finite():
return True
    def _eval_is_imaginary(self):
        # Three-valued test for b**e being purely imaginary (nonzero
        # multiple of I).
        if self.base.is_commutative is False:
            return False
        if self.base.is_imaginary:
            if self.exp.is_integer:
                # imaginary**odd is imaginary; imaginary**even is real
                odd = self.exp.is_odd
                if odd is not None:
                    return odd
                return
        if self.base == S.Exp1:
            f = 2 * self.exp / (S.Pi*S.ImaginaryUnit)
            # exp(pi*integer) = 1 or -1, so not imaginary
            if f.is_even:
                return False
            # exp(pi*integer + pi/2) = I or -I, so it is imaginary
            if f.is_odd:
                return True
            return None
        if self.exp.is_imaginary:
            from sympy.functions.elementary.exponential import log
            imlog = log(self.base).is_imaginary
            if imlog is not None:
                return False  # I**i -> real; (2*I)**i -> complex ==> not imaginary
        if self.base.is_extended_real and self.exp.is_extended_real:
            if self.base.is_positive:
                return False
            else:
                rat = self.exp.is_rational
                if not rat:
                    return rat
                if self.exp.is_integer:
                    return False
                else:
                    # half-integer exponent on a negative base gives +-I*r
                    half = (2*self.exp).is_integer
                    if half:
                        return self.base.is_negative
                    return half
        if self.base.is_extended_real is False:  # we already know it's not imag
            from sympy.functions.elementary.complexes import arg
            # imaginary iff the total argument is an odd multiple of pi/2
            i = arg(self.base)*self.exp/S.Pi
            isodd = (2*i).is_odd
            if isodd is not None:
                return isodd
def _eval_is_odd(self):
if self.exp.is_integer:
if self.exp.is_positive:
return self.base.is_odd
elif self.exp.is_nonnegative and self.base.is_odd:
return True
elif self.base is S.NegativeOne:
return True
def _eval_is_finite(self):
if self.exp.is_negative:
if self.base.is_zero:
return False
if self.base.is_infinite or self.base.is_nonzero:
return True
c1 = self.base.is_finite
if c1 is None:
return
c2 = self.exp.is_finite
if c2 is None:
return
if c1 and c2:
if self.exp.is_nonnegative or fuzzy_not(self.base.is_zero):
return True
def _eval_is_prime(self):
'''
An integer raised to the n(>=2)-th power cannot be a prime.
'''
if self.base.is_integer and self.exp.is_integer and (self.exp - 1).is_positive:
return False
def _eval_is_composite(self):
"""
A power is composite if both base and exponent are greater than 1
"""
if (self.base.is_integer and self.exp.is_integer and
((self.base - 1).is_positive and (self.exp - 1).is_positive or
(self.base + 1).is_negative and self.exp.is_positive and self.exp.is_even)):
return True
    def _eval_is_polar(self):
        # A power is polar exactly when its base is.
        return self.base.is_polar
    def _eval_subs(self, old, new):
        """Substitute ``old`` with ``new`` inside this power.

        Beyond plain base replacement, this recognizes power-for-power
        substitutions (combining exponents when the identity
        (b**x)**c == b**(x*c) is known to hold), exponent sums, and
        exp(...) patterns matched against an equivalent power of E.
        """
        from sympy.calculus.accumulationbounds import AccumBounds

        if isinstance(self.exp, AccumBounds):
            # substitute into parts and rebuild through AccumBounds rpow
            b = self.base.subs(old, new)
            e = self.exp.subs(old, new)
            if isinstance(e, AccumBounds):
                return e.__rpow__(b)
            return self.func(b, e)

        from sympy.functions.elementary.exponential import exp, log

        def _check(ct1, ct2, old):
            """Return (bool, pow, remainder_pow) where, if bool is True, then the
            exponent of Pow `old` will combine with `pow` so the substitution
            is valid, otherwise bool will be False.

            For noncommutative objects, `pow` will be an integer, and a factor
            `Pow(old.base, remainder_pow)` needs to be included. If there is
            no such factor, None is returned. For commutative objects,
            remainder_pow is always None.

            cti are the coefficient and terms of an exponent of self or old
            In this _eval_subs routine a change like (b**(2*x)).subs(b**x, y)
            will give y**2 since (b**x)**2 == b**(2*x); if that equality does
            not hold then the substitution should not occur so `bool` will be
            False.

            """
            coeff1, terms1 = ct1
            coeff2, terms2 = ct2
            if terms1 == terms2:
                if old.is_commutative:
                    # Allow fractional powers for commutative objects
                    pow = coeff1/coeff2
                    try:
                        as_int(pow, strict=False)
                        combines = True
                    except ValueError:
                        b, e = old.as_base_exp()
                        # These conditions ensure that (b**e)**f == b**(e*f) for any f
                        combines = b.is_positive and e.is_real or b.is_nonnegative and e.is_nonnegative
                    return combines, pow, None
                else:
                    # With noncommutative symbols, substitute only integer powers
                    if not isinstance(terms1, tuple):
                        terms1 = (terms1,)
                    if not all(term.is_integer for term in terms1):
                        return False, None, None
                    try:
                        # Round pow toward zero
                        pow, remainder = divmod(as_int(coeff1), as_int(coeff2))
                        if pow < 0 and remainder != 0:
                            pow += 1
                            remainder -= as_int(coeff2)
                        if remainder == 0:
                            remainder_pow = None
                        else:
                            remainder_pow = Mul(remainder, *terms1)
                        return True, pow, remainder_pow
                    except ValueError:
                        # Can't substitute
                        pass
            return False, None, None

        if old == self.base or (old == exp and self.base == S.Exp1):
            if new.is_Function and isinstance(new, Callable):
                return new(self.exp._subs(old, new))
            else:
                return new**self.exp._subs(old, new)

        # issue 10829: (4**x - 3*y + 2).subs(2**x, y) -> y**2 - 3*y + 2
        if isinstance(old, self.func) and self.exp == old.exp:
            l = log(self.base, old.base)
            if l.is_Number:
                return Pow(new, l)

        if isinstance(old, self.func) and self.base == old.base:
            if self.exp.is_Add is False:
                ct1 = self.exp.as_independent(Symbol, as_Add=False)
                ct2 = old.exp.as_independent(Symbol, as_Add=False)
                ok, pow, remainder_pow = _check(ct1, ct2, old)
                if ok:
                    # issue 5180: (x**(6*y)).subs(x**(3*y),z)->z**2
                    result = self.func(new, pow)
                    if remainder_pow is not None:
                        result = Mul(result, Pow(old.base, remainder_pow))
                    return result
            else:  # b**(6*x + a).subs(b**(3*x), y) -> y**2 * b**a
                # exp(exp(x) + exp(x**2)).subs(exp(exp(x)), w) -> w * exp(exp(x**2))
                oarg = old.exp
                new_l = []
                o_al = []
                ct2 = oarg.as_coeff_mul()
                for a in self.exp.args:
                    newa = a._subs(old, new)
                    ct1 = newa.as_coeff_mul()
                    ok, pow, remainder_pow = _check(ct1, ct2, old)
                    if ok:
                        new_l.append(new**pow)
                        if remainder_pow is not None:
                            o_al.append(remainder_pow)
                        continue
                    elif not old.is_commutative and not newa.is_integer:
                        # If any term in the exponent is non-integer,
                        # we do not do any substitutions in the noncommutative case
                        return
                    o_al.append(newa)
                if new_l:
                    expo = Add(*o_al)
                    new_l.append(Pow(self.base, expo, evaluate=False) if expo != 1 else self.base)
                    return Mul(*new_l)

        if (isinstance(old, exp) or (old.is_Pow and old.base is S.Exp1)) and self.exp.is_extended_real and self.base.is_positive:
            ct1 = old.exp.as_independent(Symbol, as_Add=False)
            ct2 = (self.exp*log(self.base)).as_independent(
                Symbol, as_Add=False)
            ok, pow, remainder_pow = _check(ct1, ct2, old)
            if ok:
                result = self.func(new, pow)  # (2**x).subs(exp(x*log(2)), z) -> z
                if remainder_pow is not None:
                    result = Mul(result, Pow(old.base, remainder_pow))
                return result
    def as_base_exp(self):
        """Return base and exp of self.

        Explanation
        ===========

        If base a Rational less than 1, then return 1/Rational, -exp.

        If this extra processing is not needed, the base and exp
        properties will give the raw arguments.

        Examples
        ========

        >>> from sympy import Pow, S
        >>> p = Pow(S.Half, 2, evaluate=False)
        >>> p.as_base_exp()
        (2, -2)
        >>> p.args
        (1/2, 2)
        >>> p.base, p.exp
        (1/2, 2)

        """
        b, e = self.args
        # a unit-numerator Rational base (1/q) is reported as (q, -e)
        # so the returned base is an Integer
        if b.is_Rational and b.p == 1 and b.q != 1:
            return Integer(b.q), -e
        return b, e
    def _eval_adjoint(self):
        from sympy.functions.elementary.complexes import adjoint
        # adjoint distributes onto the base for integer exponents and onto
        # the exponent for positive bases; when both are known False, retry
        # after a complex expansion
        i, p = self.exp.is_integer, self.base.is_positive
        if i:
            return adjoint(self.base)**self.exp
        if p:
            return self.base**adjoint(self.exp)
        if i is False and p is False:
            expanded = expand_complex(self)
            if expanded != self:
                return adjoint(expanded)
    def _eval_conjugate(self):
        from sympy.functions.elementary.complexes import conjugate as c
        # conjugate distributes onto the base for integer exponents and
        # onto the exponent for positive bases; otherwise try a complex
        # expansion, or return self unchanged when known to be real
        i, p = self.exp.is_integer, self.base.is_positive
        if i:
            return c(self.base)**self.exp
        if p:
            return self.base**c(self.exp)
        if i is False and p is False:
            expanded = expand_complex(self)
            if expanded != self:
                return c(expanded)
        if self.is_extended_real:
            return self
    def _eval_transpose(self):
        from sympy.functions.elementary.complexes import transpose
        if self.base == S.Exp1:
            # transpose of E**x acts on the exponent only
            return self.func(S.Exp1, self.exp.transpose())
        # scalars (complex or infinite) are their own transpose; integer
        # powers transpose via the base
        i, p = self.exp.is_integer, (self.base.is_complex or self.base.is_infinite)
        if p:
            return self.base**self.exp
        if i:
            return transpose(self.base)**self.exp
        if i is False and p is False:
            expanded = expand_complex(self)
            if expanded != self:
                return transpose(expanded)
    def _eval_expand_power_exp(self, **hints):
        """a**(n + m) -> a**n*a**m

        The split is performed only when it is valid: ``force=True``,
        a nonzero base, or all exponent terms of one sign.  For base E
        with a Sum exponent, the result is a Product of exponentials.
        """
        b = self.base
        e = self.exp
        if b == S.Exp1:
            from sympy.concrete.summations import Sum
            if isinstance(e, Sum) and e.is_commutative:
                from sympy.concrete.products import Product
                return Product(self.func(b, e.function), *e.limits)
        if e.is_Add and (hints.get('force', False) or
                b.is_zero is False or e._all_nonneg_or_nonppos()):
            if e.is_commutative:
                return Mul(*[self.func(b, x) for x in e.args])
            if b.is_commutative:
                # split off the commutative part of the exponent; the
                # noncommutative remainder stays as a single power
                c, nc = sift(e.args, lambda x: x.is_commutative, binary=True)
                if c:
                    return Mul(*[self.func(b, x) for x in c]
                        )*b**Add._from_args(nc)
        return self
    def _eval_expand_power_base(self, **hints):
        """(a*b)**n -> a**n * b**n

        Only factors whose separation is valid are pulled out of the base:
        for an arbitrary exponent this requires nonnegative or polar
        factors, with special handling for I and for negative factors;
        ``force=True`` (or an integer exponent) separates every
        commutative factor.
        """
        force = hints.get('force', False)

        b = self.base
        e = self.exp
        if not b.is_Mul:
            return self

        cargs, nc = b.args_cnc(split_1=False)

        # expand each term - this is top-level-only
        # expansion but we have to watch out for things
        # that don't have an _eval_expand method
        if nc:
            nc = [i._eval_expand_power_base(**hints)
                if hasattr(i, '_eval_expand_power_base') else i
                for i in nc]

            if e.is_Integer:
                if e.is_positive:
                    rv = Mul(*nc*e)
                else:
                    rv = Mul(*[i**-1 for i in nc[::-1]]*-e)
                if cargs:
                    rv *= Mul(*cargs)**e
                return rv

            if not cargs:
                return self.func(Mul(*nc), e, evaluate=False)

            nc = [Mul(*nc)]

        # sift the commutative bases
        other, maybe_real = sift(cargs, lambda x: x.is_extended_real is False,
            binary=True)

        def pred(x):
            # classify a factor: I itself, polar (True), nonnegative
            # (True/False via fuzzy_bool) or unknown (None)
            if x is S.ImaginaryUnit:
                return S.ImaginaryUnit
            polar = x.is_polar
            if polar:
                return True
            if polar is None:
                return fuzzy_bool(x.is_extended_nonnegative)
        sifted = sift(maybe_real, pred)
        nonneg = sifted[True]
        other += sifted[None]
        neg = sifted[False]
        imag = sifted[S.ImaginaryUnit]
        if imag:
            # reduce I**len(imag) modulo 4: contributes 1, I, -1 or -I
            I = S.ImaginaryUnit
            i = len(imag) % 4
            if i == 0:
                pass
            elif i == 1:
                other.append(I)
            elif i == 2:
                if neg:
                    nonn = -neg.pop()
                    if nonn is not S.One:
                        nonneg.append(nonn)
                else:
                    neg.append(S.NegativeOne)
            else:
                if neg:
                    nonn = -neg.pop()
                    if nonn is not S.One:
                        nonneg.append(nonn)
                else:
                    neg.append(S.NegativeOne)
                other.append(I)
            del imag

        # bring out the bases that can be separated from the base
        if force or e.is_integer:
            # treat all commutatives the same and put nc in other
            cargs = nonneg + neg + other
            other = nc
        else:
            # this is just like what is happening automatically, except
            # that now we are doing it for an arbitrary exponent for which
            # no automatic expansion is done

            assert not e.is_Integer

            # handle negatives by making them all positive and putting
            # the residual -1 in other
            if len(neg) > 1:
                o = S.One
                if not other and neg[0].is_Number:
                    o *= neg.pop(0)
                if len(neg) % 2:
                    o = -o
                for n in neg:
                    nonneg.append(-n)
                if o is not S.One:
                    other.append(o)
            elif neg and other:
                if neg[0].is_Number and neg[0] is not S.NegativeOne:
                    other.append(S.NegativeOne)
                    nonneg.append(-neg[0])
                else:
                    other.extend(neg)
            else:
                other.extend(neg)
            del neg

            cargs = nonneg
            other += nc

        rv = S.One
        if cargs:
            if e.is_Rational:
                npow, cargs = sift(cargs, lambda x: x.is_Pow and
                    x.exp.is_Rational and x.base.is_number,
                    binary=True)
                rv = Mul(*[self.func(b.func(*b.args), e) for b in npow])
            rv *= Mul(*[self.func(b, e, evaluate=False) for b in cargs])
        if other:
            rv *= self.func(Mul(*other), e, evaluate=False)
        return rv
    def _eval_expand_multinomial(self, **hints):
        """(a + b + ..)**n -> a**n + n*a**(n-1)*b + .., n is nonzero integer

        Non-integer positive rational exponents are split into an integer
        part (expanded) times a radical remainder; negative rational
        exponents with |p| > q are expanded via the reciprocal; a numeric
        base with an Add exponent is split into factors.
        """
        base, exp = self.args
        result = self

        if exp.is_Rational and exp.p > 0 and base.is_Add:
            if not exp.is_Integer:
                n = Integer(exp.p // exp.q)

                if not n:
                    return result
                else:
                    radical, result = self.func(base, exp - n), []

                    expanded_base_n = self.func(base, n)
                    if expanded_base_n.is_Pow:
                        expanded_base_n = \
                            expanded_base_n._eval_expand_multinomial()
                    for term in Add.make_args(expanded_base_n):
                        result.append(term*radical)

                    return Add(*result)

            n = int(exp)

            if base.is_commutative:
                order_terms, other_terms = [], []

                for b in base.args:
                    if b.is_Order:
                        order_terms.append(b)
                    else:
                        other_terms.append(b)

                if order_terms:
                    # (f(x) + O(x^n))^m -> f(x)^m + m*f(x)^{m-1} *O(x^n)
                    f = Add(*other_terms)
                    o = Add(*order_terms)

                    if n == 2:
                        return expand_multinomial(f**n, deep=False) + n*f*o
                    else:
                        g = expand_multinomial(f**(n - 1), deep=False)
                        return expand_mul(f*g, deep=False) + n*g*o

                if base.is_number:
                    # Efficiently expand expressions of the form (a + b*I)**n
                    # where 'a' and 'b' are real numbers and 'n' is integer.
                    a, b = base.as_real_imag()

                    if a.is_Rational and b.is_Rational:
                        if not a.is_Integer:
                            if not b.is_Integer:
                                k = self.func(a.q * b.q, n)
                                a, b = a.p*b.q, a.q*b.p
                            else:
                                k = self.func(a.q, n)
                                a, b = a.p, a.q*b
                        elif not b.is_Integer:
                            k = self.func(b.q, n)
                            a, b = a*b.q, b.p
                        else:
                            k = 1

                        # binary exponentiation of (a + b*I) over the
                        # integers; (c, d) accumulates the result
                        a, b, c, d = int(a), int(b), 1, 0

                        while n:
                            if n & 1:
                                c, d = a*c - b*d, b*c + a*d
                                n -= 1
                            a, b = a*a - b*b, 2*a*b
                            n //= 2

                        I = S.ImaginaryUnit

                        if k == 1:
                            return c + I*d
                        else:
                            return Integer(c)/k + I*d/k

                p = other_terms
                # (x + y)**3 -> x**3 + 3*x**2*y + 3*x*y**2 + y**3
                # in this particular example:
                # p = [x,y]; n = 3
                # so now it's easy to get the correct result -- we get the
                # coefficients first:
                from sympy.ntheory.multinomial import multinomial_coefficients
                from sympy.polys.polyutils import basic_from_dict
                expansion_dict = multinomial_coefficients(len(p), n)
                # in our example: {(3, 0): 1, (1, 2): 3, (0, 3): 1, (2, 1): 3}
                # and now construct the expression.
                return basic_from_dict(expansion_dict, *p)
            else:
                if n == 2:
                    return Add(*[f*g for f in base.args for g in base.args])
                else:
                    multi = (base**(n - 1))._eval_expand_multinomial()
                    if multi.is_Add:
                        return Add(*[f*g for f in base.args
                            for g in multi.args])
                    else:
                        # XXX can this ever happen if base was an Add?
                        return Add(*[f*multi for f in base.args])
        elif (exp.is_Rational and exp.p < 0 and base.is_Add and
                abs(exp.p) > exp.q):
            return 1 / self.func(base, -exp)._eval_expand_multinomial()
        elif exp.is_Add and base.is_Number and (hints.get('force', False) or
                base.is_zero is False or exp._all_nonneg_or_nonppos()):
            #  a + b      a  b
            # n      --> n  n, where n, a, b are Numbers
            # XXX should be in expand_power_exp?
            coeff, tail = [], []
            for term in exp.args:
                if term.is_Number:
                    coeff.append(self.func(base, term))
                else:
                    tail.append(term)
            return Mul(*(coeff + [self.func(base, Add._from_args(tail))]))
        else:
            return result
    def as_real_imag(self, deep=True, **hints):
        """Return the (real, imaginary) parts of self.

        Integer exponents are handled by expanding a polynomial in the
        base's real/imaginary parts; Rational exponents use the polar
        form r**e*(cos(t*e) + I*sin(t*e)); E**z uses
        exp(re(z))*(cos(im(z)) + I*sin(im(z))); any other case falls back
        to splitting the expanded expression with re()/im().
        """
        if self.exp.is_Integer:
            from sympy.polys.polytools import poly

            exp = self.exp
            re_e, im_e = self.base.as_real_imag(deep=deep)
            if not im_e:
                return self, S.Zero
            a, b = symbols('a b', cls=Dummy)
            if exp >= 0:
                if re_e.is_Number and im_e.is_Number:
                    # We can be more efficient in this case
                    expr = expand_multinomial(self.base**exp)
                    if expr != self:
                        return expr.as_real_imag()

                expr = poly(
                    (a + b)**exp)  # a = re, b = im; expr = (a + b*I)**exp
            else:
                # negative exponent: invert via the conjugate,
                # 1/(re + I*im) = (re - I*im)/(re**2 + im**2)
                mag = re_e**2 + im_e**2
                re_e, im_e = re_e/mag, -im_e/mag
                if re_e.is_Number and im_e.is_Number:
                    # We can be more efficient in this case
                    expr = expand_multinomial((re_e + im_e*S.ImaginaryUnit)**-exp)
                    if expr != self:
                        return expr.as_real_imag()

                expr = poly((a + b)**-exp)

            # Terms with even b powers will be real
            r = [i for i in expr.terms() if not i[0][1] % 2]
            re_part = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
            # Terms with odd b powers will be imaginary
            r = [i for i in expr.terms() if i[0][1] % 4 == 1]
            im_part1 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
            r = [i for i in expr.terms() if i[0][1] % 4 == 3]
            im_part3 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])

            return (re_part.subs({a: re_e, b: S.ImaginaryUnit*im_e}),
            im_part1.subs({a: re_e, b: im_e}) + im_part3.subs({a: re_e, b: -im_e}))

        from sympy.functions.elementary.trigonometric import atan2, cos, sin

        if self.exp.is_Rational:
            re_e, im_e = self.base.as_real_imag(deep=deep)

            if im_e.is_zero and self.exp is S.Half:
                if re_e.is_extended_nonnegative:
                    return self, S.Zero
                if re_e.is_extended_nonpositive:
                    return S.Zero, (-self.base)**self.exp

            # XXX: This is not totally correct since for x**(p/q) with
            #      x being imaginary there are actually q roots, but
            #      only a single one is returned from here.
            r = self.func(self.func(re_e, 2) + self.func(im_e, 2), S.Half)

            t = atan2(im_e, re_e)

            rp, tp = self.func(r, self.exp), t*self.exp

            return rp*cos(tp), rp*sin(tp)
        elif self.base is S.Exp1:
            from sympy.functions.elementary.exponential import exp
            re_e, im_e = self.exp.as_real_imag()
            if deep:
                re_e = re_e.expand(deep, **hints)
                im_e = im_e.expand(deep, **hints)
            c, s = cos(im_e), sin(im_e)
            return exp(re_e)*c, exp(re_e)*s
        else:
            from sympy.functions.elementary.complexes import im, re
            if deep:
                hints['complex'] = False

                expanded = self.expand(deep, **hints)
                if hints.get('ignore') == expanded:
                    return None
                else:
                    return (re(expanded), im(expanded))
            else:
                return re(self), im(self)
def _eval_derivative(self, s):
from sympy.functions.elementary.exponential import log
dbase = self.base.diff(s)
dexp = self.exp.diff(s)
return self * (dexp * log(self.base) + dbase * self.exp/self.base)
    def _eval_evalf(self, prec):
        # Numerically evaluate at binary precision ``prec``: E**x is routed
        # through the mpmath-backed exp function; otherwise the base (and a
        # non-integer exponent) are evaluated and the power is rebuilt.
        base, exp = self.as_base_exp()
        if base == S.Exp1:
            # Use mpmath function associated to class "exp":
            from sympy.functions.elementary.exponential import exp as exp_function
            return exp_function(self.exp, evaluate=False)._eval_evalf(prec)
        base = base._evalf(prec)
        if not exp.is_Integer:
            exp = exp._evalf(prec)
        if exp.is_negative and base.is_number and base.is_extended_real is False:
            # rationalize a non-real numeric base so the negative power
            # becomes a positive power of conj(base)/|base|**2
            base = base.conjugate() / (base * base.conjugate())._evalf(prec)
            exp = -exp
            return self.func(base, exp).expand()
        return self.func(base, exp)
def _eval_is_polynomial(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return bool(self.base._eval_is_polynomial(syms) and
self.exp.is_Integer and (self.exp >= 0))
else:
return True
    def _eval_is_rational(self):
        # Three-valued test for b**e being rational.
        # The evaluation of self.func below can be very expensive in the case
        # of integer**integer if the exponent is large.  We should try to exit
        # before that if possible:
        if (self.exp.is_integer and self.base.is_rational
                and fuzzy_not(fuzzy_and([self.exp.is_negative, self.base.is_zero]))):
            return True
        p = self.func(*self.as_base_exp())  # in case it's unevaluated
        if not p.is_Pow:
            return p.is_rational
        b, e = p.as_base_exp()
        if e.is_Rational and b.is_Rational:
            # we didn't check that e is not an Integer
            # because Rational**Integer autosimplifies
            return False
        if e.is_integer:
            if b.is_rational:
                if fuzzy_not(b.is_zero) or e.is_nonnegative:
                    return True
                if b == e:  # always rational, even for 0**0
                    return True
            elif b.is_irrational:
                # irrational**integer is rational only for exponent 0
                return e.is_zero
        if b is S.Exp1:
            # E**(nonzero rational) is transcendental (Lindemann)
            if e.is_rational and e.is_nonzero:
                return False
    def _eval_is_algebraic(self):
        # Three-valued test for b**e being an algebraic number; uses the
        # Gelfond-Schneider/Lindemann-Weierstrass style facts encoded below.
        def _is_one(expr):
            try:
                return (expr - 1).is_zero
            except ValueError:
                # when the operation is not allowed
                return False

        if self.base.is_zero or _is_one(self.base):
            # 0**e and 1**e are 0 or 1 when defined
            return True
        elif self.base is S.Exp1:
            s = self.func(*self.args)
            if s.func == self.func:
                if self.exp.is_nonzero:
                    if self.exp.is_algebraic:
                        return False
                    elif (self.exp/S.Pi).is_rational:
                        return False
                    elif (self.exp/(S.ImaginaryUnit*S.Pi)).is_rational:
                        return True
            else:
                return s.is_algebraic
        elif self.exp.is_rational:
            if self.base.is_algebraic is False:
                return self.exp.is_zero
            if self.base.is_zero is False:
                if self.exp.is_nonzero:
                    return self.base.is_algebraic
                elif self.base.is_algebraic:
                    return True
            if self.exp.is_positive:
                return self.base.is_algebraic
        elif self.base.is_algebraic and self.exp.is_algebraic:
            if ((fuzzy_not(self.base.is_zero)
                and fuzzy_not(_is_one(self.base)))
                    or self.base.is_integer is False
                    or self.base.is_irrational):
                # Gelfond-Schneider: algebraic base (not 0 or 1) to an
                # algebraic irrational power is transcendental
                return self.exp.is_rational
def _eval_is_rational_function(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_rational_function(syms) and \
self.exp.is_Integer
else:
return True
    def _eval_is_meromorphic(self, x, a):
        # Three-valued test for meromorphy of b**e at x = a.
        # f**g is meromorphic if g is an integer and f is meromorphic.
        # E**(log(f)*g) is meromorphic if log(f)*g is meromorphic
        # and finite.
        base_merom = self.base._eval_is_meromorphic(x, a)
        exp_integer = self.exp.is_Integer
        if exp_integer:
            return base_merom

        exp_merom = self.exp._eval_is_meromorphic(x, a)
        if base_merom is False:
            # f**g = E**(log(f)*g) may be meromorphic if the
            # singularities of log(f) and g cancel each other,
            # for example, if g = 1/log(f). Hence,
            return False if exp_merom else None
        elif base_merom is None:
            return None

        b = self.base.subs(x, a)
        # b is extended complex as base is meromorphic.
        # log(base) is finite and meromorphic when b != 0, zoo.
        b_zero = b.is_zero
        if b_zero:
            log_defined = False
        else:
            log_defined = fuzzy_and((b.is_finite, fuzzy_not(b_zero)))

        if log_defined is False:  # zero or pole of base
            return exp_integer  # False or None
        elif log_defined is None:
            return None

        if not exp_merom:
            return exp_merom  # False or None

        # both log(base)*exp pieces are meromorphic; finite exponent value
        # at the point seals the result
        return self.exp.subs(x, a).is_finite
def _eval_is_algebraic_expr(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_algebraic_expr(syms) and \
self.exp.is_Rational
else:
return True
    def _eval_rewrite_as_exp(self, base, expo, **kwargs):
        """Rewrite b**e as exp(e*log(b)), using the branch-safe form
        exp((log(|b|) + I*arg(b))*e) for purely numeric bases."""
        from sympy.functions.elementary.exponential import exp, log

        if base.is_zero or base.has(exp) or expo.has(exp):
            return base**expo

        evaluate = expo.has(Symbol)

        if base.has(Symbol):
            # delay evaluation if expo is non symbolic
            # (as exp(x*log(5)) automatically reduces to x**5)
            if global_parameters.exp_is_pow:
                return Pow(S.Exp1, log(base)*expo, evaluate=evaluate)
            else:
                return exp(log(base)*expo, evaluate=evaluate)

        else:
            from sympy.functions.elementary.complexes import arg, Abs
            return exp((log(Abs(base)) + S.ImaginaryUnit*arg(base))*expo)
    def as_numer_denom(self):
        """Return (numerator, denominator) of self as a pair.

        The base's denominator is separated only when the identity
        (n/d)**e == n**e/d**e is known to hold: always for integer
        exponents, otherwise only when the denominator's sign is known
        (negating numerator and denominator first if it is nonpositive).
        A negative exponent swaps the roles of numerator and denominator.
        """
        if not self.is_commutative:
            return self, S.One
        base, exp = self.as_base_exp()
        n, d = base.as_numer_denom()
        # this should be the same as ExpBase.as_numer_denom wrt
        # exponent handling
        neg_exp = exp.is_negative
        if exp.is_Mul and not neg_exp and not exp.is_positive:
            neg_exp = exp.could_extract_minus_sign()
        int_exp = exp.is_integer
        # the denominator cannot be separated from the numerator if
        # its sign is unknown unless the exponent is an integer, e.g.
        # sqrt(a/b) != sqrt(a)/sqrt(b) when a=1 and b=-1. But if the
        # denominator is negative the numerator and denominator can
        # be negated and the denominator (now positive) separated.
        if not (d.is_extended_real or int_exp):
            n = base
            d = S.One
        dnonpos = d.is_nonpositive
        if dnonpos:
            n, d = -n, -d
        elif dnonpos is None and not int_exp:
            n = base
            d = S.One
        if neg_exp:
            n, d = d, n
            exp = -exp
        if exp.is_infinite:
            # 1**oo and d**oo stay unevaluated only on the nontrivial side
            if n is S.One and d is not S.One:
                return n, self.func(d, exp)
            if n is not S.One and d is S.One:
                return self.func(n, exp), d
        return self.func(n, exp), self.func(d, exp)
    def matches(self, expr, repl_dict=None, old=False):
        """Pattern-match ``expr`` against this power, extending
        ``repl_dict``; return the updated dict or None on failure."""
        expr = _sympify(expr)
        if repl_dict is None:
            repl_dict = {}

        # special case, pattern = 1 and expr.exp can match to 0
        if expr is S.One:
            d = self.exp.matches(S.Zero, repl_dict)
            if d is not None:
                return d

        # make sure the expression to be matched is an Expr
        if not isinstance(expr, Expr):
            return None

        b, e = expr.as_base_exp()

        # special case number
        sb, se = self.as_base_exp()
        if sb.is_Symbol and se.is_Integer and expr:
            # pattern x**n: match x against the se-th root of expr
            if e.is_rational:
                return sb.matches(b**(e/se), repl_dict)
            return sb.matches(expr**(1/se), repl_dict)

        d = repl_dict.copy()
        d = self.base.matches(b, d)
        if d is None:
            return None

        d = self.exp.xreplace(d).matches(e, d)
        if d is None:
            # fall back to the generic Expr matching
            return Expr.matches(self, expr, repl_dict)
        return d
    def _eval_nseries(self, x, n, logx, cdir=0):
        # NOTE! This function is an important part of the gruntz algorithm
        # for computing limits. It has to return a generalized power
        # series with coefficients in C(log, log(x)). In more detail:
        # It has to return an expression
        #     c_0*x**e_0 + c_1*x**e_1 + ... (finitely many terms)
        # where e_i are numbers (not necessarily integers) and c_i are
        # expressions involving only numbers, the log function, and log(x).
        # The series expansion of b**e is computed as follows:
        # 1) We express b as f*(1 + g) where f is the leading term of b.
        #    g has order O(x**d) where d is strictly positive.
        # 2) Then b**e = (f**e)*((1 + g)**e).
        #    (1 + g)**e is computed using binomial series.
        from sympy.functions.elementary.exponential import exp, log
        from sympy.series.limits import limit
        from sympy.series.order import Order
        from sympy.core.sympify import sympify
        if self.base is S.Exp1:
            # E**e: expand the exponent, split off its limit e0, and
            # build exp(e0)*exp(t) by summing the exp Taylor series in t
            e_series = self.exp.nseries(x, n=n, logx=logx)
            if e_series.is_Order:
                return 1 + e_series
            e0 = limit(e_series.removeO(), x, 0)
            if e0 is S.NegativeInfinity:
                return Order(x**n, x)
            if e0 is S.Infinity:
                return self
            t = e_series - e0
            exp_series = term = exp(e0)
            # series of exp(e0 + t) in t
            for i in range(1, n):
                term *= t/i
                term = term.nseries(x, n=n, logx=logx)
                exp_series += term
            exp_series += Order(t**n, x)
            from sympy.simplify.powsimp import powsimp
            return powsimp(exp_series, deep=True, combine='exp')
        from sympy.simplify.powsimp import powdenest
        from .numbers import _illegal
        self = powdenest(self, force=True).trigsimp()
        b, e = self.as_base_exp()

        if e.has(*_illegal):
            raise PoleError()

        if e.has(x):
            # x-dependent exponent: rewrite as exp(e*log(b)) and recurse
            return exp(e*log(b))._eval_nseries(x, n=n, logx=logx, cdir=cdir)

        if logx is not None and b.has(log):
            from .symbol import Wild
            c, ex = symbols('c, ex', cls=Wild, exclude=[x])
            b = b.replace(log(c*x**ex), log(c) + ex*logx)
            self = b**e

        b = b.removeO()
        try:
            from sympy.functions.special.gamma_functions import polygamma
            if b.has(polygamma, S.EulerGamma) and logx is not None:
                raise ValueError()
            _, m = b.leadterm(x)
        except (ValueError, NotImplementedError, PoleError):
            b = b._eval_nseries(x, n=max(2, n), logx=logx, cdir=cdir).removeO()
            if b.has(S.NaN, S.ComplexInfinity):
                raise NotImplementedError()
            _, m = b.leadterm(x)

        if e.has(log):
            from sympy.simplify.simplify import logcombine
            e = logcombine(e).cancel()

        if not (m.is_zero or e.is_number and e.is_real):
            if self == self._eval_as_leading_term(x, logx=logx, cdir=cdir):
                res = exp(e*log(b))._eval_nseries(x, n=n, logx=logx, cdir=cdir)
                if res == exp(e*log(b)):
                    return self
                return res

        f = b.as_leading_term(x, logx=logx)
        g = (_mexpand(b) - f).cancel()
        # g is the relative remainder: b == f*(1 + g)
        g = g/f

        if not m.is_number:
            raise NotImplementedError()
        maxpow = n - m*e
        if maxpow.has(Symbol):
            maxpow = sympify(n)

        if maxpow.is_negative:
            return Order(x**(m*e), x)

        if g.is_zero:
            r = f**e
            if r != self:
                r += Order(x**n, x)
            return r

        def coeff_exp(term, x):
            # split a product term into (coefficient, exponent of x)
            coeff, exp = S.One, S.Zero
            for factor in Mul.make_args(term):
                if factor.has(x):
                    base, exp = factor.as_base_exp()
                    if base != x:
                        try:
                            return term.leadterm(x)
                        except ValueError:
                            return term, S.Zero
                else:
                    coeff *= factor
            return coeff, exp

        def mul(d1, d2):
            # multiply two {exponent: coefficient} series dicts,
            # truncating at maxpow
            res = {}
            for e1, e2 in product(d1, d2):
                ex = e1 + e2
                if ex < maxpow:
                    res[ex] = res.get(ex, S.Zero) + d1[e1]*d2[e2]
            return res

        try:
            c, d = g.leadterm(x, logx=logx)
        except (ValueError, NotImplementedError):
            if limit(g/x**maxpow, x, 0) == 0:
                # g has higher order zero
                return f**e + e*f**e*g  # first term of binomial series
            else:
                raise NotImplementedError()
        if c.is_Float and d == S.Zero:
            # Convert floats like 0.5 to exact SymPy numbers like S.Half, to
            # prevent rounding errors which can induce wrong values of d leading
            # to a NotImplementedError being returned from the block below.
            g = g.replace(lambda x: x.is_Float, lambda x: Rational(x))
            _, d = g.leadterm(x, logx=logx)

        if not d.is_positive:
            g = g.simplify()
            if g.is_zero:
                return f**e
            _, d = g.leadterm(x, logx=logx)
            if not d.is_positive:
                g = ((b - f)/f).expand()
                _, d = g.leadterm(x, logx=logx)
                if not d.is_positive:
                    raise NotImplementedError()

        from sympy.functions.elementary.integers import ceiling
        gpoly = g._eval_nseries(x, n=ceiling(maxpow), logx=logx, cdir=cdir).removeO()
        gterms = {}

        for term in Add.make_args(gpoly):
            co1, e1 = coeff_exp(term, x)
            gterms[e1] = gterms.get(e1, S.Zero) + co1

        k = S.One
        terms = {S.Zero: S.One}
        tk = gterms

        from sympy.functions.combinatorial.factorials import factorial, ff

        # accumulate the binomial series (1 + g)**e = sum ff(e, k)/k! * g**k
        while (k*d - maxpow).is_negative:
            coeff = ff(e, k)/factorial(k)
            for ex in tk:
                terms[ex] = terms.get(ex, S.Zero) + coeff*tk[ex]
            tk = mul(tk, gterms)
            k += S.One

        from sympy.functions.elementary.complexes import im

        if not e.is_integer and m.is_zero and f.is_negative:
            # branch-cut correction for the f**e prefactor
            ndir = (b - f).dir(x, cdir)
            if im(ndir).is_negative:
                inco, inex = coeff_exp(f**e*(-1)**(-2*e), x)
            elif im(ndir).is_zero:
                inco, inex = coeff_exp(exp(e*log(b)).as_leading_term(x, logx=logx, cdir=cdir), x)
            else:
                inco, inex = coeff_exp(f**e, x)
        else:
            inco, inex = coeff_exp(f**e, x)
        res = S.Zero

        for e1 in terms:
            ex = e1 + inex
            res += terms[e1]*inco*x**(ex)

        if not (e.is_integer and e.is_positive and (e*d - n).is_nonpositive and
                res == _mexpand(self)):
            try:
                res += Order(x**n, x)
            except NotImplementedError:
                return exp(e*log(b))._eval_nseries(x, n=n, logx=logx, cdir=cdir)
        return res
    def _eval_as_leading_term(self, x, logx, cdir):
        # Leading term of b**e as x -> 0 along direction cdir.
        from sympy.functions.elementary.exponential import exp, log
        e = self.exp
        b = self.base
        if self.base is S.Exp1:
            # E**e: evaluate the leading term of the exponent at 0
            arg = e.as_leading_term(x, logx=logx)
            arg0 = arg.subs(x, 0)
            if arg0 is S.NaN:
                arg0 = arg.limit(x, 0)
            if arg0.is_infinite is False:
                return S.Exp1**arg0
            raise PoleError("Cannot expand %s around 0" % (self))
        elif e.has(x):
            # x-dependent exponent: go through exp(e*log(b))
            lt = exp(e * log(b))
            return lt.as_leading_term(x, logx=logx, cdir=cdir)
        else:
            from sympy.functions.elementary.complexes import im
            try:
                f = b.as_leading_term(x, logx=logx, cdir=cdir)
            except PoleError:
                return self
            if not e.is_integer and f.is_negative and not f.has(x):
                ndir = (b - f).dir(x, cdir)
                if im(ndir).is_negative:
                    # Normally, f**e would evaluate to exp(e*log(f)) but on branch cuts
                    # an other value is expected through the following computation
                    # exp(e*(log(f) - 2*pi*I)) == f**e*exp(-2*e*pi*I) == f**e*(-1)**(-2*e).
                    return self.func(f, e) * (-1)**(-2*e)
                elif im(ndir).is_zero:
                    log_leadterm = log(b)._eval_as_leading_term(x, logx=logx, cdir=cdir)
                    if log_leadterm.is_infinite is False:
                        return exp(e*log_leadterm)
            return self.func(f, e)
@cacheit
def _taylor_term(self, n, x, *previous_terms): # of (1 + x)**e
from sympy.functions.combinatorial.factorials import binomial
return binomial(self.exp, n) * self.func(x, n)
def taylor_term(self, n, x, *previous_terms):
if self.base is not S.Exp1:
return super().taylor_term(n, x, *previous_terms)
if n < 0:
return S.Zero
if n == 0:
return S.One
from .sympify import sympify
x = sympify(x)
if previous_terms:
p = previous_terms[-1]
if p is not None:
return p * x / n
from sympy.functions.combinatorial.factorials import factorial
return x**n/factorial(n)
def _eval_rewrite_as_sin(self, base, exp, **hints):
if self.base is S.Exp1:
from sympy.functions.elementary.trigonometric import sin
return sin(S.ImaginaryUnit*self.exp + S.Pi/2) - S.ImaginaryUnit*sin(S.ImaginaryUnit*self.exp)
def _eval_rewrite_as_cos(self, base, exp, **hints):
if self.base is S.Exp1:
from sympy.functions.elementary.trigonometric import cos
return cos(S.ImaginaryUnit*self.exp) + S.ImaginaryUnit*cos(S.ImaginaryUnit*self.exp + S.Pi/2)
def _eval_rewrite_as_tanh(self, base, exp, **hints):
if self.base is S.Exp1:
from sympy.functions.elementary.hyperbolic import tanh
return (1 + tanh(self.exp/2))/(1 - tanh(self.exp/2))
def _eval_rewrite_as_sqrt(self, base, exp, **kwargs):
from sympy.functions.elementary.trigonometric import sin, cos
if base is not S.Exp1:
return None
if exp.is_Mul:
coeff = exp.coeff(S.Pi * S.ImaginaryUnit)
if coeff and coeff.is_number:
cosine, sine = cos(S.Pi*coeff), sin(S.Pi*coeff)
if not isinstance(cosine, cos) and not isinstance (sine, sin):
return cosine + S.ImaginaryUnit*sine
def as_content_primitive(self, radical=False, clear=True):
"""Return the tuple (R, self/R) where R is the positive Rational
extracted from self.
Examples
========
>>> from sympy import sqrt
>>> sqrt(4 + 4*sqrt(2)).as_content_primitive()
(2, sqrt(1 + sqrt(2)))
>>> sqrt(3 + 3*sqrt(2)).as_content_primitive()
(1, sqrt(3)*sqrt(1 + sqrt(2)))
>>> from sympy import expand_power_base, powsimp, Mul
>>> from sympy.abc import x, y
>>> ((2*x + 2)**2).as_content_primitive()
(4, (x + 1)**2)
>>> (4**((1 + y)/2)).as_content_primitive()
(2, 4**(y/2))
>>> (3**((1 + y)/2)).as_content_primitive()
(1, 3**((y + 1)/2))
>>> (3**((5 + y)/2)).as_content_primitive()
(9, 3**((y + 1)/2))
>>> eq = 3**(2 + 2*x)
>>> powsimp(eq) == eq
True
>>> eq.as_content_primitive()
(9, 3**(2*x))
>>> powsimp(Mul(*_))
3**(2*x + 2)
>>> eq = (2 + 2*x)**y
>>> s = expand_power_base(eq); s.is_Mul, s
(False, (2*x + 2)**y)
>>> eq.as_content_primitive()
(1, (2*(x + 1))**y)
>>> s = expand_power_base(_[1]); s.is_Mul, s
(True, 2**y*(x + 1)**y)
See docstring of Expr.as_content_primitive for more examples.
"""
b, e = self.as_base_exp()
b = _keep_coeff(*b.as_content_primitive(radical=radical, clear=clear))
ce, pe = e.as_content_primitive(radical=radical, clear=clear)
if b.is_Rational:
#e
#= ce*pe
#= ce*(h + t)
#= ce*h + ce*t
#=> self
#= b**(ce*h)*b**(ce*t)
#= b**(cehp/cehq)*b**(ce*t)
#= b**(iceh + r/cehq)*b**(ce*t)
#= b**(iceh)*b**(r/cehq)*b**(ce*t)
#= b**(iceh)*b**(ce*t + r/cehq)
h, t = pe.as_coeff_Add()
if h.is_Rational and b != S.Zero:
ceh = ce*h
c = self.func(b, ceh)
r = S.Zero
if not c.is_Rational:
iceh, r = divmod(ceh.p, ceh.q)
c = self.func(b, iceh)
return c, self.func(b, _keep_coeff(ce, t + r/ce/ceh.q))
e = _keep_coeff(ce, pe)
# b**e = (h*t)**e = h**e*t**e = c*m*t**e
if e.is_Rational and b.is_Mul:
h, t = b.as_content_primitive(radical=radical, clear=clear) # h is positive
c, m = self.func(h, e).as_coeff_Mul() # so c is positive
m, me = m.as_base_exp()
if m is S.One or me == e: # probably always true
# return the following, not return c, m*Pow(t, e)
# which would change Pow into Mul; we let SymPy
# decide what to do by using the unevaluated Mul, e.g
# should it stay as sqrt(2 + 2*sqrt(5)) or become
# sqrt(2)*sqrt(1 + sqrt(5))
return c, self.func(_keep_coeff(m, t), e)
return S.One, self.func(b, e)
def is_constant(self, *wrt, **flags):
expr = self
if flags.get('simplify', True):
expr = expr.simplify()
b, e = expr.as_base_exp()
bz = b.equals(0)
if bz: # recalculate with assumptions in case it's unevaluated
new = b**e
if new != expr:
return new.is_constant()
econ = e.is_constant(*wrt)
bcon = b.is_constant(*wrt)
if bcon:
if econ:
return True
bz = b.equals(0)
if bz is False:
return False
elif bcon is None:
return None
return e.equals(0)
def _eval_difference_delta(self, n, step):
b, e = self.args
if e.has(n) and not b.has(n):
new_e = e.subs(n, n + step)
return (b**(new_e - e) - 1) * self
power = Dispatcher('power')
power.add((object, object), Pow)
from .add import Add
from .numbers import Integer, Rational
from .mul import Mul, _keep_coeff
from .symbol import Symbol, Dummy, symbols
| Pow |
python | jazzband__django-model-utils | tests/models.py | {
"start": 8495,
"end": 8598
} | class ____(FieldTracker):
tracker_class = LoopDetectionFieldInstanceTracker
| LoopDetectionFieldTracker |
python | getsentry__sentry | tests/sentry/notifications/utils/test_participants.py | {
"start": 2010,
"end": 2740
} | class ____(TestCase):
def assert_recipients_are(
self,
actual: Mapping[ExternalProviders, set[Actor]],
*,
email: Iterable[int] = (),
slack: Iterable[int] = (),
) -> None:
expected: dict[ExternalProviders, set[Actor]] = collections.defaultdict(set)
for provider, user_ids in [
(ExternalProviders.EMAIL, email),
(ExternalProviders.SLACK, slack),
]:
if user_ids:
for user_id in user_ids:
user = user_service.get_user(user_id)
assert user is not None
expected[provider].add(Actor.from_rpc_user(user))
assert actual == expected
| _ParticipantsTest |
python | airbytehq__airbyte | airbyte-integrations/connectors/source-github/source_github/github_schema.py | {
"start": 475542,
"end": 475855
} | class ____(sgqlc.types.Type):
"""Represents a Git blame."""
__schema__ = github_schema
__field_names__ = ("ranges",)
ranges = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("BlameRange"))), graphql_name="ranges")
"""The list of ranges from a Git blame."""
| Blame |
python | apache__airflow | providers/salesforce/tests/unit/salesforce/operators/test_salesforce_apex_rest.py | {
"start": 961,
"end": 1815
} | class ____:
"""
Test class for SalesforceApexRestOperator
"""
@patch("airflow.providers.salesforce.operators.salesforce_apex_rest.SalesforceHook.get_conn")
def test_execute_salesforce_apex_rest(self, mock_get_conn):
"""
Test execute apex rest
"""
endpoint = "User/Activity"
method = "POST"
payload = {"activity": [{"user": "12345", "action": "update page", "time": "2014-04-21T13:00:15Z"}]}
mock_get_conn.return_value.apexecute = Mock()
operator = SalesforceApexRestOperator(
task_id="task", endpoint=endpoint, method=method, payload=payload
)
operator.execute(context={})
mock_get_conn.return_value.apexecute.assert_called_once_with(
action=endpoint, method=method, data=payload
)
| TestSalesforceApexRestOperator |
python | facelessuser__soupsieve | tests/test_level3/test_only_child.py | {
"start": 55,
"end": 729
} | class ____(util.TestCase):
"""Test only child selectors."""
def test_only_child(self):
"""Test only child."""
self.assert_selector(
"""
<div id="div">
<p id="0">Some text <span id="1"> in a paragraph</span>.</p>
<a id="2" href="http://google.com">Link</a>
<span id="3">Direct child</span>
<pre id="pre">
<span id="4">Child 1</span>
<span id="5">Child 2</span>
<span id="6">Child 3</span>
</pre>
</div>
""",
"span:only-child",
["1"],
flags=util.HTML
)
| TestOnlyChild |
python | scipy__scipy | benchmarks/benchmarks/go_benchmark_functions/go_funcs_E.py | {
"start": 3875,
"end": 4887
} | class ____(Benchmark):
r"""
Egg Crate objective function.
This class defines the Egg Crate [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{EggCrate}}(x) = x_1^2 + x_2^2 + 25 \left[ \sin^2(x_1)
+ \sin^2(x_2) \right]
with :math:`x_i \in [-5, 5]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = 0` for :math:`x = [0, 0]`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = list(zip([-5.0] * self.N, [5.0] * self.N))
self.global_optimum = [[0.0, 0.0]]
self.fglob = 0.0
def fun(self, x, *args):
self.nfev += 1
return x[0] ** 2 + x[1] ** 2 + 25 * (sin(x[0]) ** 2 + sin(x[1]) ** 2)
| EggCrate |
python | jschneier__django-storages | tests/test_s3.py | {
"start": 37316,
"end": 37685
} | class ____(TestCase):
def setUp(self):
self.storage = S3ManifestStaticStorageTestStorage()
self.storage._connections.connection = mock.MagicMock()
def test_querystring_auth(self):
self.assertFalse(self.storage.querystring_auth)
def test_save(self):
self.storage.save("x.txt", ContentFile(b"abc"))
| S3ManifestStaticStorageTests |
python | getsentry__sentry | src/sentry/users/web/accounts.py | {
"start": 2102,
"end": 18302
} | class ____(Exception):
"""email already exists as a verified email on the account"""
pass
def get_template(mode: str, name: str) -> str:
return f"sentry/account/{mode}/{name}.html"
@login_required
@control_silo_view
def login_redirect(request: HttpRequest) -> HttpResponseRedirect:
login_url = auth.get_login_redirect(request)
return HttpResponseRedirect(login_url)
@control_silo_view
def expired(request: HttpRequest, user: User) -> HttpResponse:
hash = lost_password_hash_service.get_or_create(user_id=user.id).hash
LostPasswordHash.send_recover_password_email(user, hash, request.META["REMOTE_ADDR"])
context = {"email": user.email}
return render_to_response(get_template("recover", "expired"), context, request)
@control_silo_view
def recover(request: HttpRequest) -> HttpResponse:
from sentry import ratelimits as ratelimiter
extra = {
"ip_address": request.META["REMOTE_ADDR"],
"user_agent": request.META.get("HTTP_USER_AGENT"),
}
if request.method == "POST" and ratelimiter.backend.is_limited(
"accounts:recover:{}".format(extra["ip_address"]),
limit=5,
window=60, # 5 per minute should be enough for anyone
):
logger.warning("recover.rate-limited", extra=extra)
return HttpResponse(
"You have made too many password recovery attempts. Please try again later.",
content_type="text/plain",
status=429,
)
prefill = {"user": request.GET.get("email")}
form = RecoverPasswordForm(request.POST or None, initial=prefill)
extra["user_recovered"] = form.data.get("user")
if form.is_valid():
email = form.cleaned_data["user"]
if email:
password_hash = lost_password_hash_service.get_or_create(user_id=email.id)
LostPasswordHash.send_recover_password_email(
email, password_hash.hash, request.META["REMOTE_ADDR"]
)
extra["passwordhash_id"] = password_hash.id
extra["user_id"] = password_hash.user_id
logger.info("recover.sent", extra=extra)
context = {"email": email}
return render_to_response(get_template("recover", "sent"), context, request)
if form.errors:
logger.warning("recover.error", extra=extra)
context = {"form": form}
return render_to_response(get_template("recover", "index"), context, request)
@set_referrer_policy("strict-origin-when-cross-origin")
@control_silo_view
def relocate_reclaim(request: HttpRequest, user_id: int) -> HttpResponse:
"""
Ask to receive a new "claim this user" email.
"""
from sentry import ratelimits as ratelimiter
extra = {
"ip_address": request.META["REMOTE_ADDR"],
"user_agent": request.META.get("HTTP_USER_AGENT"),
"user_id": user_id,
}
if request.method != "POST":
logger.warning("reclaim.error", extra=extra)
return render_to_response(get_template("relocate", "error"), {}, request)
if ratelimiter.backend.is_limited(
"accounts:reclaim:{}".format(extra["ip_address"]),
limit=5,
window=60, # 5 per minute should be enough for anyone
):
logger.warning("reclaim.rate-limited", extra=extra)
return HttpResponse(
"You have made too many password recovery attempts. Please try again later.",
content_type="text/plain",
status=429,
)
# Verify that the user is unclaimed. If they are already claimed, tell the requester that this
# is the case, since of course claiming this account would be impossible.
user = User.objects.filter(id=user_id).first()
if user is None:
logger.warning("reclaim.user_not_found", extra=extra)
return render_to_response(get_template("relocate", "error"), {}, request)
if not user.is_unclaimed:
logger.warning("reclaim.already_claimed", extra=extra)
return render_to_response(get_template("relocate", "claimed"), {}, request)
# Get all orgs for user. We'll need this info to properly compose the new relocation email.
org_ids = OrganizationMemberMapping.objects.filter(user_id=user_id).values_list(
"organization_id", flat=True
)
org_slugs = list(
OrganizationMapping.objects.filter(organization_id__in=org_ids).values_list(
"slug", flat=True
)
)
if len(org_slugs) == 0:
logger.warning("reclaim.error", extra=extra)
return render_to_response(get_template("relocate", "error"), {}, request)
# Make a new `LostPasswordHash`, and send the "this user has been relocated ..." email again.
password_hash = lost_password_hash_service.get_or_create(user_id=user_id)
LostPasswordHash.send_relocate_account_email(user, password_hash.hash, org_slugs)
extra["passwordhash_id"] = password_hash.id
extra["org_slugs"] = org_slugs
# Let the user know that we've sent them a new email.
logger.info("recover.sent", extra=extra)
return render_to_response(get_template("relocate", "sent"), {}, request)
@set_referrer_policy("strict-origin-when-cross-origin")
@control_silo_view
def recover_confirm(
request: HttpRequest, user_id: int, hash: str, mode: str = "recover"
) -> HttpResponse:
from sentry import ratelimits as ratelimiter
try:
password_hash = LostPasswordHash.objects.get(user=user_id, hash=hash)
if not password_hash.is_valid():
password_hash.delete()
raise LostPasswordHash.DoesNotExist
user = password_hash.user
except LostPasswordHash.DoesNotExist:
return render_to_response(get_template(mode, "failure"), {"user_id": user_id}, request)
extra = {
"ip_address": request.META["REMOTE_ADDR"],
"user_agent": request.META.get("HTTP_USER_AGENT"),
}
if request.method == "POST" and ratelimiter.backend.is_limited(
"accounts:confirm:{}".format(extra["ip_address"]),
limit=5,
window=60, # 5 per minute should be enough for anyone
):
logger.warning("confirm.rate-limited", extra=extra)
return HttpResponse(
"You have made too many attempts. Please try again later.",
content_type="text/plain",
status=429,
)
# TODO(getsentry/team-ospo#190): Clean up ternary logic and only show relocation form if user is unclaimed
form_cls = RelocationForm if mode == "relocate" else ChangePasswordRecoverForm
if request.method == "POST":
form = form_cls(request.POST, user=user)
if form.is_valid():
if mode == "relocate":
# Relocation form requires users to accept TOS and privacy policy with an org
# associated. We only need the first membership, since all of user's orgs will be in
# the same region.
membership = OrganizationMemberMapping.objects.filter(user=user).first()
assert membership is not None
mapping = OrganizationMapping.objects.get(
organization_id=membership.organization_id
)
# These service calls need to be outside of the transaction block. Claiming an
# account constitutes an email verifying action. We'll verify the primary email
# associated with this account in particular, since that is the only one the user
# claiming email could have been sent to.
rpc_user = user_service.get_user(user_id=user.id)
user_service.verify_user_email(email=user.email, user_id=user.id)
orgs = organization_service.get_organizations_by_user_and_scope(
region_name=mapping.region_name, user=rpc_user
)
for org in orgs:
terms_accepted.send_robust(
user=user,
organization=org,
ip_address=request.META["REMOTE_ADDR"],
sender=recover_confirm,
)
with transaction.atomic(router.db_for_write(User)):
if mode == "relocate":
user.username = form.cleaned_data["username"]
user.is_unclaimed = False
user.set_password(form.cleaned_data["password"])
user.refresh_session_nonce(request)
user.save()
# Ugly way of doing this, but Django requires the backend be set
auth = authenticate(username=user.username, password=form.cleaned_data["password"])
assert isinstance(auth, User), auth
user = auth
# Only log the user in if there is no two-factor on the
# account.
if not user.has_2fa():
login_user(request, user)
password_hash.delete()
capture_security_activity(
account=user,
type="password-changed",
actor=request.user,
ip_address=request.META["REMOTE_ADDR"],
send_email=True,
)
reset_2fa_rate_limits(user.id)
return login_redirect(request)
else:
form = form_cls(user=user)
return render_to_response(get_template(mode, "confirm"), {"form": form}, request)
# Set password variation of password recovery
set_password_confirm = partial(recover_confirm, mode="set_password")
# Relocation variation of password recovery
relocate_confirm = partial(recover_confirm, mode="relocate")
@login_required
@require_http_methods(["POST"])
@control_silo_view
def start_confirm_email(request: HttpRequest) -> HttpResponse:
from sentry import ratelimits as ratelimiter
if ratelimiter.backend.is_limited(
f"auth:confirm-email:{request.user.id}",
limit=10,
window=60, # 10 per minute should be enough for anyone
):
return HttpResponse(
"You have made too many email confirmation requests. Please try again later.",
content_type="text/plain",
status=429,
)
assert isinstance(
request.user, User
), "User must have an associated email to send confirm emails to"
if "primary-email" in request.POST:
email = request.POST.get("email")
try:
email_to_send = UserEmail.objects.get(user_id=request.user.id, email=email)
except UserEmail.DoesNotExist:
msg = _("There was an error confirming your email.")
level = messages.ERROR
else:
request.user.send_confirm_email_singular(email_to_send)
msg = _("A verification email has been sent to %s.") % (email)
level = messages.SUCCESS
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings"))
elif request.user.has_unverified_emails():
request.user.send_confirm_emails()
unverified_emails = [e.email for e in request.user.get_unverified_emails()]
msg = _("A verification email has been sent to %s.") % (", ").join(unverified_emails)
for email in unverified_emails:
logger.info(
"user.email.start_confirm",
extra={
"user_id": request.user.id,
"ip_address": request.META["REMOTE_ADDR"],
"email": email,
},
)
else:
msg = _("Your email (%s) has already been verified.") % request.user.email
messages.add_message(request, messages.SUCCESS, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
@set_referrer_policy("strict-origin-when-cross-origin")
@login_required
@control_silo_view
def confirm_email(request: HttpRequest, user_id: int, hash: str) -> HttpResponseRedirect:
msg = _("Thanks for confirming your email")
level = messages.SUCCESS
try:
if request.user.id != int(user_id):
raise InvalidRequest
email = UserEmail.objects.get(user=user_id, validation_hash=hash)
if not email.hash_is_valid():
raise UserEmail.DoesNotExist
except UserEmail.DoesNotExist:
if request.user.is_anonymous or request.user.has_unverified_emails():
msg = ERR_CONFIRMING_EMAIL
level = messages.ERROR
except InvalidRequest:
msg = ERR_CONFIRMING_EMAIL
level = messages.ERROR
else:
email.is_verified = True
email.validation_hash = ""
email.save(update_fields=["is_verified", "validation_hash"])
email_verified.send(email=email.email, sender=email)
logger.info(
"user.email.confirm",
extra={
"user_id": user_id,
"ip_address": request.META["REMOTE_ADDR"],
"useremail_id": email.id,
"email": email.email.lower(),
},
)
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
@set_referrer_policy("strict-origin-when-cross-origin")
@login_required
@control_silo_view
def confirm_signed_email(
request: HttpRequest, signed_data: str
) -> HttpResponseRedirect | HttpResponse:
EMAIL_CONFIRMATION_SALT = options.get("user-settings.signed-url-confirmation-emails-salt")
use_signed_urls = options.get("user-settings.signed-url-confirmation-emails")
if not use_signed_urls:
msg = ERR_CONFIRMING_EMAIL
level = messages.ERROR
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
msg = _("Thanks for confirming your email")
level = messages.SUCCESS
try:
data = unsign(
signed_data, salt=EMAIL_CONFIRMATION_SALT, max_age=2 * 24 * 60 * 60
) # max age is 2 days in seconds
# is the currently logged in user the one that
# wants to add the email to their account
if request.user.id != int(data["user_id"]):
raise InvalidRequest
# check to see if the email has already been verified
try:
email = UserEmail.objects.get(user=request.user.id, email=data["email"])
if email.is_verified:
raise VerifiedEmailAlreadyExists()
except UserEmail.DoesNotExist:
# user email does not exist, so we can create it
pass
except VerifiedEmailAlreadyExists:
msg = INFO_EMAIL_ALREADY_VERIFIED
level = messages.INFO
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
except SignatureExpired:
msg = ERR_SIGNATURE_EXPIRED
level = messages.ERROR
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
except (InvalidRequest, BadSignature):
msg = ERR_CONFIRMING_EMAIL
level = messages.ERROR
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
except Exception:
logger.exception("user.email.signed-confirm.error")
msg = ERR_CONFIRMING_EMAIL
level = messages.ERROR
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
user = User.objects.get(id=request.user.id)
email = UserEmail.objects.create(
user=user,
email=data["email"],
validation_hash="",
is_verified=True,
)
email.save()
email_verified.send(email=email.email, sender=email)
logger.info(
"user.email.signed-confirm",
extra={
"user_id": request.user.id,
"ip_address": request.META["REMOTE_ADDR"],
"email": email.email,
},
)
messages.add_message(request, level, msg)
return HttpResponseRedirect(reverse("sentry-account-settings-emails"))
| VerifiedEmailAlreadyExists |
python | ray-project__ray | python/ray/serve/_private/logging_utils.py | {
"start": 3412,
"end": 3895
} | class ____(logging.Filter):
"""Serve log attribute removal filter.
The filter will remove unwanted attributes on the log record so they won't be
included in the structured logs.
Note: the filter doesn't do any filtering, it only removes unwanted attributes.
"""
def filter(self, record):
for key in SERVE_LOG_UNWANTED_ATTRS:
if hasattr(record, key):
delattr(record, key)
return True
| ServeLogAttributeRemovalFilter |
python | doocs__leetcode | solution/1900-1999/1923.Longest Common Subpath/Solution.py | {
"start": 0,
"end": 1179
} | class ____:
def longestCommonSubpath(self, n: int, paths: List[List[int]]) -> int:
def check(k: int) -> bool:
cnt = Counter()
for h in hh:
vis = set()
for i in range(1, len(h) - k + 1):
j = i + k - 1
x = (h[j] - h[i - 1] * p[j - i + 1]) % mod
if x not in vis:
vis.add(x)
cnt[x] += 1
return max(cnt.values()) == m
m = len(paths)
mx = max(len(path) for path in paths)
base = 133331
mod = 2**64 + 1
p = [0] * (mx + 1)
p[0] = 1
for i in range(1, len(p)):
p[i] = p[i - 1] * base % mod
hh = []
for path in paths:
k = len(path)
h = [0] * (k + 1)
for i, x in enumerate(path, 1):
h[i] = h[i - 1] * base % mod + x
hh.append(h)
l, r = 0, min(len(path) for path in paths)
while l < r:
mid = (l + r + 1) >> 1
if check(mid):
l = mid
else:
r = mid - 1
return l
| Solution |
python | ZoranPandovski__al-go-rithms | stats.py | {
"start": 544,
"end": 2081
} | class ____:
"""Store for saving and retrieval of JSON objects.
Ideally the JSON would be stored on a remote server and made available on
a specific URL.
Parameters:
bin_id - the ID of the remote json key (bin id).
"""
def __init__(self, bin_id):
self.bin_id = bin_id
def save(self, data):
"""Stores the given data on a remote server.
"""
data = dumps(data).encode('utf-8')
resp = request(
url=self.get_url(),
method='PUT',
data=data,
headers={
'Content-Type': 'application/json',
})
resp['data'] = resp['data'].decode('utf-8') if resp['data'] else ''
if resp['code'] != 200:
raise Exception('Failed to store JSON data. ' +
'Err code: {code}. Message: {data}'.format(**resp))
return loads(resp['data'])
def read(self):
"""Reads the data from the remote server.
"""
resp = request(
url=self.get_url(),
method='GET',
)
resp['data'] = resp['data'].decode('utf-8') if resp['data'] else ''
if resp['code'] != 200:
raise Exception('Failed to fetch JSON data. ' +
'Err code: {code}. Message: {data}'.format(**resp))
return loads(resp['data'])
def get_url(self):
"""Returns the data URL.
"""
return 'https://api.myjson.com/bins/{bin_id}'.format(bin_id=self.bin_id)
| JsonStore |
python | walkccc__LeetCode | solutions/3335. Total Characters in String After Transformations I/3335.py | {
"start": 0,
"end": 490
} | class ____:
def lengthAfterTransformations(self, s: str, t: int) -> int:
MOD = 1_000_000_007
count = [0] * 26
for c in s:
count[ord(c) - ord('a')] += 1
for _ in range(t):
newCount = [0] * 26
# 'a' -> 'b', 'b' -> 'c', ..., 'y' -> 'z'
for i in range(25):
newCount[i + 1] = count[i]
# 'z' -> 'ab'
newCount[0] = count[25]
newCount[1] = (newCount[1] + count[25]) % MOD
count = newCount
return sum(count) % MOD
| Solution |
python | getsentry__sentry | src/sentry/core/endpoints/organization_member_invite/index.py | {
"start": 4021,
"end": 8606
} | class ____(OrganizationEndpoint):
# TODO (mifu67): make these PUBLIC once ready
publish_status = {
"GET": ApiPublishStatus.EXPERIMENTAL,
"POST": ApiPublishStatus.EXPERIMENTAL,
}
permission_classes = (MemberInviteAndStaffPermission,)
owner = ApiOwner.ENTERPRISE
def _invite_member(self, request, organization) -> Response:
allowed_roles = get_allowed_org_roles(request, organization, creating_org_invite=True)
is_member = not request.access.has_scope("member:admin") and (
request.access.has_scope("member:invite")
)
validator = OrganizationMemberInviteRequestValidator(
data=request.data,
context={
"organization": organization,
"allowed_roles": allowed_roles,
"is_integration_token": request.access.is_integration_token,
"is_member": is_member,
"actor": request.user,
},
)
if not validator.is_valid():
return Response(validator.errors, status=400)
result = validator.validated_data
if ratelimits.for_organization_member_invite(
organization=organization,
email=result["email"],
user=request.user,
auth=request.auth,
):
metrics.incr(
"member-invite.attempt",
instance="rate_limited",
skip_internal=True,
sample_rate=1.0,
)
return Response({"detail": ERR_RATE_LIMITED}, status=429)
omi = _create_invite_object(request, organization, result, is_request=False)
referrer = request.query_params.get("referrer")
omi.send_invite_email(referrer)
member_invited.send_robust(
member=omi,
user=request.user,
sender=self,
referrer=request.data.get("referrer"),
)
return Response(serialize(omi), status=201)
def _request_to_invite_member(self, request: Request, organization) -> Response:
validator = OrganizationMemberInviteRequestValidator(
data=request.data,
context={
"organization": organization,
"allowed_roles": roles.get_all(),
"actor": request.user,
},
)
if not validator.is_valid():
return Response(validator.errors, status=400)
result = validator.validated_data
omi = _create_invite_object(request, organization, result, is_request=True)
async_send_notification(InviteRequestNotification, omi, request.user)
return Response(serialize(omi), status=201)
def get(self, request: Request, organization: Organization) -> Response:
"""
List all organization member invites.
"""
if not features.has(
"organizations:new-organization-member-invite", organization, actor=request.user
):
return Response({"detail": MISSING_FEATURE_MESSAGE}, status=403)
queryset = OrganizationMemberInvite.objects.filter(organization=organization).order_by(
"invite_status", "email"
)
return self.paginate(
request=request,
queryset=queryset,
on_results=lambda x: serialize(x, request.user, OrganizationMemberInviteSerializer()),
paginator_cls=OffsetPaginator,
)
def post(self, request: Request, organization) -> Response:
if not features.has(
"organizations:new-organization-member-invite", organization, actor=request.user
):
return Response({"detail": MISSING_FEATURE_MESSAGE}, status=403)
assigned_org_role = request.data.get("orgRole") or organization_roles.get_default().id
billing_bypass = assigned_org_role == "billing" and features.has(
"organizations:invite-billing", organization
)
if not billing_bypass and not features.has(
"organizations:invite-members", organization, actor=request.user
):
return Response(
{"organization": "Your organization is not allowed to invite members"}, status=403
)
# Check to see if the requesting user can invite members. If not, create an invite
# request.
if not _can_invite_member(request, organization):
return self._request_to_invite_member(request, organization)
return self._invite_member(request, organization)
| OrganizationMemberInviteIndexEndpoint |
python | pytorch__pytorch | torch/testing/_internal/autograd_function_db.py | {
"start": 5584,
"end": 6390
} | class ____(torch.autograd.Function):
@staticmethod
def forward(x):
x_np = to_numpy(x)
np.exp(x_np, x_np)
return x
@staticmethod
def setup_context(ctx, inputs, output):
x, = inputs
ctx.mark_dirty(x)
ctx.save_for_backward(output)
ctx.save_for_forward(output)
@staticmethod
def backward(ctx, grad_output):
output, = ctx.saved_tensors
return NumpyMul.apply(grad_output, output)
@staticmethod
def vmap(info, in_dims, x):
NumpyExp_.apply(x)
return x, in_dims[0]
@staticmethod
def jvp(ctx, x_tangent):
# Doesn't call numpy operations because I didn't want to write NumpyMul_
output, = ctx.saved_tensors
x_tangent.mul_(output)
return x_tangent
| NumpyExp_ |
python | fluentpython__example-code | attic/attributes/exists_truthy.py | {
"start": 417,
"end": 1043
} | class ____:
def __init__(self):
self.gadget = True
gizmo = Gizmo()
test_keys = 'hasattr', 'getattr', 'tryget'
def average(timings):
sample = timings[1:-1]
return sum(sample) / len(sample)
def do_tests():
for test_key in test_keys:
func_name = 'exists_and_truthy_' + test_key
test = func_name + '(gizmo, "gadget")'
setup = 'from __main__ import gizmo, ' + func_name
elapsed = average(timeit.repeat(test, repeat=5, setup=setup))
print(test_key.rjust(7), format(elapsed, '0.5f'))
if __name__ == '__main__':
do_tests()
del gizmo.gadget
do_tests()
| Gizmo |
python | django__django | tests/template_tests/filter_tests/test_filesizeformat.py | {
"start": 136,
"end": 2428
} | class ____(SimpleTestCase):
def test_formats(self):
tests = [
(0, "0\xa0bytes"),
(1, "1\xa0byte"),
(1023, "1023\xa0bytes"),
(1024, "1.0\xa0KB"),
(10 * 1024, "10.0\xa0KB"),
(1024 * 1024 - 1, "1024.0\xa0KB"),
(1024 * 1024, "1.0\xa0MB"),
(1024 * 1024 * 50, "50.0\xa0MB"),
(1024 * 1024 * 1024 - 1, "1024.0\xa0MB"),
(1024 * 1024 * 1024, "1.0\xa0GB"),
(1024 * 1024 * 1024 * 1024, "1.0\xa0TB"),
(1024 * 1024 * 1024 * 1024 * 1024, "1.0\xa0PB"),
(1024 * 1024 * 1024 * 1024 * 1024 * 2000, "2000.0\xa0PB"),
(complex(1, -1), "0\xa0bytes"),
("", "0\xa0bytes"),
("\N{GREEK SMALL LETTER ALPHA}", "0\xa0bytes"),
]
for value, expected in tests:
with self.subTest(value=value):
self.assertEqual(filesizeformat(value), expected)
def test_localized_formats(self):
tests = [
(0, "0\xa0Bytes"),
(1, "1\xa0Byte"),
(1023, "1023\xa0Bytes"),
(1024, "1,0\xa0KB"),
(10 * 1024, "10,0\xa0KB"),
(1024 * 1024 - 1, "1024,0\xa0KB"),
(1024 * 1024, "1,0\xa0MB"),
(1024 * 1024 * 50, "50,0\xa0MB"),
(1024 * 1024 * 1024 - 1, "1024,0\xa0MB"),
(1024 * 1024 * 1024, "1,0\xa0GB"),
(1024 * 1024 * 1024 * 1024, "1,0\xa0TB"),
(1024 * 1024 * 1024 * 1024 * 1024, "1,0\xa0PB"),
(1024 * 1024 * 1024 * 1024 * 1024 * 2000, "2000,0\xa0PB"),
(complex(1, -1), "0\xa0Bytes"),
("", "0\xa0Bytes"),
("\N{GREEK SMALL LETTER ALPHA}", "0\xa0Bytes"),
]
with translation.override("de"):
for value, expected in tests:
with self.subTest(value=value):
self.assertEqual(filesizeformat(value), expected)
def test_negative_numbers(self):
tests = [
(-1, "-1\xa0byte"),
(-100, "-100\xa0bytes"),
(-1024 * 1024 * 50, "-50.0\xa0MB"),
]
for value, expected in tests:
with self.subTest(value=value):
self.assertEqual(filesizeformat(value), expected)
| FunctionTests |
python | pytorch__pytorch | test/test_jit_fuser_te.py | {
"start": 107027,
"end": 109795
} | class ____(TestLoopnestRandomizationParent):
def setUp(self):
super(TestLoopnestRandomizationParent, self).setUp()
self.old_cpu_fuser_state = torch._C._jit_can_fuse_on_cpu()
self.old_must_use_cpu_state = torch._C._jit_get_te_must_use_llvm_cpu()
self.old_gpu_fuser_state = torch._C._jit_can_fuse_on_gpu()
torch._C._jit_override_can_fuse_on_cpu(True)
# TODO: force LLVM. need to add it to asan, mac, windows builds + sandcastle
# torch._C._jit_set_te_must_use_llvm_cpu(True)
torch._C._jit_override_can_fuse_on_gpu(True)
self.old_profiling_executor = torch._C._jit_set_profiling_executor(True)
self.old_profiling_mode = torch._C._get_graph_executor_optimize(True)
self.old_fusion_inlining = torch._C._debug_get_fusion_group_inlining()
torch._C._debug_set_fusion_group_inlining(False)
self.texpr_fuser_state = torch._C._jit_texpr_fuser_enabled()
torch._C._jit_set_texpr_fuser_enabled(True)
self.old_te_must_use_llvm_cpu = torch._C._jit_get_te_must_use_llvm_cpu()
torch._C._jit_set_te_must_use_llvm_cpu(False)
# Set the seed to 1. This tests the codepath through random
# transformation.
os.environ["PYTORCH_TENSOREXPR_RANDOM_TRANSFORM_SEED"] = "1"
def tearDown(self):
torch._C._jit_set_profiling_executor(self.old_profiling_executor)
torch._C._get_graph_executor_optimize(self.old_profiling_mode)
torch._C._jit_override_can_fuse_on_gpu(self.old_gpu_fuser_state)
torch._C._jit_override_can_fuse_on_cpu(self.old_cpu_fuser_state)
torch._C._jit_set_te_must_use_llvm_cpu(self.old_must_use_cpu_state)
torch._C._debug_set_fusion_group_inlining(self.old_fusion_inlining)
torch._C._jit_set_texpr_fuser_enabled(self.texpr_fuser_state)
torch._C._jit_set_te_must_use_llvm_cpu(self.old_te_must_use_llvm_cpu)
# Set it back to 0.
os.environ["PYTORCH_TENSOREXPR_RANDOM_TRANSFORM_SEED"] = "0"
super(TestLoopnestRandomizationParent, self).tearDown()
@onlyCPU
@unittest.skipIf(not LLVM_ENABLED, "Compiles with TensorExprKernel")
def test_relu(self, device):
def fn_test_relu(x, y):
return F.relu(x + 0.5 * y)
x = torch.randn(4, 4, dtype=torch.float, device=device)
y = torch.randn(4, 4, dtype=torch.float, device=device)
fn = fn_test_relu
traced_fn = torch.jit.trace(fn, (x, y))
ref = fn(x, y)
res = traced_fn(x, y)
assert torch.allclose(ref, res)
instantiate_device_type_tests(TestLoopnestRandomization, globals(), only_for=("cpu"))
if __name__ == "__main__":
if sys.version_info < (3, 14):
run_tests()
| TestLoopnestRandomization |
python | viewflow__viewflow | viewflow/urls/base.py | {
"start": 702,
"end": 932
} | class ____(str):
"""
Dump str wrapper.
Just to keep a reference over django url resolve calling
hierarchy.
"""
def __init__(self, value) -> None:
str.__init__(value)
self.extra = {}
| _UrlName |
python | huggingface__transformers | src/transformers/models/falcon_mamba/modeling_falcon_mamba.py | {
"start": 25027,
"end": 26120
} | class ____(GradientCheckpointingLayer):
def __init__(self, config, layer_idx):
super().__init__()
self.config = config
self.layer_idx = layer_idx
self.residual_in_fp32 = config.residual_in_fp32
self.norm = FalconMambaRMSNorm(config.hidden_size, eps=config.layer_norm_epsilon)
self.mixer = FalconMambaMixer(config, layer_idx=layer_idx)
def forward(
self,
hidden_states,
cache_params: Optional[FalconMambaCache] = None,
cache_position: Optional[torch.LongTensor] = None,
attention_mask: Optional[torch.LongTensor] = None,
):
residual = hidden_states
hidden_states = self.norm(hidden_states.to(dtype=self.norm.weight.dtype))
if self.residual_in_fp32:
residual = residual.to(torch.float32)
hidden_states = self.mixer(
hidden_states, cache_params=cache_params, cache_position=cache_position, attention_mask=attention_mask
)
hidden_states = residual + hidden_states
return hidden_states
@auto_docstring
| FalconMambaBlock |
python | spyder-ide__spyder | spyder/widgets/arraybuilder.py | {
"start": 1341,
"end": 1682
} | class ____(ArrayBuilderType):
ELEMENT_SEPARATOR = ', '
ROW_SEPARATOR = ';'
BRACES = '], ['
EXTRA_VALUES = {
'np.nan': ['nan', 'NAN', 'NaN', 'Na', 'NA', 'na'],
'np.inf': ['inf', 'INF'],
}
ARRAY_PREFIX = 'np.array([['
_REGISTERED_ARRAY_BUILDERS = {
'python': ArrayBuilderPython,
}
| ArrayBuilderPython |
python | pyinstaller__pyinstaller | PyInstaller/utils/hooks/setuptools.py | {
"start": 9717,
"end": 15734
} | class ____:
def __init__(self):
pass
def __repr__(self):
return "SetuptoolsInfo"
# Delay initialization of setuptools information until until the corresponding attributes are first requested.
def __getattr__(self, name):
if 'available' in self.__dict__:
# Initialization was already done, but requested attribute is not available.
raise AttributeError(name)
# Load setuptools info...
self._load_setuptools_info()
# ... and return the requested attribute
return getattr(self, name)
def _load_setuptools_info(self):
logger.info("%s: initializing cached setuptools info...", self)
# Initialize variables so that they might be accessed even if setuptools is unavailable or if initialization
# fails for some reason.
self.available = False
self.version = None
self.distutils_vendored = False
self.distutils_modules = []
self.vendored_status = dict()
self.vendored_modules = []
self.vendored_data = []
self.vendored_namespace_package_paths = dict()
try:
setuptools_info = _retrieve_setuptools_info()
except Exception as e:
logger.warning("%s: failed to obtain setuptools info: %s", self, e)
return
# If package could not be imported, `_retrieve_setuptools_info` returns None. In such cases, emit a debug
# message instead of a warning, because this initialization might be triggered by a helper function that is
# trying to determine availability of `setuptools` by inspecting the `available` attribute.
if setuptools_info is None:
logger.debug("%s: failed to obtain setuptools info: setuptools could not be imported.", self)
return
# Copy properties
for key, value in setuptools_info.items():
setattr(self, key, value)
def is_vendored(self, module_name):
return self.vendored_status.get(module_name, False)
@staticmethod
def _create_vendored_aliases(vendored_name, module_name, modules_list):
# Create aliases for all submodules
prefix_len = len(vendored_name) # Length of target-name prefix to remove
return ((module_name + vendored_module[prefix_len:], vendored_module) for vendored_module in modules_list
if vendored_module.startswith(vendored_name))
def get_vendored_aliases(self, module_name):
vendored_name = f"setuptools._vendor.{module_name}"
return self._create_vendored_aliases(vendored_name, module_name, self.vendored_modules)
def get_distutils_aliases(self):
vendored_name = "setuptools._distutils"
return self._create_vendored_aliases(vendored_name, "distutils", self.distutils_modules)
setuptools_info = SetuptoolsInfo()
def pre_safe_import_module_for_top_level_namespace_packages(api):
"""
A common implementation of pre_safe_import_module hook function for handling vendored top-level namespace packages
(i.e., `backports` and `jaraco`).
This function can be either called from the `pre_safe_import_module` function in a pre-safe-import-module hook, or
just imported into the hook and aliased to `pre_safe_import_module`.
"""
module_name = api.module_name
# Check if the package/module is a vendored copy. This also returns False is setuptools is unavailable, because
# vendored module status dictionary will be empty.
vendored = setuptools_info.is_vendored(module_name)
if not vendored:
return
if vendored == 'fully':
# For a fully-vendored copy, force creation of aliases; on one hand, this aims to ensure that submodules are
# resolvable, but on the other, it also prevents creation of unvendored top-level package, which should not
# exit in this case.
vendored_name = f"setuptools._vendor.{module_name}"
logger.info(
"Setuptools: %r appears to be a full setuptools-vendored copy - creating alias to %r!", module_name,
vendored_name
)
# Create aliases for all (sub)modules
for aliased_name, real_vendored_name in setuptools_info.get_vendored_aliases(module_name):
api.add_alias_module(real_vendored_name, aliased_name)
elif vendored == 'partially':
# For a partially-vendored copy, adjust the submodule search paths, so that submodules from all locations are
# discoverable (especially from the setuptools vendor directory, which might not be in the search path yet).
search_paths = setuptools_info.vendored_namespace_package_paths.get(module_name, [])
logger.info(
"Setuptools: %r appears to be a partial setuptools-vendored copy - extending search paths to %r!",
module_name, search_paths
)
for path in search_paths:
api.append_package_path(path)
else:
logger.warning("Setuptools: %r has unhandled vendored status: %r", module_name, vendored)
def pre_safe_import_module(api):
"""
A common implementation of pre_safe_import_module hook function.
This function can be either called from the `pre_safe_import_module` function in a pre-safe-import-module hook, or
just imported into the hook.
"""
module_name = api.module_name
# Check if the package/module is a vendored copy. This also returns False is setuptools is unavailable, because
# vendored module status dictionary will be empty.
if not setuptools_info.is_vendored(module_name):
return
vendored_name = f"setuptools._vendor.{module_name}"
logger.info(
"Setuptools: %r appears to be a setuptools-vendored copy - creating alias to %r!", module_name, vendored_name
)
# Create aliases for all (sub)modules
for aliased_name, real_vendored_name in setuptools_info.get_vendored_aliases(module_name):
api.add_alias_module(real_vendored_name, aliased_name)
| SetuptoolsInfo |
python | airbytehq__airbyte | airbyte-integrations/connectors/source-github/source_github/github_schema.py | {
"start": 10055,
"end": 10324
} | class ____(sgqlc.types.Enum):
"""Properties by which deployment connections can be ordered.
Enumeration Choices:
* `CREATED_AT`: Order collection by creation time
"""
__schema__ = github_schema
__choices__ = ("CREATED_AT",)
| DeploymentOrderField |
python | huggingface__transformers | src/transformers/models/ministral/modeling_ministral.py | {
"start": 18804,
"end": 21869
} | class ____(MinistralPreTrainedModel, GenerationMixin):
_tied_weights_keys = {"lm_head.weight": "model.embed_tokens.weight"}
_tp_plan = {"lm_head": "colwise_rep"}
_pp_plan = {"lm_head": (["hidden_states"], ["logits"])}
def __init__(self, config):
super().__init__(config)
self.model = MinistralModel(config)
self.vocab_size = config.vocab_size
self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
# Initialize weights and apply final processing
self.post_init()
@can_return_tuple
@auto_docstring
def forward(
self,
input_ids: Optional[torch.LongTensor] = None,
attention_mask: Optional[torch.Tensor] = None,
position_ids: Optional[torch.LongTensor] = None,
past_key_values: Optional[Cache] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
labels: Optional[torch.LongTensor] = None,
use_cache: Optional[bool] = None,
cache_position: Optional[torch.LongTensor] = None,
logits_to_keep: Union[int, torch.Tensor] = 0,
**kwargs: Unpack[TransformersKwargs],
) -> CausalLMOutputWithPast:
r"""
Example:
```python
>>> from transformers import AutoTokenizer, MinistralForCausalLM
>>> model = MinistralForCausalLM.from_pretrained("meta-ministral/Ministral-2-7b-hf")
>>> tokenizer = AutoTokenizer.from_pretrained("meta-ministral/Ministral-2-7b-hf")
>>> prompt = "Hey, are you conscious? Can you talk to me?"
>>> inputs = tokenizer(prompt, return_tensors="pt")
>>> # Generate
>>> generate_ids = model.generate(inputs.input_ids, max_length=30)
>>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
"Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
```"""
outputs: BaseModelOutputWithPast = self.model(
input_ids=input_ids,
attention_mask=attention_mask,
position_ids=position_ids,
past_key_values=past_key_values,
inputs_embeds=inputs_embeds,
use_cache=use_cache,
cache_position=cache_position,
**kwargs,
)
hidden_states = outputs.last_hidden_state
# Only compute necessary logits, and do not upcast them to float if we are not computing the loss
slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
logits = self.lm_head(hidden_states[:, slice_indices, :])
loss = None
if labels is not None:
loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)
return CausalLMOutputWithPast(
loss=loss,
logits=logits,
past_key_values=outputs.past_key_values,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
| MinistralForCausalLM |
python | numba__numba | numba/core/typing/dictdecl.py | {
"start": 486,
"end": 1872
} | class ____(AbstractTemplate):
def generic(self, args, kws):
if kws:
raise errors.TypingError(_message_dict_support)
if args:
iterable, = args
if isinstance(iterable, types.IterableType):
dtype = iterable.iterator_type.yield_type
if isinstance(dtype, types.UniTuple):
length = dtype.count
if length != 2:
msg = ("dictionary update sequence element has length "
f"{length}; 2 is required")
raise errors.TypingError(msg)
k = v = dtype.key[0]
elif isinstance(dtype, types.Tuple):
k, v = dtype.key
else:
raise errors.TypingError(_message_dict_support)
# dict key must be hashable
if not isinstance(k, types.Hashable):
msg = f"Unhashable type: {k}"
raise errors.TypingError(msg)
return signature(types.DictType(k, v), iterable)
else:
msg = ("Non-iterable args used in dict(iterable) "
f"constructor. Got 'dict({args[0]})'")
raise errors.TypingError(msg)
return signature(types.DictType(types.undefined, types.undefined))
| DictBuiltin |
python | getsentry__sentry | tests/sentry/seer/endpoints/test_group_ai_autofix.py | {
"start": 911,
"end": 31913
} | class ____(APITestCase, SnubaTestCase):
def _get_url(self, group_id: int) -> str:
return f"/api/0/issues/{group_id}/autofix/"
def setUp(self) -> None:
super().setUp()
self.organization.update_option("sentry:gen_ai_consent_v2024_11_14", True)
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
def test_ai_autofix_get_endpoint_with_autofix(
self, mock_get_autofix_state, mock_get_seer_org_acknowledgement
):
group = self.create_group()
mock_get_autofix_state.return_value = AutofixState(
run_id=123,
request={
"project_id": 456,
"organization_id": group.organization.id,
"issue": {"id": 789, "title": "Test Issue"},
"repos": [],
},
updated_at=datetime.fromisoformat("2023-07-18T12:00:00Z"),
status=AutofixStatus.PROCESSING,
)
self.login_as(user=self.user)
response = self.client.get(self._get_url(group.id), format="json")
assert response.status_code == 200
assert response.data["autofix"] is not None
assert response.data["autofix"]["status"] == "PROCESSING"
assert "issue" not in response.data["autofix"]["request"]
assert "trace_tree" not in response.data["autofix"]["request"]
assert "profile" not in response.data["autofix"]["request"]
assert "issue_summary" not in response.data["autofix"]["request"]
mock_get_autofix_state.assert_called_once_with(
group_id=group.id,
check_repo_access=True,
is_user_fetching=False,
organization_id=group.organization.id,
)
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
def test_ai_autofix_get_endpoint_without_autofix(
self, mock_get_autofix_state, mock_get_seer_org_acknowledgement
):
group = self.create_group()
mock_get_autofix_state.return_value = None
self.login_as(user=self.user)
response = self.client.get(self._get_url(group.id), format="json")
assert response.status_code == 200
assert response.data["autofix"] is None
mock_get_autofix_state.assert_called_once_with(
group_id=group.id,
check_repo_access=True,
is_user_fetching=False,
organization_id=group.organization.id,
)
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.get_sorted_code_mapping_configs")
def test_ai_autofix_get_endpoint_repositories(
self,
mock_get_sorted_code_mapping_configs,
mock_get_autofix_state,
mock_get_seer_org_acknowledgement,
):
group = self.create_group()
autofix_state = AutofixState(
run_id=123,
request={
"project_id": 456,
"organization_id": group.organization.id,
"issue": {"id": 789, "title": "Test Issue"},
"repos": [],
},
updated_at=datetime.fromisoformat("2023-07-18T12:00:00Z"),
status=AutofixStatus.PROCESSING,
codebases={
"id123": CodebaseState(
repo_external_id="id123",
is_readable=True,
is_writeable=True,
)
},
)
mock_get_autofix_state.return_value = autofix_state
class TestRepo:
def __init__(self):
self.url = "example.com"
self.external_id = "id123"
self.name = "test_repo"
self.provider = "github"
self.integration_id = 42
mock_get_sorted_code_mapping_configs.return_value = [
Mock(repository=TestRepo(), default_branch="main"),
]
self.login_as(user=self.user)
response = self.client.get(self._get_url(group.id), format="json")
assert response.status_code == 200
assert response.data["autofix"] is not None
assert len(response.data["autofix"]["repositories"]) == 1
repo = response.data["autofix"]["repositories"][0]
assert repo["default_branch"] == "main"
assert repo["name"] == "test_repo"
assert repo["provider"] == "github"
assert repo["external_id"] == "id123"
assert repo["url"] == "example.com"
assert repo["integration_id"] == 42
assert repo["is_readable"] is True
assert repo["is_writeable"] is True
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.get_sorted_code_mapping_configs")
def test_ai_autofix_get_endpoint_multiple_repositories(
self,
mock_get_sorted_code_mapping_configs,
mock_get_autofix_state,
mock_get_seer_org_acknowledgement,
):
group = self.create_group()
autofix_state = AutofixState(
run_id=123,
request={
"project_id": 456,
"organization_id": group.organization.id,
"issue": {"id": 789, "title": "Test Issue"},
"repos": [],
},
updated_at=datetime.fromisoformat("2023-07-18T12:00:00Z"),
status=AutofixStatus.PROCESSING,
codebases={
"id123": CodebaseState(
repo_external_id="id123",
is_readable=True,
is_writeable=True,
),
"id456": CodebaseState(
repo_external_id="id456",
is_readable=True,
is_writeable=False,
),
},
)
mock_get_autofix_state.return_value = autofix_state
class TestRepo:
def __init__(self, external_id, name, provider, url, integration_id):
self.url = url
self.external_id = external_id
self.name = name
self.provider = provider
self.integration_id = integration_id
repo1 = TestRepo("id123", "repo1", "github", "example.com/repo1", 42)
repo2 = TestRepo("id456", "repo2", "gitlab", "example.com/repo2", 43)
mock_get_sorted_code_mapping_configs.return_value = [
Mock(repository=repo1, default_branch="main"),
Mock(repository=repo2, default_branch="master"),
]
self.login_as(user=self.user)
response = self.client.get(self._get_url(group.id), format="json")
assert response.status_code == 200
assert response.data["autofix"] is not None
assert len(response.data["autofix"]["repositories"]) == 2
repositories = sorted(
response.data["autofix"]["repositories"], key=lambda x: x["integration_id"]
)
# Check first repo
repo = repositories[0]
assert repo["default_branch"] == "main"
assert repo["name"] == "repo1"
assert repo["provider"] == "github"
assert repo["external_id"] == "id123"
assert repo["url"] == "example.com/repo1"
assert repo["integration_id"] == 42
assert repo["is_readable"] is True
assert repo["is_writeable"] is True
# Check second repo
repo = repositories[1]
assert repo["default_branch"] == "master"
assert repo["name"] == "repo2"
assert repo["provider"] == "gitlab"
assert repo["external_id"] == "id456"
assert repo["url"] == "example.com/repo2"
assert repo["integration_id"] == 43
assert repo["is_readable"] is True
assert repo["is_writeable"] is False
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.get_sorted_code_mapping_configs")
def test_ai_autofix_get_endpoint_repository_not_in_codebase(
self,
mock_get_sorted_code_mapping_configs,
mock_get_autofix_state,
mock_get_seer_org_acknowledgement,
):
group = self.create_group()
autofix_state = AutofixState(
run_id=123,
request={
"project_id": 456,
"organization_id": group.organization.id,
"issue": {"id": 789, "title": "Test Issue"},
"repos": [],
},
updated_at=datetime.fromisoformat("2023-07-18T12:00:00Z"),
status=AutofixStatus.PROCESSING,
codebases={
"id123": CodebaseState(
repo_external_id="id123",
is_readable=True,
is_writeable=True,
)
},
)
mock_get_autofix_state.return_value = autofix_state
class TestRepo:
def __init__(self, external_id):
self.url = "example.com"
self.external_id = external_id
self.name = "test_repo"
self.provider = "github"
self.integration_id = 42
# Create a repo with a different external_id than what's in the codebase
mock_get_sorted_code_mapping_configs.return_value = [
Mock(repository=TestRepo("different_id"), default_branch="main"),
]
self.login_as(user=self.user)
response = self.client.get(self._get_url(group.id), format="json")
assert response.status_code == 200
assert response.data["autofix"] is not None
# No repositories should be included since the external_id doesn't match
assert len(response.data["autofix"]["repositories"]) == 0
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.get_sorted_code_mapping_configs")
def test_ai_autofix_get_endpoint_no_codebases(
self,
mock_get_sorted_code_mapping_configs,
mock_get_autofix_state,
mock_get_seer_org_acknowledgement,
):
group = self.create_group()
autofix_state = AutofixState(
run_id=123,
request={
"project_id": 456,
"organization_id": group.organization.id,
"issue": {"id": 789, "title": "Test Issue"},
"repos": [],
},
updated_at=datetime.fromisoformat("2023-07-18T12:00:00Z"),
status=AutofixStatus.PROCESSING,
# Empty codebases dictionary
codebases={},
)
mock_get_autofix_state.return_value = autofix_state
class TestRepo:
def __init__(self):
self.url = "example.com"
self.external_id = "id123"
self.name = "test_repo"
self.provider = "github"
self.integration_id = 42
mock_get_sorted_code_mapping_configs.return_value = [
Mock(repository=TestRepo(), default_branch="main"),
]
self.login_as(user=self.user)
response = self.client.get(self._get_url(group.id), format="json")
assert response.status_code == 200
assert response.data["autofix"] is not None
# Should have empty repositories list since there are no codebases
assert len(response.data["autofix"]["repositories"]) == 0
@patch("sentry.seer.explorer.utils.get_from_profiling_service")
@patch("sentry.seer.autofix.autofix._get_profile_from_trace_tree")
@patch("sentry.seer.autofix.autofix._call_autofix")
@patch("sentry.seer.autofix.autofix._get_trace_tree_for_event")
@patch("sentry.tasks.autofix.check_autofix_status.apply_async")
def test_ai_autofix_post_endpoint(
self,
mock_check_autofix_status,
mock_get_trace_tree,
mock_call,
mock_get_profile,
mock_get_from_profiling,
mock_get_seer_org_acknowledgement,
):
# Set up mock return values
mock_get_trace_tree.return_value = None
mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix
release = self.create_release(project=self.project, version="1.0.0")
repo = self.create_repo(
project=self.project,
name="getsentry/sentry",
provider="integrations:github",
external_id="123",
)
self.create_code_mapping(project=self.project, repo=repo)
data = load_data("python", timestamp=before_now(minutes=1))
event = self.store_event(
data={
**data,
"release": release.version,
"exception": {"values": [{"type": "exception", "data": {"values": []}}]},
},
project_id=self.project.id,
)
group = event.group
assert group is not None
group.save()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id),
data={"instruction": "Yes", "event_id": event.event_id},
format="json",
)
# Verify that _call_autofix was called once
mock_call.assert_called_once()
# Check individual parameters that we care about
call_kwargs = mock_call.call_args.kwargs
assert call_kwargs["group"].id == group.id # Check that the group object matches
# Check that the repos parameter contains the expected data
expected_repo_fields = {
"provider": "integrations:github",
"owner": "getsentry",
"name": "sentry",
"external_id": "123",
}
assert any(
all(repo.get(key) == value for key, value in expected_repo_fields.items())
for repo in call_kwargs["repos"]
)
# Check that the instruction was passed correctly
assert call_kwargs["instruction"] == "Yes"
# Check other parameters
assert call_kwargs["timeout_secs"] == TIMEOUT_SECONDS
# Verify that the serialized event has an exception entry
serialized_event_arg = call_kwargs["serialized_event"]
assert any(
[entry.get("type") == "exception" for entry in serialized_event_arg.get("entries", [])]
)
assert response.status_code == 202
mock_check_autofix_status.assert_called_once_with(
args=[123, group.organization.id], countdown=900
)
@patch("sentry.seer.explorer.utils.get_from_profiling_service")
@patch("sentry.seer.autofix.autofix._get_profile_from_trace_tree")
@patch("sentry.seer.autofix.autofix._call_autofix")
@patch("sentry.seer.autofix.autofix._get_trace_tree_for_event")
@patch("sentry.tasks.autofix.check_autofix_status.apply_async")
def test_ai_autofix_post_without_code_mappings(
self,
mock_check_autofix_status,
mock_get_trace_tree,
mock_call,
mock_get_profile,
mock_get_from_profiling,
mock_get_seer_org_acknowledgement,
):
# Set up mock return values
mock_get_trace_tree.return_value = None
mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix
release = self.create_release(project=self.project, version="1.0.0")
data = load_data("python", timestamp=before_now(minutes=1))
event = self.store_event(
data={
**data,
"release": release.version,
"exception": {"values": [{"type": "exception", "data": {"values": []}}]},
},
project_id=self.project.id,
)
group = event.group
assert group is not None
group.save()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id),
data={"instruction": "Yes", "event_id": event.event_id},
format="json",
)
# Verify that _call_autofix was called once
mock_call.assert_called_once()
# Check individual parameters that we care about
call_kwargs = mock_call.call_args.kwargs
assert call_kwargs["group"].id == group.id # Check that the group object matches
# Check that the repos parameter is an empty list (no code mappings)
assert call_kwargs["repos"] == []
# Check that the instruction was passed correctly
assert call_kwargs["instruction"] == "Yes"
# Check other parameters
assert call_kwargs["timeout_secs"] == TIMEOUT_SECONDS
# Verify that the serialized event has an exception entry
serialized_event_arg = call_kwargs["serialized_event"]
assert any(
[entry.get("type") == "exception" for entry in serialized_event_arg.get("entries", [])]
)
assert response.status_code == 202
mock_check_autofix_status.assert_called_once_with(
args=[123, group.organization.id], countdown=900
)
@patch("sentry.seer.explorer.utils.get_from_profiling_service")
@patch("sentry.seer.autofix.autofix._get_profile_from_trace_tree")
@patch("sentry.seer.autofix.autofix._call_autofix")
@patch("sentry.seer.autofix.autofix._get_trace_tree_for_event")
@patch("sentry.tasks.autofix.check_autofix_status.apply_async")
def test_ai_autofix_post_without_event_id(
self,
mock_check_autofix_status,
mock_get_trace_tree,
mock_call,
mock_get_profile,
mock_get_from_profiling,
mock_get_seer_org_acknowledgement,
):
# Set up mock return values
mock_get_trace_tree.return_value = None
mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix
release = self.create_release(project=self.project, version="1.0.0")
repo = self.create_repo(
project=self.project,
name="getsentry/sentry",
provider="integrations:github",
external_id="123",
)
self.create_code_mapping(project=self.project, repo=repo)
data = load_data("python", timestamp=before_now(minutes=1))
event = self.store_event(
data={
**data,
"release": release.version,
"exception": {"values": [{"type": "exception", "data": {"values": []}}]},
},
project_id=self.project.id,
)
group = event.group
assert group is not None
group.save()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id), data={"instruction": "Yes"}, format="json"
)
# Verify that _call_autofix was called once
mock_call.assert_called_once()
# Check individual parameters that we care about
call_kwargs = mock_call.call_args.kwargs
assert call_kwargs["group"].id == group.id # Check that the group object matches
# Check that the repos parameter contains the expected data
expected_repo_fields = {
"provider": "integrations:github",
"owner": "getsentry",
"name": "sentry",
"external_id": "123",
}
assert any(
all(repo.get(key) == value for key, value in expected_repo_fields.items())
for repo in call_kwargs["repos"]
)
# Check that the instruction was passed correctly
assert call_kwargs["instruction"] == "Yes"
# Check other parameters
assert call_kwargs["timeout_secs"] == TIMEOUT_SECONDS
# Verify that the serialized event has an exception entry
serialized_event_arg = call_kwargs["serialized_event"]
assert any(
[entry.get("type") == "exception" for entry in serialized_event_arg.get("entries", [])]
)
assert response.status_code == 202
mock_check_autofix_status.assert_called_once_with(
args=[123, group.organization.id], countdown=900
)
@patch("sentry.models.Group.get_recommended_event_for_environments", return_value=None)
@patch("sentry.seer.explorer.utils.get_from_profiling_service")
@patch("sentry.seer.autofix.autofix._call_autofix")
@patch("sentry.seer.autofix.autofix._get_trace_tree_for_event")
@patch("sentry.tasks.autofix.check_autofix_status.apply_async")
def test_ai_autofix_post_without_event_id_no_recommended_event(
self,
mock_check_autofix_status,
mock_get_trace_tree,
mock_call,
mock_get_profiling,
mock_event,
mock_get_seer_org_acknowledgement,
):
# Set up mock return values
mock_get_trace_tree.return_value = None
mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix
release = self.create_release(project=self.project, version="1.0.0")
repo = self.create_repo(
project=self.project,
name="getsentry/sentry",
provider="integrations:github",
external_id="123",
)
self.create_code_mapping(project=self.project, repo=repo)
data = load_data("python", timestamp=before_now(minutes=1))
event = self.store_event(
data={
**data,
"release": release.version,
"exception": {"values": [{"type": "exception", "data": {"values": []}}]},
},
project_id=self.project.id,
)
group = event.group
assert group is not None
group.save()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id), data={"instruction": "Yes"}, format="json"
)
# Verify that _call_autofix was called once
mock_call.assert_called_once()
# Check individual parameters that we care about
call_kwargs = mock_call.call_args.kwargs
assert call_kwargs["group"].id == group.id # Check that the group object matches
# Check that the repos parameter contains the expected data
expected_repo_fields = {
"provider": "integrations:github",
"owner": "getsentry",
"name": "sentry",
"external_id": "123",
}
assert any(
all(repo.get(key) == value for key, value in expected_repo_fields.items())
for repo in call_kwargs["repos"]
)
# Check that the instruction was passed correctly
assert call_kwargs["instruction"] == "Yes"
# Check other parameters
assert call_kwargs["timeout_secs"] == TIMEOUT_SECONDS
# Verify that the serialized event has an exception entry
serialized_event_arg = call_kwargs["serialized_event"]
assert any(
[entry.get("type") == "exception" for entry in serialized_event_arg.get("entries", [])]
)
assert response.status_code == 202
mock_check_autofix_status.assert_called_once_with(
args=[123, group.organization.id], countdown=900
)
@patch("sentry.models.Group.get_recommended_event_for_environments", return_value=None)
@patch("sentry.models.Group.get_latest_event", return_value=None)
def test_ai_autofix_post_without_event_id_error(
self, mock_latest_event, mock_recommended_event, mock_get_seer_org_acknowledgement
):
release = self.create_release(project=self.project, version="1.0.0")
repo = self.create_repo(
project=self.project,
name="getsentry/sentry",
provider="integrations:github",
external_id="123",
)
self.create_code_mapping(project=self.project, repo=repo)
data = load_data("python", timestamp=before_now(minutes=1))
event = self.store_event(
data={
**data,
"release": release.version,
"exception": {"values": [{"type": "exception", "data": {"values": []}}]},
},
project_id=self.project.id,
)
group = event.group
assert group is not None
group.save()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id), data={"instruction": "Yes"}, format="json"
)
assert response.status_code == 400
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.cache")
def test_ai_autofix_get_endpoint_cache_miss(
self, mock_cache, mock_get_autofix_state, mock_get_seer_org_acknowledgement
):
"""Test that repo access is checked when cache is empty"""
# Set up cache miss
mock_cache.get.return_value = None
# Set up mock autofix state
mock_get_autofix_state.return_value = None
url = self._get_url(self.group.id)
self.login_as(user=self.user)
response = self.client.get(url)
# Verify response
assert response.status_code == 200
# Verify cache behavior - cache miss should trigger repo access check
mock_cache.get.assert_called_once_with(f"autofix_access_check:{self.group.id}")
mock_get_autofix_state.assert_called_once_with(
group_id=self.group.id,
check_repo_access=True,
is_user_fetching=False,
organization_id=self.group.organization.id,
)
# Verify the cache was set with a 60-second timeout
mock_cache.set.assert_called_once_with(
f"autofix_access_check:{self.group.id}", True, timeout=60
)
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.cache")
def test_ai_autofix_get_endpoint_cache_hit(
self, mock_cache, mock_get_autofix_state, mock_get_seer_org_acknowledgement
):
"""Test that repo access is not checked when cache has a value"""
# Set up cache hit
mock_cache.get.return_value = True
# Set up mock autofix state
mock_get_autofix_state.return_value = None
url = self._get_url(self.group.id)
self.login_as(user=self.user)
response = self.client.get(url)
# Verify response
assert response.status_code == 200
# Verify cache behavior - cache hit should skip repo access check
mock_cache.get.assert_called_once_with(f"autofix_access_check:{self.group.id}")
mock_get_autofix_state.assert_called_once_with(
group_id=self.group.id,
check_repo_access=False,
is_user_fetching=False,
organization_id=self.group.organization.id,
)
# Verify the cache was not set again
mock_cache.set.assert_not_called()
@patch("sentry.seer.endpoints.group_ai_autofix.get_autofix_state")
@patch("sentry.seer.endpoints.group_ai_autofix.cache")
def test_ai_autofix_get_endpoint_polling_behavior(
self, mock_cache, mock_get_autofix_state, mock_get_seer_org_acknowledgement
):
"""Test that polling the endpoint only performs repository access checks once per minute"""
group = self.create_group()
url = self._get_url(group.id)
self.login_as(user=self.user)
# Mock the autofix state
mock_get_autofix_state.return_value = AutofixState(
run_id=123,
request={
"project_id": 456,
"organization_id": group.organization.id,
"issue": {"id": 789, "title": "Test Issue"},
"repos": [],
},
updated_at=datetime.fromisoformat("2023-07-18T12:00:00Z"),
status=AutofixStatus.PROCESSING,
)
# Simulate first request (cache miss)
mock_cache.get.return_value = None
response1 = self.client.get(url)
assert response1.status_code == 200
# Verify first request behavior
mock_cache.get.assert_called_once_with(f"autofix_access_check:{group.id}")
mock_get_autofix_state.assert_called_once_with(
group_id=group.id,
check_repo_access=True,
is_user_fetching=False,
organization_id=group.organization.id,
)
mock_cache.set.assert_called_once_with(f"autofix_access_check:{group.id}", True, timeout=60)
# Reset mocks for second request
mock_cache.reset_mock()
mock_get_autofix_state.reset_mock()
# Simulate second request within the 1-minute window (cache hit)
mock_cache.get.return_value = True
response2 = self.client.get(url)
assert response2.status_code == 200
# Verify second request behavior
mock_cache.get.assert_called_once_with(f"autofix_access_check:{group.id}")
mock_get_autofix_state.assert_called_once_with(
group_id=group.id,
check_repo_access=False,
is_user_fetching=False,
organization_id=group.organization.id,
)
mock_cache.set.assert_not_called()
# Reset mocks for third request
mock_cache.reset_mock()
mock_get_autofix_state.reset_mock()
# Simulate third request after cache expiration (cache miss again)
mock_cache.get.return_value = None
response3 = self.client.get(url)
assert response3.status_code == 200
# Verify third request behavior - should be like the first request
mock_cache.get.assert_called_once_with(f"autofix_access_check:{group.id}")
mock_get_autofix_state.assert_called_once_with(
group_id=group.id,
check_repo_access=True,
is_user_fetching=False,
organization_id=group.organization.id,
)
mock_cache.set.assert_called_once_with(f"autofix_access_check:{group.id}", True, timeout=60)
def test_ai_autofix_post_invalid_stopping_point_string(self, mock_get_seer_org_acknowledgement):
group = self.create_group()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id),
data={"instruction": "test", "stopping_point": "invalid"},
format="json",
)
assert response.status_code == 400
assert "stoppingPoint" in response.data
assert "not a valid choice" in str(response.data["stoppingPoint"])
def test_ai_autofix_post_invalid_stopping_point_type(self, mock_get_seer_org_acknowledgement):
group = self.create_group()
self.login_as(user=self.user)
response = self.client.post(
self._get_url(group.id),
data={"instruction": "test", "stopping_point": 123},
format="json",
)
assert response.status_code == 400
assert "stoppingPoint" in response.data
assert "not a valid choice" in str(response.data["stoppingPoint"])
| GroupAutofixEndpointTest |
python | keras-team__keras | keras/src/layers/preprocessing/image_preprocessing/random_saturation.py | {
"start": 321,
"end": 6105
} | class ____(BaseImagePreprocessingLayer):
"""Randomly adjusts the saturation on given images.
This layer will randomly increase/reduce the saturation for the input RGB
images.
**Note:** This layer is safe to use inside a `tf.data` or `grain` pipeline
(independently of which backend you're using).
Args:
factor: A tuple of two floats or a single float.
`factor` controls the extent to which the image saturation
is impacted. `factor=0.5` makes this layer perform a no-op
operation. `factor=0.0` makes the image fully grayscale.
`factor=1.0` makes the image fully saturated. Values should
be between `0.0` and `1.0`. If a tuple is used, a `factor`
is sampled between the two values for every image augmented.
If a single float is used, a value between `0.0` and the passed
float is sampled. To ensure the value is always the same,
pass a tuple with two identical floats: `(0.5, 0.5)`.
value_range: the range of values the incoming images will have.
Represented as a two-number tuple written `[low, high]`. This is
typically either `[0, 1]` or `[0, 255]` depending on how your
preprocessing pipeline is set up.
seed: Integer. Used to create a random seed.
Example:
```python
(images, labels), _ = keras.datasets.cifar10.load_data()
images = images.astype("float32")
random_saturation = keras.layers.RandomSaturation(factor=0.2)
augmented_images = random_saturation(images)
```
"""
_VALUE_RANGE_VALIDATION_ERROR = (
"The `value_range` argument should be a list of two numbers. "
)
def __init__(
self,
factor,
value_range=(0, 255),
data_format=None,
seed=None,
**kwargs,
):
super().__init__(data_format=data_format, **kwargs)
self._set_factor(factor)
self._set_value_range(value_range)
self.seed = seed
self.generator = SeedGenerator(seed)
def _set_value_range(self, value_range):
if not isinstance(value_range, (tuple, list)):
raise ValueError(
self._VALUE_RANGE_VALIDATION_ERROR
+ f"Received: value_range={value_range}"
)
if len(value_range) != 2:
raise ValueError(
self._VALUE_RANGE_VALIDATION_ERROR
+ f"Received: value_range={value_range}"
)
self.value_range = sorted(value_range)
def get_random_transformation(self, data, training=True, seed=None):
if isinstance(data, dict):
images = data["images"]
else:
images = data
images_shape = self.backend.shape(images)
rank = len(images_shape)
if rank == 3:
batch_size = 1
elif rank == 4:
batch_size = images_shape[0]
else:
raise ValueError(
"Expected the input image to be rank 3 or 4. Received: "
f"inputs.shape={images_shape}"
)
if seed is None:
seed = self._get_seed_generator(self.backend._backend)
factor = self.backend.random.uniform(
(batch_size,),
minval=self.factor[0],
maxval=self.factor[1],
seed=seed,
)
factor = factor / (1 - factor + epsilon())
return {"factor": factor}
def transform_images(self, images, transformation=None, training=True):
if training:
adjust_factors = transformation["factor"]
adjust_factors = self.backend.cast(
adjust_factors, self.compute_dtype
)
adjust_factors = self.backend.numpy.reshape(
adjust_factors, self.backend.shape(adjust_factors) + (1, 1)
)
images = self.backend.image.rgb_to_hsv(
images, data_format=self.data_format
)
if self.data_format == "channels_first":
s_channel = self.backend.numpy.multiply(
images[:, 1, :, :], adjust_factors
)
s_channel = self.backend.numpy.clip(
s_channel, self.value_range[0], self.value_range[1]
)
images = self.backend.numpy.stack(
[images[:, 0, :, :], s_channel, images[:, 2, :, :]], axis=1
)
else:
s_channel = self.backend.numpy.multiply(
images[..., 1], adjust_factors
)
s_channel = self.backend.numpy.clip(
s_channel, self.value_range[0], self.value_range[1]
)
images = self.backend.numpy.stack(
[images[..., 0], s_channel, images[..., 2]], axis=-1
)
images = self.backend.image.hsv_to_rgb(
images, data_format=self.data_format
)
return images
def transform_labels(self, labels, transformation, training=True):
return labels
def transform_segmentation_masks(
self, segmentation_masks, transformation, training=True
):
return segmentation_masks
def transform_bounding_boxes(
self, bounding_boxes, transformation, training=True
):
return bounding_boxes
def get_config(self):
config = super().get_config()
config.update(
{
"factor": self.factor,
"value_range": self.value_range,
"seed": self.seed,
}
)
return config
def compute_output_shape(self, input_shape):
return input_shape
| RandomSaturation |
python | numba__numba | numba/core/typing/templates.py | {
"start": 41089,
"end": 46440
} | class ____(_OverloadAttributeTemplate):
"""
A base class of templates for @overload_method functions.
"""
is_method = True
def _init_once(self):
"""
Overriding parent definition
"""
attr = self._attr
registry = self._get_target_registry('method')
@registry.lower((self.key, attr), self.key, types.VarArg(types.Any))
def method_impl(context, builder, sig, args):
typ = sig.args[0]
typing_context = context.typing_context
fnty = self._get_function_type(typing_context, typ)
sig = self._get_signature(typing_context, fnty, sig.args, {})
call = context.get_function(fnty, sig)
# Link dependent library
context.add_linking_libs(getattr(call, 'libs', ()))
return call(builder, args)
def _resolve(self, typ, attr):
if self._attr != attr:
return None
if isinstance(typ, types.TypeRef):
assert typ == self.key
elif isinstance(typ, types.Callable):
assert typ == self.key
else:
assert isinstance(typ, self.key)
class MethodTemplate(AbstractTemplate):
key = (self.key, attr)
_inline = self._inline
_overload_func = staticmethod(self._overload_func)
_inline_overloads = self._inline_overloads
prefer_literal = self.prefer_literal
def generic(_, args, kws):
args = (typ,) + tuple(args)
fnty = self._get_function_type(self.context, typ)
sig = self._get_signature(self.context, fnty, args, kws)
sig = sig.replace(pysig=utils.pysignature(self._overload_func))
for template in fnty.templates:
self._inline_overloads.update(template._inline_overloads)
if sig is not None:
return sig.as_method()
def get_template_info(self):
basepath = os.path.dirname(os.path.dirname(numba.__file__))
impl = self._overload_func
code, firstlineno, path = self.get_source_code_info(impl)
sig = str(utils.pysignature(impl))
info = {
'kind': "overload_method",
'name': getattr(impl, '__qualname__', impl.__name__),
'sig': sig,
'filename': utils.safe_relpath(path, start=basepath),
'lines': (firstlineno, firstlineno + len(code) - 1),
'docstring': impl.__doc__
}
return info
return types.BoundFunction(MethodTemplate, typ)
def make_overload_attribute_template(typ, attr, overload_func, inline='never',
prefer_literal=False,
base=_OverloadAttributeTemplate,
**kwargs):
"""
Make a template class for attribute *attr* of *typ* overloaded by
*overload_func*.
"""
assert isinstance(typ, types.Type) or issubclass(typ, types.Type)
name = "OverloadAttributeTemplate_%s_%s" % (typ, attr)
# Note the implementation cache is subclass-specific
dct = dict(key=typ, _attr=attr, _impl_cache={},
_inline=staticmethod(InlineOptions(inline)),
_inline_overloads={},
_overload_func=staticmethod(overload_func),
prefer_literal=prefer_literal,
metadata=kwargs,
)
obj = type(base)(name, (base,), dct)
return obj
def make_overload_method_template(typ, attr, overload_func, inline,
prefer_literal=False, **kwargs):
"""
Make a template class for method *attr* of *typ* overloaded by
*overload_func*.
"""
return make_overload_attribute_template(
typ, attr, overload_func, inline=inline,
base=_OverloadMethodTemplate, prefer_literal=prefer_literal,
**kwargs,
)
def bound_function(template_key):
"""
Wrap an AttributeTemplate resolve_* method to allow it to
resolve an instance method's signature rather than a instance attribute.
The wrapped method must return the resolved method's signature
according to the given self type, args, and keywords.
It is used thusly:
class ComplexAttributes(AttributeTemplate):
@bound_function("complex.conjugate")
def resolve_conjugate(self, ty, args, kwds):
return ty
*template_key* (e.g. "complex.conjugate" above) will be used by the
target to look up the method's implementation, as a regular function.
"""
def wrapper(method_resolver):
@functools.wraps(method_resolver)
def attribute_resolver(self, ty):
class MethodTemplate(AbstractTemplate):
key = template_key
def generic(_, args, kws):
sig = method_resolver(self, ty, args, kws)
if sig is not None and sig.recvr is None:
sig = sig.replace(recvr=ty)
return sig
return types.BoundFunction(MethodTemplate, ty)
return attribute_resolver
return wrapper
# -----------------------------
| _OverloadMethodTemplate |
python | ansible__ansible | lib/ansible/_internal/_templating/_jinja_bits.py | {
"start": 10838,
"end": 12053
} | class ____:
"""
Utility wrapper to wrap/unwrap args passed to Jinja `Template.render` and `TemplateExpression.__call__`.
e.g., see https://github.com/pallets/jinja/blob/3.1.3/src/jinja2/environment.py#L1296 and
https://github.com/pallets/jinja/blob/3.1.3/src/jinja2/environment.py#L1566.
"""
jinja_vars: c.Mapping[str, t.Any] | None
@classmethod
def package_jinja_vars(cls, jinja_vars: c.Mapping[str, t.Any]) -> dict[str, ArgSmuggler]:
"""Wrap the supplied vars dict in an ArgSmuggler to prevent premature templating from Jinja's internal dict copy."""
return dict(_smuggled_vars=ArgSmuggler(jinja_vars=jinja_vars))
@classmethod
def extract_jinja_vars(cls, maybe_smuggled_vars: c.Mapping[str, t.Any] | None) -> c.Mapping[str, t.Any]:
"""
If the supplied vars dict contains an ArgSmuggler instance with the expected key, unwrap it and return the smuggled value.
Otherwise, return the supplied dict as-is.
"""
if maybe_smuggled_vars and ((smuggler := maybe_smuggled_vars.get('_smuggled_vars')) and isinstance(smuggler, ArgSmuggler)):
return smuggler.jinja_vars
return maybe_smuggled_vars
| ArgSmuggler |
python | django__django | django/contrib/admin/filters.py | {
"start": 8043,
"end": 12178
} | class ____(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
other_model = get_model_from_relation(field)
self.lookup_kwarg = "%s__%s__exact" % (field_path, field.target_field.name)
self.lookup_kwarg_isnull = "%s__isnull" % field_path
self.lookup_val = params.get(self.lookup_kwarg)
self.lookup_val_isnull = get_last_value_from_parameters(
params, self.lookup_kwarg_isnull
)
super().__init__(field, request, params, model, model_admin, field_path)
self.lookup_choices = self.field_choices(field, request, model_admin)
if hasattr(field, "verbose_name"):
self.lookup_title = field.verbose_name
else:
self.lookup_title = other_model._meta.verbose_name
self.title = self.lookup_title
self.empty_value_display = model_admin.get_empty_value_display()
@property
def include_empty_choice(self):
"""
Return True if a "(None)" choice should be included, which filters
out everything except empty relationships.
"""
return self.field.null or (self.field.is_relation and self.field.many_to_many)
def has_output(self):
if self.include_empty_choice:
extra = 1
else:
extra = 0
return len(self.lookup_choices) + extra > 1
def expected_parameters(self):
return [self.lookup_kwarg, self.lookup_kwarg_isnull]
def field_admin_ordering(self, field, request, model_admin):
"""
Return the model admin's ordering for related field, if provided.
"""
try:
related_admin = model_admin.admin_site.get_model_admin(
field.remote_field.model
)
except NotRegistered:
return ()
else:
return related_admin.get_ordering(request)
def field_choices(self, field, request, model_admin):
ordering = self.field_admin_ordering(field, request, model_admin)
return field.get_choices(include_blank=False, ordering=ordering)
def get_facet_counts(self, pk_attname, filtered_qs):
counts = {
f"{pk_val}__c": models.Count(
pk_attname, filter=models.Q(**{self.lookup_kwarg: pk_val})
)
for pk_val, _ in self.lookup_choices
}
if self.include_empty_choice:
counts["__c"] = models.Count(
pk_attname, filter=models.Q(**{self.lookup_kwarg_isnull: True})
)
return counts
def choices(self, changelist):
add_facets = changelist.add_facets
facet_counts = self.get_facet_queryset(changelist) if add_facets else None
yield {
"selected": self.lookup_val is None and not self.lookup_val_isnull,
"query_string": changelist.get_query_string(
remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]
),
"display": _("All"),
}
count = None
for pk_val, val in self.lookup_choices:
if add_facets:
count = facet_counts[f"{pk_val}__c"]
val = f"{val} ({count})"
yield {
"selected": self.lookup_val is not None
and str(pk_val) in self.lookup_val,
"query_string": changelist.get_query_string(
{self.lookup_kwarg: pk_val}, [self.lookup_kwarg_isnull]
),
"display": val,
}
empty_title = self.empty_value_display
if self.include_empty_choice:
if add_facets:
count = facet_counts["__c"]
empty_title = f"{empty_title} ({count})"
yield {
"selected": bool(self.lookup_val_isnull),
"query_string": changelist.get_query_string(
{self.lookup_kwarg_isnull: "True"}, [self.lookup_kwarg]
),
"display": empty_title,
}
FieldListFilter.register(lambda f: f.remote_field, RelatedFieldListFilter)
| RelatedFieldListFilter |
python | django__django | tests/modeladmin/test_checks.py | {
"start": 45847,
"end": 46673
} | class ____(CheckTestCase):
def test_not_integer(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
min_num = "hello"
class TestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
TestModelAdmin,
ValidationTestModel,
"The value of 'min_num' must be an integer.",
"admin.E205",
invalid_obj=ValidationTestInline,
)
def test_valid_case(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
min_num = 2
class TestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsValid(TestModelAdmin, ValidationTestModel)
| MinNumCheckTests |
python | django-mptt__django-mptt | tests/myapp/tests.py | {
"start": 4279,
"end": 10122
} | class ____(TreeTestCase):
"""
Test that trees are in the appropriate state after reparenting and
that reparented items have the correct tree attributes defined,
should they be required for use after a save.
"""
fixtures = ["genres.json"]
def test_new_root_from_subtree(self):
shmup = Genre.objects.get(id=6)
shmup.parent = None
shmup.save()
self.assertTreeEqual([shmup], "6 - 3 0 1 6")
self.assertTreeEqual(
Genre.objects.all(),
"""
1 - 1 0 1 10
2 1 1 1 2 9
3 2 1 2 3 4
4 2 1 2 5 6
5 2 1 2 7 8
9 - 2 0 1 6
10 9 2 1 2 3
11 9 2 1 4 5
6 - 3 0 1 6
7 6 3 1 2 3
8 6 3 1 4 5
""",
)
def test_new_root_from_leaf_with_siblings(self):
platformer_2d = Genre.objects.get(id=3)
platformer_2d.parent = None
platformer_2d.save()
self.assertTreeEqual([platformer_2d], "3 - 3 0 1 2")
self.assertTreeEqual(
Genre.objects.all(),
"""
1 - 1 0 1 14
2 1 1 1 2 7
4 2 1 2 3 4
5 2 1 2 5 6
6 1 1 1 8 13
7 6 1 2 9 10
8 6 1 2 11 12
9 - 2 0 1 6
10 9 2 1 2 3
11 9 2 1 4 5
3 - 3 0 1 2
""",
)
def test_new_child_from_root(self):
action = Genre.objects.get(id=1)
rpg = Genre.objects.get(id=9)
action.parent = rpg
action.save()
self.assertTreeEqual([action], "1 9 2 1 6 21")
self.assertTreeEqual([rpg], "9 - 2 0 1 22")
self.assertTreeEqual(
Genre.objects.all(),
"""
9 - 2 0 1 22
10 9 2 1 2 3
11 9 2 1 4 5
1 9 2 1 6 21
2 1 2 2 7 14
3 2 2 3 8 9
4 2 2 3 10 11
5 2 2 3 12 13
6 1 2 2 15 20
7 6 2 3 16 17
8 6 2 3 18 19
""",
)
def test_move_leaf_to_other_tree(self):
shmup_horizontal = Genre.objects.get(id=8)
rpg = Genre.objects.get(id=9)
shmup_horizontal.parent = rpg
shmup_horizontal.save()
self.assertTreeEqual([shmup_horizontal], "8 9 2 1 6 7")
self.assertTreeEqual([rpg], "9 - 2 0 1 8")
self.assertTreeEqual(
Genre.objects.all(),
"""
1 - 1 0 1 14
2 1 1 1 2 9
3 2 1 2 3 4
4 2 1 2 5 6
5 2 1 2 7 8
6 1 1 1 10 13
7 6 1 2 11 12
9 - 2 0 1 8
10 9 2 1 2 3
11 9 2 1 4 5
8 9 2 1 6 7
""",
)
def test_move_subtree_to_other_tree(self):
shmup = Genre.objects.get(id=6)
trpg = Genre.objects.get(id=11)
shmup.parent = trpg
shmup.save()
self.assertTreeEqual([shmup], "6 11 2 2 5 10")
self.assertTreeEqual([trpg], "11 9 2 1 4 11")
self.assertTreeEqual(
Genre.objects.all(),
"""
1 - 1 0 1 10
2 1 1 1 2 9
3 2 1 2 3 4
4 2 1 2 5 6
5 2 1 2 7 8
9 - 2 0 1 12
10 9 2 1 2 3
11 9 2 1 4 11
6 11 2 2 5 10
7 6 2 3 6 7
8 6 2 3 8 9
""",
)
def test_move_child_up_level(self):
shmup_horizontal = Genre.objects.get(id=8)
action = Genre.objects.get(id=1)
shmup_horizontal.parent = action
shmup_horizontal.save()
self.assertTreeEqual([shmup_horizontal], "8 1 1 1 14 15")
self.assertTreeEqual([action], "1 - 1 0 1 16")
self.assertTreeEqual(
Genre.objects.all(),
"""
1 - 1 0 1 16
2 1 1 1 2 9
3 2 1 2 3 4
4 2 1 2 5 6
5 2 1 2 7 8
6 1 1 1 10 13
7 6 1 2 11 12
8 1 1 1 14 15
9 - 2 0 1 6
10 9 2 1 2 3
11 9 2 1 4 5
""",
)
def test_move_subtree_down_level(self):
shmup = Genre.objects.get(id=6)
platformer = Genre.objects.get(id=2)
shmup.parent = platformer
shmup.save()
self.assertTreeEqual([shmup], "6 2 1 2 9 14")
self.assertTreeEqual([platformer], "2 1 1 1 2 15")
self.assertTreeEqual(
Genre.objects.all(),
"""
1 - 1 0 1 16
2 1 1 1 2 15
3 2 1 2 3 4
4 2 1 2 5 6
5 2 1 2 7 8
6 2 1 2 9 14
7 6 1 3 10 11
8 6 1 3 12 13
9 - 2 0 1 6
10 9 2 1 2 3
11 9 2 1 4 5
""",
)
def test_move_to(self):
rpg = Genre.objects.get(pk=9)
action = Genre.objects.get(pk=1)
rpg.move_to(action)
rpg.save()
self.assertEqual(rpg.parent, action)
def test_invalid_moves(self):
# A node may not be made a child of itself
action = Genre.objects.get(id=1)
action.parent = action
platformer = Genre.objects.get(id=2)
platformer.parent = platformer
self.assertRaises(InvalidMove, action.save)
self.assertRaises(InvalidMove, platformer.save)
# A node may not be made a child of any of its descendants
platformer_4d = Genre.objects.get(id=5)
action.parent = platformer_4d
platformer.parent = platformer_4d
self.assertRaises(InvalidMove, action.save)
self.assertRaises(InvalidMove, platformer.save)
# New parent is still set when an error occurs
self.assertEqual(action.parent, platformer_4d)
self.assertEqual(platformer.parent, platformer_4d)
| ReparentingTestCase |
python | google__pytype | pytype/tests/test_typing2.py | {
"start": 22382,
"end": 23492
} | class ____(test_base.BaseTest):
"""Tests for typing.Counter."""
def test_counter_generic(self):
ty, _ = self.InferWithErrors("""
import collections
import typing
def freqs(s: str) -> typing.Counter[str]:
return collections.Counter(s)
x = freqs("")
y = freqs("")
z = collections.Counter() # type: typing.Counter[int]
x - y
x + y
x | y
x & y
x - z # unsupported-operands
x.most_common(1, 2, 3) # wrong-arg-count
a = x.most_common()
b = x.most_common(1)
c = x.elements()
d = z.elements()
e = x.copy()
f = x | z
""")
self.assertTypesMatchPytd(
ty,
"""
import collections
import typing
from typing import Counter, Iterable, List, Tuple, Union
a: List[Tuple[str, int]]
b: List[Tuple[str, int]]
c: Iterable[str]
d: Iterable[int]
e: Counter[str]
f: Counter[Union[int, str]]
x: Counter[str]
y: Counter[str]
z: Counter[int]
def freqs(s: str) -> Counter[str]: ...
""",
)
| CounterTest |
python | patrick-kidger__equinox | equinox/_jit.py | {
"start": 6442,
"end": 15225
} | class ____(Module):
fn: str # this attribute exists solely to give a nice repr
_signature: inspect.Signature = field(static=True, repr=False)
_dynamic_fun: PyTree = field(repr=False)
_static_fun: Any = field(static=True, repr=False)
_cached: Any = field(static=True, repr=False)
filter_warning: bool = field(static=True)
donate_first: bool = field(static=True)
donate_rest: bool = field(static=True)
@property
def __wrapped__(self):
return hashable_combine(self._dynamic_fun, self._static_fun)
def __call__(self, /, *args, **kwargs):
__tracebackhide__ = True
try:
return _call(self, False, args, kwargs)
except EquinoxRuntimeError as e:
# Use a two-part try/except here and in `_call` to delete the
# `raise EquinoxRuntimeError` line from the stack trace.
e.__traceback__ = None
raise
def lower(self, /, *args, **kwargs) -> Lowered:
return _call(self, True, args, kwargs)
def __get__(self, instance, owner):
del owner
if instance is None:
return self
return Partial(self, instance)
# _call is not a member method of _JitWrapper (even though it effectively does
# the same thing) because we want to avoid _call being wrapped in a _wrap_method,
# which adds about ~90μs per call.
def _call(jit_wrapper: _JitWrapper, is_lower, args, kwargs):
__tracebackhide__ = True
__equinox_jit_id__ = os.urandom(16)
info = (
jit_wrapper._signature,
jit_wrapper._dynamic_fun,
jit_wrapper._static_fun,
jit_wrapper.donate_first,
jit_wrapper.donate_rest,
)
dynamic_donate, dynamic_nodonate, static = _preprocess( # pyright: ignore
info, args, kwargs, return_static=True
)
if is_lower:
return Lowered(
jit_wrapper._cached.lower(dynamic_donate, dynamic_nodonate, static),
info,
_preprocess, # pyright: ignore
_postprocess, # pyright: ignore
)
else:
filter = _FilterCallback()
callback_logger = logging.getLogger("jax._src.callback")
callback_logger.addFilter(filter)
try:
if jit_wrapper.filter_warning:
with warnings.catch_warnings():
warnings.filterwarnings(
"ignore", message="Some donated buffers were not usable*"
)
marker, _, _ = out = jit_wrapper._cached(
dynamic_donate, dynamic_nodonate, static
)
else:
marker, _, _ = out = jit_wrapper._cached(
dynamic_donate, dynamic_nodonate, static
)
# We need to include the explicit `isinstance(marker, jax.Array)` check due
# to https://github.com/patrick-kidger/equinox/issues/988
if not isinstance(marker, jax.core.Tracer) and isinstance(
marker, jax.Array
):
marker.block_until_ready()
except JaxRuntimeError as e:
# Catch Equinox's runtime errors, and re-raise them with actually useful
# information. (By default XlaRuntimeError produces a lot of terrifying
# but useless information.)
if last_error_info is not None and "_EquinoxRuntimeError: " in str(e):
last_msg, last_stack = last_error_info
last_stack_pieces: list[str] = []
for id_or_str in last_stack:
if type(id_or_str) is str:
last_stack_pieces.append(id_or_str)
else:
if id_or_str == __equinox_jit_id__:
break
last_stack_str = "".join(reversed(last_stack_pieces))
raise EquinoxRuntimeError(
_on_error_msg.format(msg=last_msg, stack=last_stack_str)
) from None
# `from None` to hide the large but uninformative XlaRuntimeError.
else:
raise
finally:
callback_logger.removeFilter(filter)
return _postprocess(out)
@overload
def filter_jit(
*,
donate: Literal[
"all", "all-except-first", "warn", "warn-except-first", "none"
] = "none",
) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]: ...
@overload
def filter_jit(
fun: Callable[_P, _T],
*,
donate: Literal[
"all", "all-except-first", "warn", "warn-except-first", "none"
] = "none",
) -> Callable[_P, _T]: ...
@doc_remove_args("jitkwargs")
def filter_jit(
fun=sentinel,
*,
donate: Literal[
"all", "all-except-first", "warn", "warn-except-first", "none"
] = "none",
**jitkwargs,
):
"""An easier-to-use version of `jax.jit`. All JAX and NumPy arrays are traced, and
all other types are held static.
**Arguments:**
- `fun` is a pure function to JIT compile.
- `donate` indicates whether the buffers of JAX arrays are donated or not. It
should either be:
- `'all'`: donate all arrays and suppress all warnings about unused buffers;
- `'all-except-first'`: donate all arrays except for those in the first
argument, and suppress all warnings about unused buffers;
- `'warn'`: as above, but don't suppress unused buffer warnings;
- `'warn-except-first'`: as above, but don't suppress unused buffer warnings;
- `'none'`: no buffer donation. (This the default.)
**Returns:**
The JIT'd version of `fun`.
!!! example
```python
# Basic behaviour
@eqx.filter_jit
def f(x, y): # both args traced if arrays, static if non-arrays
return x + y, x - y
f(jnp.array(1), jnp.array(2)) # both args traced
f(jnp.array(1), 2) # first arg traced, second arg static
f(1, 2) # both args static
```
!!! info
Donating arguments allows their underlying memory to be used in the
computation. This can produce speed and memory improvements, but means that you
cannot use any donated arguments again, as their underlying memory has been
overwritten. (JAX will throw an error if you try to.)
!!! info
If you want to trace Python `bool`/`int`/`float`/`complex` as well then you
can do this by wrapping them into a JAX array: `jnp.asarray(x)`.
If you want to donate only some arguments then this can be done by setting
`filter_jit(donate="all-except-first")` and then passing all arguments that you
don't want to donate through the first argument. (Packing multiple values into
a tuple if necessary.)
"""
if fun is sentinel:
return ft.partial(filter_jit, donate=donate, **jitkwargs)
deprecated_0_10(jitkwargs, "default")
deprecated_0_10(jitkwargs, "fn")
deprecated_0_10(jitkwargs, "args")
deprecated_0_10(jitkwargs, "kwargs")
deprecated_0_10(jitkwargs, "out")
if any(
x in jitkwargs
for x in (
"static_argnums",
"static_argnames",
"donate_argnums",
"in_shardings",
"out_shardings",
)
):
raise ValueError(
"`jitkwargs` cannot contain 'static_argnums', 'static_argnames', "
"'donate_argnums', 'in_shardings', or 'out_shardings'."
)
signature = inspect.signature(fun)
if donate == "all":
filter_warning = True
donate_first = True
donate_rest = True
elif donate == "all-except-first":
filter_warning = True
donate_first = False
donate_rest = True
elif donate == "warn":
filter_warning = False
donate_first = True
donate_rest = True
elif donate == "warn-except-first":
filter_warning = False
donate_first = False
donate_rest = True
elif donate == "none":
filter_warning = False
donate_first = False
donate_rest = False
else:
raise ValueError(
"`filter_jit(..., donate=...)` must be one of 'all', 'all-except-first', "
"'warn', 'warn-except-first', or 'none'."
)
_, name = get_fn_names(fun)
dynamic_fun, static_fun = hashable_partition(fun, is_array)
cached = _filter_jit_cache(fun, jitkwargs)
jit_wrapper = _JitWrapper(
fn=name,
_signature=signature,
_dynamic_fun=dynamic_fun,
_static_fun=static_fun,
_cached=cached,
filter_warning=filter_warning,
donate_first=donate_first,
donate_rest=donate_rest,
)
return module_update_wrapper(jit_wrapper)
| _JitWrapper |
python | redis__redis-py | tests/test_asyncio/test_credentials.py | {
"start": 21017,
"end": 23453
} | class ____:
@pytest.mark.parametrize(
"r_credential",
[
{
"cred_provider_class": EntraIdCredentialsProvider,
},
{
"cred_provider_class": EntraIdCredentialsProvider,
"cred_provider_kwargs": {"block_for_initial": True},
},
{
"cred_provider_class": EntraIdCredentialsProvider,
"idp_kwargs": {"auth_type": AuthType.DEFAULT_AZURE_CREDENTIAL},
},
],
ids=["blocked", "non-blocked", "DefaultAzureCredential"],
indirect=True,
)
@pytest.mark.asyncio
@pytest.mark.onlynoncluster
@pytest.mark.cp_integration
async def test_async_auth_pool_with_credential_provider(self, r_credential: Redis):
assert await r_credential.ping() is True
@pytest.mark.parametrize(
"r_credential",
[
{
"cred_provider_class": EntraIdCredentialsProvider,
},
{
"cred_provider_class": EntraIdCredentialsProvider,
"cred_provider_kwargs": {"block_for_initial": True},
},
],
ids=["blocked", "non-blocked"],
indirect=True,
)
@pytest.mark.asyncio
@pytest.mark.onlynoncluster
@pytest.mark.cp_integration
async def test_async_pipeline_with_credential_provider(self, r_credential: Redis):
pipe = r_credential.pipeline()
await pipe.set("key", "value")
await pipe.get("key")
assert await pipe.execute() == [True, b"value"]
@pytest.mark.parametrize(
"r_credential",
[
{
"cred_provider_class": EntraIdCredentialsProvider,
},
],
indirect=True,
)
@pytest.mark.asyncio
@pytest.mark.onlynoncluster
@pytest.mark.cp_integration
async def test_async_auth_pubsub_with_credential_provider(
self, r_credential: Redis
):
p = r_credential.pubsub()
await p.subscribe("entraid")
await r_credential.publish("entraid", "test")
await r_credential.publish("entraid", "test")
msg1 = await p.get_message()
assert msg1["type"] == "subscribe"
@pytest.mark.asyncio
@pytest.mark.onlycluster
@pytest.mark.cp_integration
@pytest.mark.skipif(not EntraIdCredentialsProvider, reason="requires redis-entraid")
| TestEntraIdCredentialsProvider |
python | django__django | tests/defer_regress/models.py | {
"start": 1433,
"end": 1598
} | class ____(models.Model):
item = models.OneToOneField(Item, models.CASCADE, related_name="one_to_one_item")
name = models.CharField(max_length=15)
| OneToOneItem |
python | huggingface__transformers | tests/models/plbart/test_modeling_plbart.py | {
"start": 13411,
"end": 14042
} | class ____(unittest.TestCase):
maxDiff = 1000 # longer string compare tracebacks
checkpoint_name = None
@classmethod
def setUpClass(cls):
cls.tokenizer = AutoTokenizer.from_pretrained(cls.checkpoint_name, use_fast=False)
return cls
@cached_property
def model(self):
"""Only load the model if needed."""
model = PLBartForConditionalGeneration.from_pretrained(self.checkpoint_name).to(torch_device)
if "cuda" in torch_device:
model = model.half()
return model
@require_torch
@require_sentencepiece
@require_tokenizers
| AbstractSeq2SeqIntegrationTest |
python | tensorflow__tensorflow | tensorflow/python/data/experimental/ops/sleep.py | {
"start": 884,
"end": 1881
} | class ____(dataset_ops.UnaryUnchangedStructureDataset):
"""A `Dataset` that sleeps before producing each upstream element."""
def __init__(self, input_dataset, sleep_microseconds):
self._input_dataset = input_dataset
self._sleep_microseconds = sleep_microseconds
variant_tensor = gen_experimental_dataset_ops.sleep_dataset(
self._input_dataset._variant_tensor, # pylint: disable=protected-access
self._sleep_microseconds,
**self._flat_structure)
super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
def sleep(sleep_microseconds):
"""Sleeps for `sleep_microseconds` before producing each input element.
Args:
sleep_microseconds: The number of microseconds to sleep before producing an
input element.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
return _SleepDataset(dataset, sleep_microseconds)
return _apply_fn
| _SleepDataset |
python | crytic__slither | slither/printers/summary/data_depenency.py | {
"start": 700,
"end": 2151
} | class ____(AbstractPrinter):
ARGUMENT = "data-dependency"
HELP = "Print the data dependencies of the variables"
WIKI = "https://github.com/trailofbits/slither/wiki/Printer-documentation#data-dependencies"
def output(self, _filename: str) -> Output:
"""
_filename is not used
Args:
_filename(string)
"""
all_tables = []
all_txt = ""
txt = ""
for c in self.contracts:
txt += f"\nContract {c.name}\n"
table = MyPrettyTable(["Variable", "Dependencies"])
for v in c.state_variables:
assert v.name
table.add_row([v.name, sorted(_get(v, c))])
txt += str(table)
txt += "\n"
for f in c.functions_and_modifiers_declared:
txt += f"\nFunction {f.full_name}\n"
table = MyPrettyTable(["Variable", "Dependencies"])
for v in f.variables:
table.add_row([v.name, sorted(_get(v, f))])
for v in c.state_variables:
table.add_row([v.canonical_name, sorted(_get(v, f))])
txt += str(table)
self.info(txt)
all_txt += txt
all_tables.append((c.name, table))
res = self.generate_output(all_txt)
for name, table in all_tables:
res.add_pretty_table(table, name)
return res
| DataDependency |
python | doocs__leetcode | lcci/17.04.Missing Number/Solution2.py | {
"start": 0,
"end": 122
} | class ____:
def missingNumber(self, nums: List[int]) -> int:
return sum(range(len(nums) + 1)) - sum(nums)
| Solution |
python | great-expectations__great_expectations | great_expectations/expectations/metrics/map_metric_provider/column_map_metric_provider.py | {
"start": 611,
"end": 2844
} | class ____(MapMetricProvider):
"""Defines metrics that are evaluated for every row for a single column. An example of a column map
metric is `column_values.null` (which is implemented as a `ColumnMapMetricProvider`, a subclass of
`MapMetricProvider`).
---Documentation---
- https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_column_map_expectations
""" # noqa: E501 # FIXME CoP
condition_domain_keys = (
"batch_id",
"table",
"column",
"row_condition",
"condition_parser",
)
function_domain_keys = (
"batch_id",
"table",
"column",
"row_condition",
"condition_parser",
)
condition_value_keys = tuple()
function_value_keys = tuple()
@classmethod
@override
def _get_evaluation_dependencies(
cls,
metric: MetricConfiguration,
configuration: Optional[ExpectationConfiguration] = None,
execution_engine: Optional[ExecutionEngine] = None,
runtime_configuration: Optional[dict] = None,
):
dependencies: dict = super()._get_evaluation_dependencies(
metric=metric,
configuration=configuration,
execution_engine=execution_engine,
runtime_configuration=runtime_configuration,
)
table_domain_kwargs: dict = {
k: v for k, v in metric.metric_domain_kwargs.items() if k != "column"
}
dependencies["table.column_types"] = MetricConfiguration(
metric_name="table.column_types",
metric_domain_kwargs=table_domain_kwargs,
metric_value_kwargs={
"include_nested": True,
},
)
dependencies["table.columns"] = MetricConfiguration(
metric_name="table.columns",
metric_domain_kwargs=table_domain_kwargs,
metric_value_kwargs=None,
)
dependencies["table.row_count"] = MetricConfiguration(
metric_name="table.row_count",
metric_domain_kwargs=table_domain_kwargs,
metric_value_kwargs=None,
)
return dependencies
| ColumnMapMetricProvider |
python | pandas-dev__pandas | pandas/core/groupby/base.py | {
"start": 245,
"end": 2721
} | class ____:
label: Hashable
position: int
# special case to prevent duplicate plots when catching exceptions when
# forwarding methods from NDFrames
plotting_methods = frozenset(["plot", "hist"])
# cythonized transformations or canned "agg+broadcast", which do not
# require postprocessing of the result by transform.
cythonized_kernels = frozenset(["cumprod", "cumsum", "shift", "cummin", "cummax"])
# List of aggregation/reduction functions.
# These map each group to a single numeric value
reduction_kernels = frozenset(
[
"all",
"any",
"corrwith",
"count",
"first",
"idxmax",
"idxmin",
"last",
"max",
"mean",
"median",
"min",
"nunique",
"prod",
# as long as `quantile`'s signature accepts only
# a single quantile value, it's a reduction.
# GH#27526 might change that.
"quantile",
"sem",
"size",
"skew",
"kurt",
"std",
"sum",
"var",
]
)
# List of transformation functions.
# a transformation is a function that, for each group,
# produces a result that has the same shape as the group.
transformation_kernels = frozenset(
[
"bfill",
"cumcount",
"cummax",
"cummin",
"cumprod",
"cumsum",
"diff",
"ffill",
"ngroup",
"pct_change",
"rank",
"shift",
]
)
# these are all the public methods on Grouper which don't belong
# in either of the above lists
groupby_other_methods = frozenset(
[
"agg",
"aggregate",
"apply",
"boxplot",
# corr and cov return ngroups*ncolumns rows, so they
# are neither a transformation nor a reduction
"corr",
"cov",
"describe",
"expanding",
"ewm",
"filter",
"get_group",
"groups",
"head",
"hist",
"indices",
"ndim",
"ngroups",
"nth",
"ohlc",
"pipe",
"plot",
"resample",
"rolling",
"tail",
"take",
"transform",
"sample",
"value_counts",
]
)
# Valid values of `name` for `groupby.transform(name)`
# NOTE: do NOT edit this directly. New additions should be inserted
# into the appropriate list above.
transform_kernel_allowlist = reduction_kernels | transformation_kernels
| OutputKey |
python | tensorflow__tensorflow | tensorflow/python/kernel_tests/distributions/util_test.py | {
"start": 9790,
"end": 11336
} | class ____(test.TestCase):
@test_util.run_deprecated_v1
def testTooSmall(self):
with self.cached_session():
with self.assertRaises(ValueError):
param = array_ops.ones([1], dtype=np.float16)
checked_param = du.embed_check_categorical_event_shape(
param)
with self.assertRaisesOpError(
"must have at least 2 events"):
param = array_ops.placeholder(dtype=dtypes.float16)
checked_param = du.embed_check_categorical_event_shape(
param)
checked_param.eval(feed_dict={param: np.ones([1])})
@test_util.run_deprecated_v1
def testTooLarge(self):
with self.cached_session():
with self.assertRaises(ValueError):
param = array_ops.ones([int(2**11+1)], dtype=dtypes.float16)
checked_param = du.embed_check_categorical_event_shape(
param)
with self.assertRaisesOpError(
"Number of classes exceeds `dtype` precision"):
param = array_ops.placeholder(dtype=dtypes.float16)
checked_param = du.embed_check_categorical_event_shape(
param)
checked_param.eval(feed_dict={param: np.ones([int(2**11+1)])})
@test_util.disable_tfrt("b/169901260")
@test_util.run_in_graph_and_eager_modes
def testUnsupportedDtype(self):
param = ops.convert_to_tensor(
np.ones([2**11 + 1]).astype(dtypes.qint16.as_numpy_dtype),
dtype=dtypes.qint16)
with self.assertRaises(TypeError):
du.embed_check_categorical_event_shape(param)
| EmbedCheckCategoricalEventShapeTest |
python | huggingface__transformers | src/transformers/models/qwen2_5_omni/modeling_qwen2_5_omni.py | {
"start": 135184,
"end": 137597
} | class ____(nn.Module):
def __init__(self, dim, mult=4, dropout=0.0):
super().__init__()
inner_dim = int(dim * mult)
self.ff = nn.ModuleList(
[
nn.Linear(dim, inner_dim),
nn.GELU(approximate="tanh"),
nn.Dropout(dropout),
nn.Linear(inner_dim, dim),
]
)
def forward(self, hidden_states):
for layer in self.ff:
hidden_states = layer(hidden_states)
return hidden_states
# Modified from Llama with a different rotate function, will fixed in next release
def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
"""Applies Rotary Position Embedding to the query and key tensors.
Args:
q (`torch.Tensor`): The query tensor.
k (`torch.Tensor`): The key tensor.
cos (`torch.Tensor`): The cosine part of the rotary embedding.
sin (`torch.Tensor`): The sine part of the rotary embedding.
position_ids (`torch.Tensor`, *optional*):
Deprecated and unused.
unsqueeze_dim (`int`, *optional*, defaults to 1):
The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
Returns:
`tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
"""
def rotate_half_codec(x):
# x = rearrange(x, "... (d r) -> ... d r", r=2)
x = x.reshape(*x.shape[:-1], -1, 2)
x1, x2 = x.unbind(dim=-1)
x = torch.stack((-x2, x1), dim=-1)
return x.reshape(*x.shape[:-2], -1)
cos = cos.unsqueeze(unsqueeze_dim)
sin = sin.unsqueeze(unsqueeze_dim)
q_embed = (q * cos) + (rotate_half_codec(q) * sin)
k_embed = (k * cos) + (rotate_half_codec(k) * sin)
return q_embed, k_embed
| DiTMLP |
python | patrick-kidger__equinox | equinox/_ad.py | {
"start": 25834,
"end": 31163
} | class ____:
"""Filtered version of `jax.custom_jvp`.
Works in the same way as `jax.custom_jvp`, except that you do not need to specify
`nondiff_argnums`. Instead, arguments are automatically split into differentiable
and nondifferentiable. (Everything that is not a floating-point array is necessarily
nondifferentiable. In addition, some floating-point arrays may happen not to have
been differentiated.)
The tangents of the nondifferentiable arguments will be passed as `None`.
The return types must still all be JAX types.
Supports keyword arguments, which are always treated as nondifferentiable.
!!! Example
```python
@equinox.filter_custom_jvp
def call(x, y, *, fn):
return fn(x, y)
@call.def_jvp
def call_jvp(primals, tangents, *, fn):
x, y = primals
tx, ty = tangents
# `y` is not differentiated below, so it has a symbolic zero tangent,
# represented as a `None`.
assert ty is None
primal_out = call(x, y, fn=fn)
tangent_out = 2 * tx
return primal_out, tangent_out
x = jnp.array(2.0)
y = jnp.array(2.0)
fn = lambda a, b: a + b
# This only computes gradients for the first argument `x`.
equinox.filter_grad(call)(x, y, fn=fn)
```
"""
def __init__(self, fn):
def fn_wrapper(static, dynamic):
args, kwargs = combine(dynamic, static)
return fn(*args, **kwargs)
self.fn = jax.custom_jvp(fn_wrapper, nondiff_argnums=(0,))
def defjvp(self, fn_jvp):
warnings.warn(
"As of Equinox 0.10.7, `equinox.filter_custom_jvp.defjvp` is deprecated in "
"favour of `.def_jvp`. This new API supports symbolic zeros, which allow "
"for more efficient autodifferentiation rules. In particular:, `None` was "
"previously passed to indicate a symbolic zero tangent for all objects "
"that weren't inexact arrays, but all inexact arrays always had an "
"array-valued tangent. Now, `None` may also be passed to indicate that an "
"inexact array has a symbolic zero tangent.",
stacklevel=2,
)
def _fn_jvp(args, t_args, **kwargs):
t_args = jtu.tree_map(_materialise_symbolic_zero, args, t_args)
return fn_jvp(args, t_args, **kwargs)
self.def_jvp(_fn_jvp)
def def_jvp(self, fn_jvp):
def fn_jvp_wrapper(static, dynamic, tangents):
(dynamic,) = dynamic
(tangents,) = tangents
t_args, t_kwargs = jtu.tree_map(_drop_nondiff, tangents, dynamic)
if len(jtu.tree_leaves(t_kwargs)) > 0:
raise ValueError("Received keyword tangent")
args, kwargs = combine(dynamic, static)
out, t_out = fn_jvp(args, t_args, **kwargs)
t_out = jtu.tree_map(_none_to_zero, t_out, out, is_leaf=_is_none)
return out, t_out
self.fn.defjvp(fn_jvp_wrapper, symbolic_zeros=True)
def defjvps(self, *a, **kw):
raise NotImplementedError(
"`equinox.filter_custom_jvp.defjvps` is not implemented"
)
def __call__(self, *args, **kwargs):
dynamic, static = partition((args, kwargs), is_array)
return self.fn(static, dynamic)
@ft.partial(jax.custom_jvp, nondiff_argnums=(0,))
def _nondifferentiable(msg: str, x: PyTree[Array]):
return x
@_nondifferentiable.defjvp
def _nondifferentiable_jvp(msg: str, primals, tangents):
raise RuntimeError(msg)
def nondifferentiable(
x: PyTree, *, name: str | None = None, msg: str | None = None
) -> PyTree:
"""Identity function, which raises an error if it is differentiated (in forward or
reverse mode).
"""
dynamic, static = partition(x, is_array)
if msg is None:
if name is None:
name = "This operation"
msg = f"Unexpected tangent. {name} cannot be autodifferentiated."
dynamic = _nondifferentiable(msg, dynamic)
return combine(dynamic, static)
def _get_perturbed(x):
assert type(x) is jax.custom_derivatives.CustomVJPPrimal
return x.perturbed
def _get_value(x):
assert type(x) is jax.custom_derivatives.CustomVJPPrimal
return x.value
def _get_value_assert_unperturbed(x):
assert type(x) is jax.custom_derivatives.CustomVJPPrimal
assert x.perturbed is False
return x.value
def _zero_to_none(ct):
if isinstance(ct, jax.custom_derivatives.SymbolicZero):
return None
else:
return ct
def _none_to_zero(ct, x):
if ct is None:
if x is None:
return None
else:
aval = jax.core.get_aval(x)
if hasattr(aval, "to_tangent_aval"):
# Earlier versions of JAX were internally inconsistent, and expected
# e.g. integer primals to have integer tangents from `custom_{jvp,vjp}`
# rules.
# That changed in JAX 0.4.34.
aval = aval.to_tangent_aval() # pyright: ignore
else:
aval = jax.core.raise_to_shaped(aval) # pyright: ignore
return jax.custom_derivatives.SymbolicZero(aval)
else:
return ct
| filter_custom_jvp |
python | getsentry__sentry | src/sentry/sentry_apps/utils/webhooks.py | {
"start": 611,
"end": 690
} | class ____(SentryAppActionType):
TRIGGERED = "triggered"
| IssueAlertActionType |
python | doocs__leetcode | solution/0000-0099/0028.Find the Index of the First Occurrence in a String/Solution.py | {
"start": 0,
"end": 239
} | class ____:
def strStr(self, haystack: str, needle: str) -> int:
n, m = len(haystack), len(needle)
for i in range(n - m + 1):
if haystack[i : i + m] == needle:
return i
return -1
| Solution |
python | mlflow__mlflow | mlflow/types/responses.py | {
"start": 802,
"end": 1763
} | class ____(BaseRequestPayload):
"""Request object for ResponsesAgent.
Args:
input: List of simple `role` and `content` messages or output items. See examples at
https://mlflow.org/docs/latest/genai/flavors/responses-agent-intro#testing-out-your-agent
and
https://mlflow.org/docs/latest/genai/flavors/responses-agent-intro#creating-agent-output.
custom_inputs (Dict[str, Any]): An optional param to provide arbitrary additional context
to the model. The dictionary values must be JSON-serializable.
**Optional** defaults to ``None``
context (:py:class:`mlflow.types.agent.ChatContext`): The context to be used in the chat
endpoint. Includes conversation_id and user_id. **Optional** defaults to ``None``
"""
input: list[Message | OutputItem]
custom_inputs: dict[str, Any] | None = None
context: ChatContext | None = None
| ResponsesAgentRequest |
python | spack__spack | lib/spack/spack/util/compression.py | {
"start": 17522,
"end": 17864
} | class ____(CompressedFileTypeInterface):
_MAGIC_NUMBER = b"\xfd7zXZ"
extension = "xz"
name = "xz compressed data"
def peek(self, stream: BinaryIO, num_bytes: int) -> Optional[io.BytesIO]:
if LZMA_SUPPORTED:
return _decompressed_peek(lzma.LZMAFile(stream), stream, num_bytes)
return None
| LzmaFileType |
python | spack__spack | lib/spack/spack/install_test.py | {
"start": 41579,
"end": 41662
} | class ____(Exception):
"""Raised when a test (part) is being skipped."""
| SkipTest |
python | google__jax | jax/_src/clusters/slurm_cluster.py | {
"start": 858,
"end": 2555
} | class ____(clusters.ClusterEnv):
name: str = "slurm"
@classmethod
def is_env_present(cls) -> bool:
return all(var in os.environ for var in
(_JOBID_PARAM, _NODE_LIST, _PROCESS_COUNT, _PROCESS_ID, _LOCAL_PROCESS_ID))
@classmethod
def get_coordinator_address(cls, timeout_secs: int | None, override_coordinator_port: str | None) -> str:
if override_coordinator_port:
port = override_coordinator_port
else:
# Pick port in ephemeral range [(65535 - 2^12 + 1), 65535]
port = str(int(os.environ[_JOBID_PARAM]) % 2**12 + (65535 - 2**12 + 1))
# Parse the first hostname of the job
# If we are looking for 'node001',
# node_list potential formats are 'node001', 'node001,host2',
# 'node[001-0015],host2', and 'node[001,007-015],host2'.
node_list = os.environ[_NODE_LIST]
delims = {',', '['}
ind = next((i for i, ch in enumerate(node_list) if ch in delims), len(node_list))
if ind == len(node_list) or node_list[ind] == ',': # Formats: 'node001' or 'node001,host2'
return f'{node_list[:ind]}:{port}'
else: # Formats: 'node[001-0015],host2' or 'node[001,007-015],host2'
prefix = node_list[:ind]
suffix = node_list[ind+1:]
delims2 = {',', '-'}
ind2 = next((i for i, ch in enumerate(suffix) if ch in delims2), None)
return f'{prefix}{suffix[:ind2]}:{port}'
@classmethod
def get_process_count(cls) -> int:
return int(os.environ[_PROCESS_COUNT])
@classmethod
def get_process_id(cls) -> int:
return int(os.environ[_PROCESS_ID])
@classmethod
def get_local_process_id(cls) -> int | None:
return int(os.environ[_LOCAL_PROCESS_ID])
| SlurmCluster |
python | pytorch__pytorch | test/dynamo/test_higher_order_ops.py | {
"start": 132236,
"end": 139084
} | class ____(torch.nn.Module):
def forward(self, L_x_: "f32[4, 3]", L_y_: "f32[3, 4]"):
l_x_ = L_x_
l_y_ = L_y_
tensor: "i64[1]" = torch.tensor((12,))
cumsum: "i64[1]" = tensor.cumsum(dim = 0); tensor = None
getitem: "i64[0]" = cumsum[slice(None, -1, None)]; cumsum = None
neg: "i64[0]" = getitem.neg(); getitem = None
unbind = neg.unbind(); neg = unbind = None
chunk: "f32[12, 12]" = l_y_.new_zeros(12, 12)
diagonal: "f32[12]" = chunk.diagonal(0)
fill_: "f32[12]" = diagonal.fill_(1); diagonal = fill_ = None
child: "f32[12, 3, 4]" = chunk.view(12, 3, 4); chunk = None
lazy_load_decompositions = torch._functorch.predispatch.lazy_load_decompositions(); lazy_load_decompositions = None
_vmap_increment_nesting = torch._functorch.predispatch._vmap_increment_nesting(12, 'error'); _vmap_increment_nesting = None
child_1: "f32[3, 4]" = torch._functorch.predispatch._add_batch_dim(child, 0, 1); child = None
_jvp_increment_nesting = torch._C._functorch._jvp_increment_nesting(); _jvp_increment_nesting = None
_set_fwd_grad_enabled = torch._C._set_fwd_grad_enabled(True); _set_fwd_grad_enabled = None
_enter_dual_level = torch._C._enter_dual_level(); _enter_dual_level = None
_maybe_load_decompositions = torch.autograd.forward_ad._maybe_load_decompositions(); _maybe_load_decompositions = None
child_3: "f32[3, 4]" = torch._make_dual(l_y_, child_1, level = 0); child_1 = None
child_2: "f32[4, 3]" = torch._C._functorch._wrap_for_grad(l_x_, 2); l_x_ = None
_wrap_for_grad_1: "f32[3, 4]" = torch._C._functorch._wrap_for_grad(l_y_, 2); l_y_ = _wrap_for_grad_1 = None
_saved_tensors_hooks_disable = torch._C._autograd._saved_tensors_hooks_disable("torch.func.{grad, vjp, jacrev, hessian} don't yet support saved tensor hooks. Please open an issue with your use case."); _saved_tensors_hooks_disable = None
_grad_increment_nesting = torch._C._functorch._grad_increment_nesting(); _grad_increment_nesting = None
_wrap_for_grad_2: "f32[4, 3]" = torch._C._functorch._wrap_for_grad(child_2, 3); child_2 = None
child_4: "f32[3, 4]" = torch._C._functorch._wrap_for_grad(child_3, 3); child_3 = None
set_inplace_requires_grad_allowed = torch._C._functorch.set_inplace_requires_grad_allowed(True); set_inplace_requires_grad_allowed = None
_set_tensor_requires_grad: "f32[3, 4]" = torch._functorch.eager_transforms._set_tensor_requires_grad(child_4); _set_tensor_requires_grad = None
set_inplace_requires_grad_allowed_1 = torch._C._functorch.set_inplace_requires_grad_allowed(False); set_inplace_requires_grad_allowed_1 = None
primals_out: "f32[4, 3]" = _wrap_for_grad_2.sin(); _wrap_for_grad_2 = None
results: "f32[4, 3]" = torch._C._functorch._unwrap_for_grad(primals_out, 3)
_grad_decrement_nesting = torch._C._functorch._grad_decrement_nesting(); _grad_decrement_nesting = None
_saved_tensors_hooks_enable = torch._C._autograd._saved_tensors_hooks_enable(); _saved_tensors_hooks_enable = None
tensor_1: "i64[1]" = torch.tensor((12,))
cumsum_1: "i64[1]" = tensor_1.cumsum(dim = 0); tensor_1 = None
getitem_1: "i64[0]" = cumsum_1[slice(None, -1, None)]; cumsum_1 = None
neg_1: "i64[0]" = getitem_1.neg(); getitem_1 = None
unbind_1 = neg_1.unbind(); neg_1 = unbind_1 = None
chunk_1: "f32[12, 12]" = results.new_zeros(12, 12); results = None
diagonal_1: "f32[12]" = chunk_1.diagonal(0)
fill__1: "f32[12]" = diagonal_1.fill_(1); diagonal_1 = fill__1 = None
basis: "f32[12, 4, 3]" = chunk_1.view(12, 4, 3); chunk_1 = None
lazy_load_decompositions_1 = torch._functorch.predispatch.lazy_load_decompositions(); lazy_load_decompositions_1 = None
_vmap_increment_nesting_1 = torch._functorch.predispatch._vmap_increment_nesting(12, 'error'); _vmap_increment_nesting_1 = None
_add_batch_dim_1: "f32[4, 3]" = torch._functorch.predispatch._add_batch_dim(basis, 0, 3); basis = None
_autograd_grad = torch._functorch.eager_transforms._autograd_grad([primals_out], [child_4], [_add_batch_dim_1], retain_graph = True, create_graph = True); primals_out = child_4 = _add_batch_dim_1 = None
child_5: "f32[3, 4]" = _autograd_grad[0]; _autograd_grad = None
child_6: "f32[12, 3, 4]" = torch._functorch.predispatch._remove_batch_dim(child_5, 3, 12, 0); child_5 = None
_vmap_decrement_nesting = torch._functorch.predispatch._vmap_decrement_nesting(); _vmap_decrement_nesting = None
split = child_6.split((12,), dim = 0); child_6 = None
split_1: "f32[12, 3, 4]" = split[0]; split = None
child_7: "f32[4, 3, 3, 4]" = split_1.view((4, 3, 3, 4)); split_1 = None
_unpack_dual = torch._unpack_dual(child_7, level = 0); child_7 = None
primal: "f32[4, 3, 3, 4]" = _unpack_dual[0]; _unpack_dual = None
tangent: "f32[4, 3, 3, 4]" = torch.zeros_like(primal)
child_8: "f32[4, 3, 3, 4]" = torch._C._functorch._unwrap_for_grad(primal, 2); primal = child_8 = None
child_9: "f32[4, 3, 3, 4]" = torch._C._functorch._unwrap_for_grad(tangent, 2); tangent = None
_exit_dual_level = torch._C._exit_dual_level(0); _exit_dual_level = None
_set_fwd_grad_enabled_1 = torch._C._set_fwd_grad_enabled(True); _set_fwd_grad_enabled_1 = None
_jvp_decrement_nesting = torch._C._functorch._jvp_decrement_nesting(); _jvp_decrement_nesting = None
child_10: "f32[12, 4, 3, 3, 4]" = torch._functorch.predispatch._remove_batch_dim(child_9, 1, 12, 0); child_9 = None
_vmap_decrement_nesting_1 = torch._functorch.predispatch._vmap_decrement_nesting(); _vmap_decrement_nesting_1 = None
movedim: "f32[4, 3, 3, 4, 12]" = child_10.movedim(0, -1); child_10 = None
split_2 = movedim.split((12,), dim = -1); movedim = None
jac_out_in: "f32[4, 3, 3, 4, 12]" = split_2[0]; split_2 = None
unflatten: "f32[4, 3, 3, 4, 3, 4]" = jac_out_in.unflatten(-1, (3, 4)); jac_out_in = None""",
)
self.assertExpectedInline(
actual.split("\n")[-2],
""" return (unflatten,)""",
)
def test_jacrev(self):
counters.clear()
def wrapper_fn(x):
return torch.func.jacrev(torch.sin)(x)
x = torch.randn(4, 3)
wrapped_gm = self._compile_check(wrapper_fn, (x,))
# Dynamic shapes produce a slightly different graph.
if check_dynamic_shape_capture():
return
actual = normalize_gm(wrapped_gm.print_readable(print_output=False))
self.assertExpectedInline(
actual,
"""\
| GraphModule |
python | spyder-ide__spyder | spyder/plugins/application/plugin.py | {
"start": 1528,
"end": 45733
} | class ____(SpyderPluginV2):
NAME = 'application'
REQUIRES = [Plugins.Console, Plugins.Preferences]
OPTIONAL = [
Plugins.Editor,
Plugins.Help,
Plugins.IPythonConsole,
Plugins.MainMenu,
Plugins.StatusBar,
Plugins.Shortcuts, # Needed to display the app context menu
Plugins.Toolbar,
Plugins.UpdateManager, # Required in the confpage
]
CONTAINER_CLASS = ApplicationContainer
CONF_SECTION = 'main'
CONF_FILE = False
CONF_WIDGET_CLASS = ApplicationConfigPage
CAN_BE_DISABLED = False
def __init__(self, parent, configuration=None):
super().__init__(parent, configuration)
self.focused_plugin: Optional[SpyderDockablePlugin] = None
self.file_action_enabled: Dict[Tuple[str, str], bool] = {}
self.edit_action_enabled: Dict[Tuple[str, str], bool] = {}
self.search_action_enabled: Dict[Tuple[str, str], bool] = {}
@staticmethod
def get_name():
return _('Application')
@classmethod
def get_icon(cls):
return cls.create_icon('genprefs')
@staticmethod
def get_description():
return _('Provide main application base actions.')
def on_initialize(self):
container = self.get_container()
container.sig_report_issue_requested.connect(self.report_issue)
container.sig_new_file_requested.connect(self.create_new_file)
container.sig_open_file_in_plugin_requested.connect(
self.open_file_in_plugin
)
container.sig_open_file_using_dialog_requested.connect(
self.open_file_using_dialog
)
container.sig_open_last_closed_requested.connect(
self.open_last_closed_file
)
container.sig_save_file_requested.connect(self.save_file)
container.sig_save_all_requested.connect(self.save_all)
container.sig_save_file_as_requested.connect(self.save_file_as)
container.sig_save_copy_as_requested.connect(self.save_copy_as)
container.sig_revert_file_requested.connect(self.revert_file)
container.sig_close_file_requested.connect(self.close_file)
container.sig_close_all_requested.connect(self.close_all)
container.sig_undo_requested.connect(self.undo)
container.sig_redo_requested.connect(self.redo)
container.sig_cut_requested.connect(self.cut)
container.sig_copy_requested.connect(self.copy)
container.sig_paste_requested.connect(self.paste)
container.sig_select_all_requested.connect(self.select_all)
container.sig_find_requested.connect(self.find)
container.sig_find_next_requested.connect(self.find_next)
container.sig_find_previous_requested.connect(self.find_previous)
container.sig_replace_requested.connect(self.replace)
container.set_window(self._window)
self.sig_focused_plugin_changed.connect(self._update_focused_plugin)
# --------------------- PLUGIN INITIALIZATION -----------------------------
@on_plugin_available(plugin=Plugins.IPythonConsole)
def on_ipythonconsole_available(self):
if self.is_plugin_available(Plugins.MainMenu):
self._populate_help_menu()
@on_plugin_available(plugin=Plugins.Console)
def on_console_available(self):
if self.is_plugin_available(Plugins.MainMenu):
self.report_action.setVisible(True)
@on_plugin_available(plugin=Plugins.Preferences)
def on_preferences_available(self):
# Register conf page
preferences = self.get_plugin(Plugins.Preferences)
preferences.register_plugin_preferences(self)
@on_plugin_available(plugin=Plugins.MainMenu)
def on_main_menu_available(self):
self._populate_file_menu()
self._populate_edit_menu()
self._populate_search_menu()
self._populate_tools_menu()
if self.is_plugin_enabled(Plugins.IPythonConsole):
if self.is_plugin_available(Plugins.IPythonConsole):
self._populate_help_menu()
else:
self._populate_help_menu()
if not self.is_plugin_available(Plugins.Console):
self.report_action.setVisible(False)
@on_plugin_available(plugin=Plugins.Editor)
def on_editor_available(self):
editor = self.get_plugin(Plugins.Editor)
self.get_container().sig_load_log_file.connect(editor.load)
@on_plugin_available(plugin=Plugins.StatusBar)
def on_statusbar_available(self):
statusbar = self.get_plugin(Plugins.StatusBar)
inapp_appeal_status = self.get_container().inapp_appeal_status
statusbar.add_status_widget(inapp_appeal_status)
@on_plugin_available(plugin=Plugins.Toolbar)
def on_toolbar_available(self):
container = self.get_container()
toolbar = self.get_plugin(Plugins.Toolbar)
for action in [
container.new_action,
container.open_action,
container.save_action,
container.save_all_action
]:
toolbar.add_item_to_application_toolbar(
action,
toolbar_id=ApplicationToolbars.File,
before=EditorWidgetActions.NewCell
)
# -------------------------- PLUGIN TEARDOWN ------------------------------
@on_plugin_teardown(plugin=Plugins.Preferences)
def on_preferences_teardown(self):
preferences = self.get_plugin(Plugins.Preferences)
preferences.deregister_plugin_preferences(self)
@on_plugin_teardown(plugin=Plugins.Editor)
def on_editor_teardown(self):
editor = self.get_plugin(Plugins.Editor)
self.get_container().sig_load_log_file.disconnect(editor.load)
@on_plugin_teardown(plugin=Plugins.Console)
def on_console_teardown(self):
if self.is_plugin_available(Plugins.MainMenu):
self.report_action.setVisible(False)
    @on_plugin_teardown(plugin=Plugins.MainMenu)
    def on_main_menu_teardown(self):
        # Withdraw every menu contribution made in on_main_menu_available.
        self._depopulate_file_menu()
        self._depopulate_edit_menu()
        self._depopulate_search_menu()
        self._depopulate_tools_menu()
        self._depopulate_help_menu()
        self.report_action.setVisible(False)
@on_plugin_teardown(plugin=Plugins.StatusBar)
def on_statusbar_teardown(self):
statusbar = self.get_plugin(Plugins.StatusBar)
inapp_appeal_status = self.get_container().inapp_appeal_status
statusbar.remove_status_widget(inapp_appeal_status.ID)
@on_plugin_teardown(plugin=Plugins.Toolbar)
def on_toolbar_teardown(self):
toolbar = self.get_plugin(Plugins.Toolbar)
for action in [
ApplicationActions.NewFile,
ApplicationActions.OpenFile,
ApplicationActions.SaveFile,
ApplicationActions.SaveAll
]:
toolbar.remove_item_from_application_toolbar(
action,
toolbar_id=ApplicationToolbars.File
)
    def on_close(self, _unused=True):
        # `_unused` keeps the slot signature compatible with signals that
        # emit an argument -- TODO confirm which signal connects here.
        self.get_container().on_close()
    def on_mainwindow_visible(self):
        """Actions to run after the main window is visible."""
        container = self.get_container()

        # Show dialog with missing dependencies
        if not running_under_pytest():
            container.compute_dependencies()

        # Handle DPI scale and window changes to show a restart message.
        # Don't activate this functionality on macOS because it's being
        # triggered in the wrong situations.
        # See spyder-ide/spyder#11846
        if not sys.platform == 'darwin':
            window = self._window.windowHandle()
            window.screenChanged.connect(container.handle_new_screen)
            screen = self._window.windowHandle().screen()
            container.current_dpi = screen.logicalDotsPerInch()
            screen.logicalDotsPerInchChanged.connect(
                container.show_dpi_change_message)

        # Show appeal the fifth and 25th time Spyder starts
        spyder_runs = self.get_conf("spyder_runs_for_appeal", default=1)
        if spyder_runs in [5, 25]:
            container.inapp_appeal_status.show_appeal()

            # Increase counting in one to not get stuck at this point.
            # Fixes spyder-ide/spyder#22457
            self.set_conf("spyder_runs_for_appeal", spyder_runs + 1)
        else:
            # Stop updating the counter after the last appeal so the option
            # is not rewritten on every startup.
            if spyder_runs < 25:
                self.set_conf("spyder_runs_for_appeal", spyder_runs + 1)
# ---- Private API
# ------------------------------------------------------------------------
    def _populate_file_menu(self):
        """Add new/open/save/close/restart entries to the File menu."""
        container = self.get_container()
        mainmenu = self.get_plugin(Plugins.MainMenu)

        # New section
        mainmenu.add_item_to_application_menu(
            container.new_action,
            menu_id=ApplicationMenus.File,
            section=FileMenuSections.New,
            before_section=FileMenuSections.Open
        )

        # Open section
        open_actions = [
            container.open_action,
            container.open_last_closed_action,
            container.recent_files_menu,
        ]
        for open_action in open_actions:
            mainmenu.add_item_to_application_menu(
                open_action,
                menu_id=ApplicationMenus.File,
                section=FileMenuSections.Open,
                before_section=FileMenuSections.Save
            )

        # Save section
        save_actions = [
            container.save_action,
            container.save_all_action,
            container.save_as_action,
            container.save_copy_as_action,
            container.revert_action
        ]
        for save_action in save_actions:
            mainmenu.add_item_to_application_menu(
                save_action,
                menu_id=ApplicationMenus.File,
                section=FileMenuSections.Save,
                before_section=FileMenuSections.Print
            )

        # Close section
        close_actions = [
            container.close_file_action,
            container.close_all_action
        ]
        for close_action in close_actions:
            mainmenu.add_item_to_application_menu(
                close_action,
                menu_id=ApplicationMenus.File,
                section=FileMenuSections.Close,
                before_section=FileMenuSections.Restart
            )

        # Restart section
        mainmenu.add_item_to_application_menu(
            self.restart_action,
            menu_id=ApplicationMenus.File,
            section=FileMenuSections.Restart
        )
        mainmenu.add_item_to_application_menu(
            self.restart_debug_action,
            menu_id=ApplicationMenus.File,
            section=FileMenuSections.Restart
        )
def _populate_edit_menu(self):
container = self.get_container()
mainmenu = self.get_plugin(Plugins.MainMenu)
# Undo/Redo section
for action in [container.undo_action, container.redo_action]:
mainmenu.add_item_to_application_menu(
action,
menu_id=ApplicationMenus.Edit,
section=EditMenuSections.UndoRedo,
before_section=EditMenuSections.Editor
)
# Edit section
edit_actions = [
container.cut_action,
container.copy_action,
container.paste_action,
container.select_all_action
]
for edit_action in edit_actions:
mainmenu.add_item_to_application_menu(
edit_action,
menu_id=ApplicationMenus.Edit,
section=EditMenuSections.Copy,
before_section=EditMenuSections.Editor
)
def _populate_search_menu(self):
container = self.get_container()
mainmenu = self.get_plugin(Plugins.MainMenu)
# Find section
for action in [
container.find_action,
container.find_next_action,
container.find_previous_action,
container.replace_action,
]:
mainmenu.add_item_to_application_menu(
action,
menu_id=ApplicationMenus.Search,
section=SearchMenuSections.FindInText,
before_section=SearchMenuSections.Cursor,
)
def _populate_tools_menu(self):
"""Add base actions and menus to the Tools menu."""
mainmenu = self.get_plugin(Plugins.MainMenu)
mainmenu.add_item_to_application_menu(
self.user_env_action,
menu_id=ApplicationMenus.Tools,
section=ToolsMenuSections.Managers,
before_section=ToolsMenuSections.Preferences,
)
if get_debug_level() >= 2:
mainmenu.add_item_to_application_menu(
self.debug_logs_menu,
menu_id=ApplicationMenus.Help,
section=HelpMenuSections.Support,
before_section=HelpMenuSections.About,
)
    def _populate_help_menu(self):
        """Add base actions and menus to the Help menu."""
        # Sections are added in display order: documentation, support, about.
        self._populate_help_menu_documentation_section()
        self._populate_help_menu_support_section()
        self._populate_help_menu_about_section()
    def _populate_help_menu_documentation_section(self):
        """Add base Spyder documentation actions to the Help main menu."""
        mainmenu = self.get_plugin(Plugins.MainMenu)
        ipythonconsole = self.get_plugin(Plugins.IPythonConsole)
        ipython_documentation_submenu = None

        if ipythonconsole:
            # Local import: the IPython console is optional, so its API is
            # only imported when the plugin is actually present.
            from spyder.plugins.ipythonconsole.api import (
                IPythonConsoleWidgetMenus
            )
            ipython_documentation_submenu = (
                IPythonConsoleWidgetMenus.Documentation
            )

        # When the IPython console is present, place our entries before its
        # Documentation submenu (presumably `before=None` is ignored
        # otherwise -- confirm against the MainMenu API).
        for documentation_action in [
                self.documentation_action, self.video_action]:
            mainmenu.add_item_to_application_menu(
                documentation_action,
                menu_id=ApplicationMenus.Help,
                section=HelpMenuSections.ExternalDocumentation,
                before=ipython_documentation_submenu,
                before_section=HelpMenuSections.Support,
            )
def _populate_help_menu_support_section(self):
"""Add Spyder base support actions to the Help main menu."""
mainmenu = self.get_plugin(Plugins.MainMenu)
for support_action in [
self.trouble_action,
self.support_group_action,
self.dependencies_action,
self.report_action,
]:
mainmenu.add_item_to_application_menu(
support_action,
menu_id=ApplicationMenus.Help,
section=HelpMenuSections.Support,
before_section=HelpMenuSections.About,
)
def _populate_help_menu_about_section(self):
"""Create Spyder base about actions."""
mainmenu = self.get_plugin(Plugins.MainMenu)
for about_action in [
self.get_action(ApplicationActions.HelpSpyderAction),
self.about_action,
]:
mainmenu.add_item_to_application_menu(
about_action,
menu_id=ApplicationMenus.Help,
section=HelpMenuSections.About,
)
    @property
    def _window(self):
        """Top-level window widget that hosts the application."""
        return self.main.window()
    def _depopulate_help_menu(self):
        """Remove this plugin's contributions from the Help menu."""
        self._depopulate_help_menu_documentation_section()
        self._depopulate_help_menu_support_section()
        self._depopulate_help_menu_about_section()
def _depopulate_help_menu_documentation_section(self):
mainmenu = self.get_plugin(Plugins.MainMenu)
for documentation_action in [
ApplicationActions.SpyderDocumentationAction,
ApplicationActions.SpyderDocumentationVideoAction]:
mainmenu.remove_item_from_application_menu(
documentation_action,
menu_id=ApplicationMenus.Help)
    def _depopulate_help_menu_support_section(self):
        """Remove Spyder base support actions from the Help main menu."""
        mainmenu = self.get_plugin(Plugins.MainMenu)
        # Removal order does not need to mirror the order used when the
        # actions were added in _populate_help_menu_support_section.
        for support_action in [
                ApplicationActions.SpyderTroubleshootingAction,
                ConsoleActions.SpyderReportAction,
                ApplicationActions.SpyderDependenciesAction,
                ApplicationActions.SpyderSupportAction]:
            mainmenu.remove_item_from_application_menu(
                support_action,
                menu_id=ApplicationMenus.Help)
    def _depopulate_help_menu_about_section(self):
        """Remove the About action from the Help main menu."""
        # NOTE(review): _populate_help_menu_about_section also adds the
        # HelpSpyderAction entry, which is not removed here -- confirm
        # whether its teardown is owned elsewhere.
        mainmenu = self.get_plugin(Plugins.MainMenu)
        mainmenu.remove_item_from_application_menu(
            ApplicationActions.SpyderAbout,
            menu_id=ApplicationMenus.Help)
def _depopulate_file_menu(self):
container = self.get_container()
mainmenu = self.get_plugin(Plugins.MainMenu)
for action_id in [
ApplicationActions.NewFile,
ApplicationActions.OpenFile,
ApplicationActions.OpenLastClosed,
container.recent_file_menu,
ApplicationActions.SaveFile,
ApplicationActions.SaveAll,
ApplicationActions.SaveAs,
ApplicationActions.SaveCopyAs,
ApplicationActions.RevertFile,
ApplicationActions.CloseFile,
ApplicationActions.CloseAll,
ApplicationActions.SpyderRestart,
ApplicationActions.SpyderRestartDebug
]:
mainmenu.remove_item_from_application_menu(
action_id,
menu_id=ApplicationMenus.File)
def _depopulate_edit_menu(self):
mainmenu = self.get_plugin(Plugins.MainMenu)
for action_id in [
ApplicationActions.Undo,
ApplicationActions.Redo,
ApplicationActions.Cut,
ApplicationActions.Copy,
ApplicationActions.Paste,
ApplicationActions.SelectAll,
]:
mainmenu.remove_item_from_application_menu(
action_id,
menu_id=ApplicationMenus.File)
def _depopulate_search_menu(self):
mainmenu = self.get_plugin(Plugins.MainMenu)
for action_id in [
ApplicationActions.FindText,
ApplicationActions.FindNext,
ApplicationActions.FindPrevious,
ApplicationActions.ReplaceText,
]:
mainmenu.remove_item_from_application_menu(
action_id,
menu_id=ApplicationMenus.Search
)
    def _depopulate_tools_menu(self):
        """Remove base actions and menus from the Tools menu."""
        mainmenu = self.get_plugin(Plugins.MainMenu)
        mainmenu.remove_item_from_application_menu(
            ApplicationActions.SpyderUserEnvVariables,
            menu_id=ApplicationMenus.Tools)

        # The debug logs menu is only present in debug mode.
        if get_debug_level() >= 2:
            mainmenu.remove_item_from_application_menu(
                ApplicationPluginMenus.DebugLogsMenu,
                menu_id=ApplicationMenus.Tools)
def _update_focused_plugin(
self, plugin: Optional[SpyderDockablePlugin]
) -> None:
"""
Update which plugin has currently focus.
This function is called if another plugin gets keyboard focus.
"""
self.focused_plugin = plugin
self._update_file_actions()
self._update_edit_actions()
self._update_search_actions()
def _update_file_actions(self) -> None:
"""
Update which file actions are enabled.
File actions are enabled depending on whether the plugin that would
currently process the file action has enabled it or not.
"""
plugin = self.focused_plugin
if not plugin or not getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin = self.get_plugin(Plugins.Editor, error=False)
if plugin:
for action_name in [
ApplicationActions.NewFile,
ApplicationActions.OpenLastClosed,
ApplicationActions.SaveFile,
ApplicationActions.SaveAll,
ApplicationActions.SaveAs,
ApplicationActions.SaveCopyAs,
ApplicationActions.RevertFile,
ApplicationActions.CloseFile,
ApplicationActions.CloseAll,
]:
action = self.get_action(action_name)
key = (plugin.NAME, action_name)
state = self.file_action_enabled.get(key, True)
action.setEnabled(state)
def _update_edit_actions(self) -> None:
"""
Update which edit actions are enabled.
Edit actions are enabled depending on whether the plugin that would
currently process the edit action has enabled it or not.
"""
plugin = self.focused_plugin
if not plugin or not getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin = self.get_plugin(Plugins.Editor, error=False)
if plugin:
for action_name in [
ApplicationActions.Undo,
ApplicationActions.Redo,
ApplicationActions.Cut,
ApplicationActions.Copy,
ApplicationActions.Paste,
ApplicationActions.SelectAll,
]:
action = self.get_action(action_name)
key = (plugin.NAME, action_name)
state = self.edit_action_enabled.get(key, True)
action.setEnabled(state)
def _update_search_actions(self) -> None:
"""
Update which search actions are enabled.
Search actions are enabled depending on whether the plugin that would
currently process the search action has enabled it or not.
"""
plugin = self.focused_plugin
if not plugin or not getattr(plugin, 'CAN_HANDLE_SEARCH_ACTIONS', False):
plugin = self.get_plugin(Plugins.Editor, error=False)
if plugin:
for action_name in [
ApplicationActions.FindText,
ApplicationActions.FindNext,
ApplicationActions.FindPrevious,
ApplicationActions.ReplaceText,
]:
action = self.get_action(action_name)
key = (plugin.NAME, action_name)
state = self.search_action_enabled.get(key, True)
action.setEnabled(state)
# ---- Public API
# ------------------------------------------------------------------------
    def get_application_context_menu(self, parent=None):
        """
        Return menu with the actions to be shown by the Spyder context menu.
        """
        tutorial_action = None
        shortcuts_action = None
        # Both plugins are optional, so entries are added only when present.
        help_plugin = self.get_plugin(Plugins.Help)
        shortcuts = self.get_plugin(Plugins.Shortcuts)
        menu = SpyderMenu(parent=parent)
        actions = [self.documentation_action]

        # Help actions
        if help_plugin:
            # Local import: Help is optional, import its API lazily.
            from spyder.plugins.help.plugin import HelpActions
            tutorial_action = help_plugin.get_action(
                HelpActions.ShowSpyderTutorialAction)
            actions += [tutorial_action]

        # Shortcuts actions
        if shortcuts:
            # Local import: Shortcuts is optional, import its API lazily.
            from spyder.plugins.shortcuts.plugin import ShortcutActions
            shortcuts_action = shortcuts.get_action(
                ShortcutActions.ShortcutSummaryAction)
            actions.append(shortcuts_action)

        # Application actions
        actions += [MENU_SEPARATOR, self.about_action]

        add_actions(menu, actions)
        return menu
def report_issue(self):
if self.is_plugin_available(Plugins.Console):
console = self.get_plugin(Plugins.Console)
console.report_issue()
    def apply_settings(self):
        """Apply applications settings."""
        # NOTE(review): this uses `self._main` while sibling code uses
        # `self.main` -- presumably both refer to the same main window
        # object; confirm against the plugin base class.
        self._main.apply_settings()
    @Slot()
    def restart(self, reset=False, close_immediately=False):
        """
        Quit and Restart Spyder application.

        If reset True it allows to reset spyder on restart.
        """
        # NOTE(review): `reset` is not used in this body -- presumably the
        # restarter script picks it up elsewhere; confirm.
        # Get console plugin reference to call the quit action
        console = self.get_plugin(Plugins.Console)

        # Get start path to use in restart script
        spyder_start_directory = get_module_path('spyder')
        restart_script = osp.join(spyder_start_directory, 'app', 'restart.py')

        # Get any initial argument passed when spyder was started
        # Note: Variables defined in bootstrap.py and spyder/app/start.py
        env = os.environ.copy()
        bootstrap_args = env.pop('SPYDER_BOOTSTRAP_ARGS', None)
        # NOTE(review): no default here, so a missing SPYDER_ARGS raises
        # KeyError -- presumably start.py always sets it; confirm.
        spyder_args = env.pop('SPYDER_ARGS')

        # Get current process and python running spyder
        pid = os.getpid()
        python = SHORTCUT_EXE or sys.executable

        # Check if started with bootstrap.py
        if bootstrap_args is not None:
            spyder_args = bootstrap_args
            is_bootstrap = True
        else:
            is_bootstrap = False

        # Pass variables as environment variables (str) to restarter
        # subprocess
        env['SPYDER_ARGS'] = spyder_args
        env['SPYDER_PID'] = str(pid)
        env['SPYDER_IS_BOOTSTRAP'] = str(is_bootstrap)

        # Build the command and popen arguments depending on the OS
        if os.name == 'nt':
            # Hide flashing command prompt
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            shell = False
        else:
            startupinfo = None
            shell = True

        command = '"{0}" "{1}"'
        command = command.format(python, restart_script)

        try:
            # Only launch the restarter if the main window agrees to close.
            if self.main.closing(True, close_immediately=close_immediately):
                subprocess.Popen(command, shell=shell, env=env,
                                 startupinfo=startupinfo)
                console.quit()
        except Exception as error:
            # If there is an error with subprocess, Spyder should not quit and
            # the error can be inspected in the internal console
            print(error)  # spyder: test-skip
            print(command)  # spyder: test-skip
def create_new_file(self) -> None:
"""
Create new file in a suitable plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then create a new
file in that plugin. Otherwise, create a new file in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.create_new_file()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.new()
    def open_file_using_dialog(self) -> None:
        """
        Show Open File dialog and open the selected file.

        Try asking the plugin that currently has focus for the name of the
        displayed file and whether it is a temporary file. If that does not
        work, ask the Editor plugin.
        """
        plugin = self.focused_plugin
        if plugin:
            # NOTE(review): unlike the other file actions this does not check
            # CAN_HANDLE_FILE_ACTIONS, so any focused plugin is expected to
            # implement get_current_filename() -- confirm.
            filename = plugin.get_current_filename()
        else:
            filename = None

        if filename is None and self.is_plugin_available(Plugins.Editor):
            plugin = self.get_plugin(Plugins.Editor)
            filename = plugin.get_current_filename()

        # Start the dialog in the current file's directory; fall back to
        # cwd/home for temporary or unknown files.
        if filename is not None and not plugin.current_file_is_temporary():
            basedir = osp.dirname(filename)
        else:
            basedir = getcwd_or_home()

        self.get_container().open_file_using_dialog(filename, basedir)
    def open_file_in_plugin(self, filename: str) -> None:
        """
        Open given file in a suitable plugin.

        Go through all plugins and open the file in the first plugin that
        registered the extension of the given file name. If none is found,
        then open the file in the Editor plugin.
        """
        ext = osp.splitext(filename)[1]
        for plugin_name in PLUGIN_REGISTRY:
            if PLUGIN_REGISTRY.is_plugin_available(plugin_name):
                plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
                # Only dockable plugins declare FILE_EXTENSIONS.
                if (
                    isinstance(plugin, SpyderDockablePlugin)
                    and ext in plugin.FILE_EXTENSIONS
                ):
                    # Raise the plugin so the opened file is visible.
                    plugin.switch_to_plugin()
                    plugin.open_file(filename)
                    return

        # Fallback: the Editor opens any file.
        if self.is_plugin_available(Plugins.Editor):
            editor = self.get_plugin(Plugins.Editor)
            editor.load(filename)
def open_last_closed_file(self) -> None:
"""
Open the last closed file again.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then open the
last closed file in that plugin. Otherwise, open the last closed file
in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.open_last_closed_file()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.open_last_closed()
def add_recent_file(self, fname: str) -> None:
"""
Add file to list of recent files.
This function adds the given file name to the list of recent files,
which is used in the `File > Open recent` menu. The function ensures
that the list has no duplicates and it is no longer than the maximum
length.
"""
self.get_container().add_recent_file(fname)
def save_file(self) -> None:
"""
Save current file.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then save the
current file in that plugin. Otherwise, save the current file in the
Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.save_file()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.save()
def save_file_as(self) -> None:
"""
Save current file under a different name.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then save the
current file in that plugin under a different name. Otherwise, save
the current file in the Editor plugin under a different name.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.save_file_as()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.save_as()
def save_copy_as(self) -> None:
"""
Save copy of current file under a different name.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then save a copy of
the current file in that plugin under a different name. Otherwise, save
a copy of the current file in the Editor plugin under a different name.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.save_copy_as()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.save_copy_as()
def save_all(self) -> None:
"""
Save all files.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then save all files
in that plugin. Otherwise, save all files in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.save_all()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.save_all()
def revert_file(self) -> None:
"""
Revert current file to the version on disk.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then revert the
current file in that plugin to the version stored on disk. Otherwise,
revert the current file in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.revert_file()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.revert_file()
def close_file(self) -> None:
"""
Close the current file.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then close the
current file in that plugin. Otherwise, close the current file in the
Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.close_file()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.close_file()
def close_all(self) -> None:
"""
Close all opened files in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_FILE_ACTIONS` attribute set to `True`, then close all
files in that plugin. Otherwise, close all files in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_FILE_ACTIONS', False):
plugin.close_all()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.close_all_files()
def enable_file_action(
self,
action_name: str,
enabled: bool,
plugin: str
) -> None:
"""
Enable or disable file actions for a given plugin.
Parameters
----------
action_name : str
The name of the action to be enabled or disabled. These names
are listed in ApplicationActions, for instance "New file"
enabled : bool
True to enable the action, False to disable it.
plugin : str
The name of the plugin for which the file action needs to be
enabled or disabled.
"""
self.file_action_enabled[(plugin, action_name)] = enabled
self._update_file_actions()
def undo(self) -> None:
"""
Do an undo operation in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_EDIT_ACTIONS` attribute set to `True`, then do the undo
operation in that plugin. Otherwise, undo in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin.undo()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.undo()
def redo(self) -> None:
"""
Do a redo operation in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_EDIT_ACTIONS` attribute set to `True`, then do the redo
operation in that plugin. Otherwise, redo in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin.redo()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.redo()
def cut(self) -> None:
"""
Do a cut operation in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_EDIT_ACTIONS` attribute set to `True`, then do the cut
operation in that plugin. Otherwise, cut in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin.cut()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.cut()
def copy(self) -> None:
"""
Do a copy operation in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_EDIT_ACTIONS` attribute set to `True`, then do the copy
operation in that plugin. Otherwise, copy in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin.copy()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.copy()
def paste(self) -> None:
"""
Do a paste operation in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_EDIT_ACTIONS` attribute set to `True`, then do the paste
operation in that plugin. Otherwise, paste in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin.paste()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.paste()
def select_all(self) -> None:
"""
Do a select all operation in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_EDIT_ACTIONS` attribute set to `True`, then do the select
all operation in that plugin. Otherwise, select all in the Editor
plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_EDIT_ACTIONS', False):
plugin.select_all()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.select_all()
def enable_edit_action(
self,
action_name: str,
enabled: bool,
plugin: str
) -> None:
"""
Enable or disable edit actions for a given plugin.
Parameters
----------
action_name : str
The name of the action to be enabled or disabled. These names
are listed in ApplicationActions, for instance "Undo".
enabled : bool
True to enable the action, False to disable it.
plugin : str
The name of the plugin for which the edit action needs to be
enabled or disabled.
"""
self.edit_action_enabled[(plugin, action_name)] = enabled
self._update_edit_actions()
def find(self) -> None:
"""
Do a find text in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_SEARCH_ACTIONS` attribute set to `True`, then do the find
text in that plugin. Otherwise, find text in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_SEARCH_ACTIONS', False):
plugin.find()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.find()
def find_next(self) -> None:
"""
Do a find next text occurrence in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_SEARCH_ACTIONS` attribute set to `True`, then do the find
next text occurrence in that plugin. Otherwise, find next text
occurrence in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_SEARCH_ACTIONS', False):
plugin.find_next()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.find_next()
def find_previous(self) -> None:
"""
Do a find previous text occurrence in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_SEARCH_ACTIONS` attribute set to `True`, then do the find
previous text occurrence in that plugin. Otherwise, find previous text
occurrence in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_SEARCH_ACTIONS', False):
plugin.find_previous()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.find_previous()
def replace(self) -> None:
"""
Do a replace text occurrence in the current plugin.
If the plugin that currently has focus, has its
`CAN_HANDLE_SEARCH_ACTIONS` attribute set to `True`, then do the
replace text occurrence in that plugin. Otherwise, replace text
occurrence in the Editor plugin.
"""
plugin = self.focused_plugin
if plugin and getattr(plugin, 'CAN_HANDLE_SEARCH_ACTIONS', False):
plugin.replace()
elif self.is_plugin_available(Plugins.Editor):
editor = self.get_plugin(Plugins.Editor)
editor.replace()
def enable_search_action(
self,
action_name: str,
enabled: bool,
plugin: str
) -> None:
"""
Enable or disable a search action for a given plugin.
Parameters
----------
action_name : str
The name of the action to be enabled or disabled. These names
are listed in ApplicationActions, for instance "FindText".
enabled : bool
True to enable the action, False to disable it.
plugin : str
The name of the plugin for which the search action needs to be
enabled or disabled.
"""
self.search_action_enabled[(plugin, action_name)] = enabled
self._update_search_actions()
@property
def documentation_action(self):
"""Open Spyder's Documentation in the browser."""
return self.get_container().documentation_action
@property
def video_action(self):
"""Open Spyder's video documentation in the browser."""
return self.get_container().video_action
@property
def trouble_action(self):
"""Open Spyder's troubleshooting documentation in the browser."""
return self.get_container().trouble_action
@property
def dependencies_action(self):
"""Show Spyder's Dependencies dialog box."""
return self.get_container().dependencies_action
@property
def support_group_action(self):
"""Open Spyder's Google support group in the browser."""
return self.get_container().support_group_action
@property
def about_action(self):
"""Show Spyder's About dialog box."""
return self.get_container().about_action
@property
def user_env_action(self):
"""Show Spyder's Windows user env variables dialog box."""
return self.get_container().user_env_action
@property
def restart_action(self):
"""Restart Spyder action."""
return self.get_container().restart_action
@property
def restart_debug_action(self):
"""Restart Spyder in DEBUG mode action."""
return self.get_container().restart_debug_action
    @property
    def report_action(self):
        # Docstring fixed: this is the issue-report action, not restart
        # (previous text was a copy-paste of restart_action's docstring).
        """Report a Spyder issue action."""
        return self.get_container().report_action
    @property
    def debug_logs_menu(self):
        """Menu that gives access to Spyder's debug log files."""
        return self.get_container().get_menu(
            ApplicationPluginMenus.DebugLogsMenu)
| Application |
python | apache__airflow | airflow-core/tests/unit/plugins/test_plugins_manager.py | {
"start": 2133,
"end": 14913
} | class ____:
@pytest.fixture(autouse=True)
def clean_plugins(self):
from airflow import plugins_manager
plugins_manager.loaded_plugins = set()
plugins_manager.plugins = []
def test_no_log_when_no_plugins(self, caplog):
with mock_plugin_manager(plugins=[]):
from airflow import plugins_manager
plugins_manager.ensure_plugins_loaded()
assert caplog.record_tuples == []
def test_loads_filesystem_plugins(self, caplog):
from airflow import plugins_manager
with mock.patch("airflow.plugins_manager.plugins", []):
plugins_manager.load_plugins_from_plugin_directory()
assert len(plugins_manager.plugins) == 10
for plugin in plugins_manager.plugins:
if "AirflowTestOnLoadPlugin" in str(plugin):
assert plugin.name == "postload"
break
else:
pytest.fail("Wasn't able to find a registered `AirflowTestOnLoadPlugin`")
assert caplog.record_tuples == []
def test_loads_filesystem_plugins_exception(self, caplog, tmp_path):
from airflow import plugins_manager
with mock.patch("airflow.plugins_manager.plugins", []):
(tmp_path / "testplugin.py").write_text(ON_LOAD_EXCEPTION_PLUGIN)
with conf_vars({("core", "plugins_folder"): os.fspath(tmp_path)}):
plugins_manager.load_plugins_from_plugin_directory()
assert len(plugins_manager.plugins) == 3 # three are loaded from examples
received_logs = caplog.text
assert "Failed to import plugin" in received_logs
assert "testplugin.py" in received_logs
def test_should_warning_about_incompatible_plugins(self, caplog):
class AirflowAdminViewsPlugin(AirflowPlugin):
name = "test_admin_views_plugin"
admin_views = [mock.MagicMock()]
class AirflowAdminMenuLinksPlugin(AirflowPlugin):
name = "test_menu_links_plugin"
menu_links = [mock.MagicMock()]
with (
mock_plugin_manager(plugins=[AirflowAdminViewsPlugin(), AirflowAdminMenuLinksPlugin()]),
caplog.at_level(logging.WARNING, logger="airflow.plugins_manager"),
):
from airflow import plugins_manager
plugins_manager.initialize_flask_plugins()
assert caplog.record_tuples == [
(
"airflow.plugins_manager",
logging.WARNING,
"Plugin 'test_admin_views_plugin' may not be compatible with the current Airflow version. "
"Please contact the author of the plugin.",
),
(
"airflow.plugins_manager",
logging.WARNING,
"Plugin 'test_menu_links_plugin' may not be compatible with the current Airflow version. "
"Please contact the author of the plugin.",
),
]
def test_should_warning_about_conflicting_url_route(self, caplog):
class TestPluginA(AirflowPlugin):
name = "test_plugin_a"
external_views = [{"url_route": "/test_route"}, {"wrong_view": "/no_url_route"}]
class TestPluginB(AirflowPlugin):
name = "test_plugin_b"
external_views = [{"url_route": "/test_route"}]
react_apps = [{"url_route": "/test_route"}]
with (
mock_plugin_manager(plugins=[TestPluginA(), TestPluginB()]),
caplog.at_level(logging.WARNING, logger="airflow.plugins_manager"),
):
from airflow import plugins_manager
plugins_manager.initialize_ui_plugins()
# Verify that the conflicting external view and react app are not loaded
plugin_b = next(plugin for plugin in plugins_manager.plugins if plugin.name == "test_plugin_b")
assert plugin_b.external_views == []
assert plugin_b.react_apps == []
assert len(plugins_manager.external_views) == 1
assert len(plugins_manager.react_apps) == 0
def test_should_warning_about_external_views_or_react_app_wrong_object(self, caplog):
class TestPluginA(AirflowPlugin):
name = "test_plugin_a"
external_views = [[{"nested_list": "/test_route"}], {"url_route": "/test_route"}]
react_apps = [[{"nested_list": "/test_route"}], {"url_route": "/test_route_react_app"}]
with (
mock_plugin_manager(plugins=[TestPluginA()]),
caplog.at_level(logging.WARNING, logger="airflow.plugins_manager"),
):
from airflow import plugins_manager
plugins_manager.initialize_ui_plugins()
# Verify that the conflicting external view and react app are not loaded
plugin_a = next(plugin for plugin in plugins_manager.plugins if plugin.name == "test_plugin_a")
assert plugin_a.external_views == [{"url_route": "/test_route"}]
assert plugin_a.react_apps == [{"url_route": "/test_route_react_app"}]
assert len(plugins_manager.external_views) == 1
assert len(plugins_manager.react_apps) == 1
assert caplog.record_tuples == [
(
"airflow.plugins_manager",
logging.WARNING,
"Plugin 'test_plugin_a' has an external view that is not a dictionary. "
"The view will not be loaded.",
),
(
"airflow.plugins_manager",
logging.WARNING,
"Plugin 'test_plugin_a' has a React App that is not a dictionary. "
"The React App will not be loaded.",
),
]
def test_should_not_warning_about_fab_plugins(self, caplog):
class AirflowAdminViewsPlugin(AirflowPlugin):
name = "test_admin_views_plugin"
appbuilder_views = [mock.MagicMock()]
class AirflowAdminMenuLinksPlugin(AirflowPlugin):
name = "test_menu_links_plugin"
appbuilder_menu_items = [mock.MagicMock()]
with (
mock_plugin_manager(plugins=[AirflowAdminViewsPlugin(), AirflowAdminMenuLinksPlugin()]),
caplog.at_level(logging.WARNING, logger="airflow.plugins_manager"),
):
from airflow import plugins_manager
plugins_manager.initialize_flask_plugins()
assert caplog.record_tuples == []
def test_should_not_warning_about_fab_and_flask_admin_plugins(self, caplog):
class AirflowAdminViewsPlugin(AirflowPlugin):
name = "test_admin_views_plugin"
admin_views = [mock.MagicMock()]
appbuilder_views = [mock.MagicMock()]
class AirflowAdminMenuLinksPlugin(AirflowPlugin):
name = "test_menu_links_plugin"
menu_links = [mock.MagicMock()]
appbuilder_menu_items = [mock.MagicMock()]
with (
mock_plugin_manager(plugins=[AirflowAdminViewsPlugin(), AirflowAdminMenuLinksPlugin()]),
caplog.at_level(logging.WARNING, logger="airflow.plugins_manager"),
):
from airflow import plugins_manager
plugins_manager.initialize_flask_plugins()
assert caplog.record_tuples == []
def test_entrypoint_plugin_errors_dont_raise_exceptions(self, mock_metadata_distribution, caplog):
"""
Test that Airflow does not raise an error if there is any Exception because of a plugin.
"""
from airflow.plugins_manager import import_errors, load_entrypoint_plugins
mock_dist = mock.Mock()
mock_dist.metadata = {"Name": "test-dist"}
mock_entrypoint = mock.Mock()
mock_entrypoint.name = "test-entrypoint"
mock_entrypoint.group = "airflow.plugins"
mock_entrypoint.module = "test.plugins.test_plugins_manager"
mock_entrypoint.load.side_effect = ImportError("my_fake_module not found")
mock_dist.entry_points = [mock_entrypoint]
with (
mock_metadata_distribution(return_value=[mock_dist]),
caplog.at_level(logging.ERROR, logger="airflow.plugins_manager"),
):
load_entrypoint_plugins()
received_logs = caplog.text
# Assert Traceback is shown too
assert "Traceback (most recent call last):" in received_logs
assert "my_fake_module not found" in received_logs
assert "Failed to import plugin test-entrypoint" in received_logs
assert ("test.plugins.test_plugins_manager", "my_fake_module not found") in import_errors.items()
def test_registering_plugin_macros(self, request):
"""
Tests whether macros that originate from plugins are being registered correctly.
"""
from airflow.plugins_manager import integrate_macros_plugins
from airflow.sdk.execution_time import macros
def cleanup_macros():
"""Reloads the macros module such that the symbol table is reset after the test."""
# We're explicitly deleting the module from sys.modules and importing it again
# using import_module() as opposed to using importlib.reload() because the latter
# does not undo the changes to the airflow.sdk.execution_time.macros module that are being caused by
# invoking integrate_macros_plugins()
del sys.modules["airflow.sdk.execution_time.macros"]
importlib.import_module("airflow.sdk.execution_time.macros")
request.addfinalizer(cleanup_macros)
def custom_macro():
return "foo"
class MacroPlugin(AirflowPlugin):
name = "macro_plugin"
macros = [custom_macro]
with mock_plugin_manager(plugins=[MacroPlugin()]):
# Ensure the macros for the plugin have been integrated.
integrate_macros_plugins()
# Test whether the modules have been created as expected.
plugin_macros = importlib.import_module(f"airflow.sdk.execution_time.macros.{MacroPlugin.name}")
for macro in MacroPlugin.macros:
# Verify that the macros added by the plugin are being set correctly
# on the plugin's macro module.
assert hasattr(plugin_macros, macro.__name__)
# Verify that the symbol table in airflow.sdk.execution_time.macros has been updated with an entry for
# this plugin, this is necessary in order to allow the plugin's macros to be used when
# rendering templates.
assert hasattr(macros, MacroPlugin.name)
@skip_if_force_lowest_dependencies_marker
def test_registering_plugin_listeners(self):
from airflow import plugins_manager
assert not get_listener_manager().has_listeners
with mock.patch("airflow.plugins_manager.plugins", []):
plugins_manager.load_plugins_from_plugin_directory()
plugins_manager.integrate_listener_plugins(get_listener_manager())
assert get_listener_manager().has_listeners
listeners = get_listener_manager().pm.get_plugins()
listener_names = [el.__name__ if inspect.ismodule(el) else qualname(el) for el in listeners]
# sort names as order of listeners is not guaranteed
assert sorted(listener_names) == [
"airflow.example_dags.plugins.event_listener",
"unit.listeners.class_listener.ClassBasedListener",
"unit.listeners.empty_listener",
]
@skip_if_force_lowest_dependencies_marker
def test_should_import_plugin_from_providers(self):
from airflow import plugins_manager
with mock.patch("airflow.plugins_manager.plugins", []):
assert len(plugins_manager.plugins) == 0
plugins_manager.load_providers_plugins()
assert len(plugins_manager.plugins) >= 2
@skip_if_force_lowest_dependencies_marker
def test_does_not_double_import_entrypoint_provider_plugins(self):
from airflow import plugins_manager
mock_entrypoint = mock.Mock()
mock_entrypoint.name = "test-entrypoint-plugin"
mock_entrypoint.module = "module_name_plugin"
mock_dist = mock.Mock()
mock_dist.metadata = {"Name": "test-entrypoint-plugin"}
mock_dist.version = "1.0.0"
mock_dist.entry_points = [mock_entrypoint]
with mock.patch("airflow.plugins_manager.plugins", []):
assert len(plugins_manager.plugins) == 0
plugins_manager.load_entrypoint_plugins()
plugins_manager.load_providers_plugins()
assert len(plugins_manager.plugins) == 4
| TestPluginsManager |
python | charliermarsh__ruff | crates/ruff_linter/resources/test/fixtures/pydocstyle/D208.py | {
"start": 20,
"end": 182
} | class ____:
""" Remove sampler
Args:
Returns:
"""
def memory_test():
"""
参数含义:precision:精确到小数点后几位
"""
| Platform |
python | getsentry__sentry | tests/acceptance/test_create_team.py | {
"start": 198,
"end": 1524
} | class ____(AcceptanceTestCase):
def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
self.team = self.create_team(organization=self.org, name="Mariachi Band")
self.project = self.create_project(organization=self.org, teams=[self.team], name="Bengal")
self.create_member(user=self.user, organization=self.org, role="owner", teams=[self.team])
self.login_as(self.user)
self.path = f"/settings/{self.org.slug}/teams/"
def test_create(self) -> None:
self.browser.get(self.path)
self.browser.wait_until_test_id("team-list")
# Open the modal
self.browser.click('button[aria-label="Create Team"]')
self.browser.wait_until("[role='dialog']")
self.browser.element('input[id="slug"]').send_keys("new-team")
self.browser.click("[role='dialog'] button[aria-label='Create Team']")
# Wait for modal to go away.
self.browser.wait_until_not("[role='dialog']")
# New team should be in dom
assert self.browser.find_element(by=By.XPATH, value="//span[contains(text(), 'new-team')]")
assert Team.objects.filter(slug="new-team", organization=self.org).exists()
| CreateTeamTest |
python | numpy__numpy | numpy/testing/tests/test_utils.py | {
"start": 13046,
"end": 16307
} | class ____(TestArrayEqual):
def _assert_func(self, *args, **kwargs):
assert_equal(*args, **kwargs)
def test_nan_items(self):
self._assert_func(np.nan, np.nan)
self._assert_func([np.nan], [np.nan])
self._test_not_equal(np.nan, [np.nan])
self._test_not_equal(np.nan, 1)
def test_inf_items(self):
self._assert_func(np.inf, np.inf)
self._assert_func([np.inf], [np.inf])
self._test_not_equal(np.inf, [np.inf])
def test_datetime(self):
self._test_equal(
np.datetime64("2017-01-01", "s"),
np.datetime64("2017-01-01", "s")
)
self._test_equal(
np.datetime64("2017-01-01", "s"),
np.datetime64("2017-01-01", "m")
)
# gh-10081
self._test_not_equal(
np.datetime64("2017-01-01", "s"),
np.datetime64("2017-01-02", "s")
)
self._test_not_equal(
np.datetime64("2017-01-01", "s"),
np.datetime64("2017-01-02", "m")
)
def test_nat_items(self):
# not a datetime
nadt_no_unit = np.datetime64("NaT")
nadt_s = np.datetime64("NaT", "s")
nadt_d = np.datetime64("NaT", "ns")
# not a timedelta
natd_no_unit = np.timedelta64("NaT")
natd_s = np.timedelta64("NaT", "s")
natd_d = np.timedelta64("NaT", "ns")
dts = [nadt_no_unit, nadt_s, nadt_d]
tds = [natd_no_unit, natd_s, natd_d]
for a, b in itertools.product(dts, dts):
self._assert_func(a, b)
self._assert_func([a], [b])
self._test_not_equal([a], b)
for a, b in itertools.product(tds, tds):
self._assert_func(a, b)
self._assert_func([a], [b])
self._test_not_equal([a], b)
for a, b in itertools.product(tds, dts):
self._test_not_equal(a, b)
self._test_not_equal(a, [b])
self._test_not_equal([a], [b])
self._test_not_equal([a], np.datetime64("2017-01-01", "s"))
self._test_not_equal([b], np.datetime64("2017-01-01", "s"))
self._test_not_equal([a], np.timedelta64(123, "s"))
self._test_not_equal([b], np.timedelta64(123, "s"))
def test_non_numeric(self):
self._assert_func('ab', 'ab')
self._test_not_equal('ab', 'abb')
def test_complex_item(self):
self._assert_func(complex(1, 2), complex(1, 2))
self._assert_func(complex(1, np.nan), complex(1, np.nan))
self._test_not_equal(complex(1, np.nan), complex(1, 2))
self._test_not_equal(complex(np.nan, 1), complex(1, np.nan))
self._test_not_equal(complex(np.nan, np.inf), complex(np.nan, 2))
def test_negative_zero(self):
self._test_not_equal(ncu.PZERO, ncu.NZERO)
def test_complex(self):
x = np.array([complex(1, 2), complex(1, np.nan)])
y = np.array([complex(1, 2), complex(1, 2)])
self._assert_func(x, x)
self._test_not_equal(x, y)
def test_object(self):
# gh-12942
import datetime
a = np.array([datetime.datetime(2000, 1, 1),
datetime.datetime(2000, 1, 2)])
self._test_not_equal(a, a[::-1])
| TestEqual |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/callbackProtocol7.py | {
"start": 124,
"end": 275
} | class ____(Protocol):
def __call__(self, x: int, /, y: str) -> Any: ...
def test1(x: int, /, y: str, z: None = None) -> Any: ...
x: P0 = test1
| P0 |
python | dagster-io__dagster | python_modules/dagster/dagster/_grpc/types.py | {
"start": 14793,
"end": 16228
} | class ____(
NamedTuple(
"_PartitionArgs",
[
("repository_origin", RemoteRepositoryOrigin),
# This is here for backcompat. it's expected to always be f"{job_name}_partition_set".
("partition_set_name", str),
("partition_name", str),
("job_name", Optional[str]),
("instance_ref", Optional[InstanceRef]),
],
)
):
def __new__(
cls,
repository_origin: RemoteRepositoryOrigin,
partition_set_name: str,
partition_name: str,
job_name: Optional[str] = None,
instance_ref: Optional[InstanceRef] = None,
):
return super().__new__(
cls,
repository_origin=check.inst_param(
repository_origin,
"repository_origin",
RemoteRepositoryOrigin,
),
partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
job_name=check.opt_str_param(job_name, "job_name"),
partition_name=check.str_param(partition_name, "partition_name"),
instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
)
def get_job_name(self) -> str:
if self.job_name:
return self.job_name
else:
return job_name_for_partition_set_snap_name(self.partition_set_name)
@whitelist_for_serdes
| PartitionArgs |
python | getlogbook__logbook | src/logbook/base.py | {
"start": 37674,
"end": 38345
} | class ____(RecordDispatcher, LoggerMixin):
"""Instances of the Logger class represent a single logging channel.
A "logging channel" indicates an area of an application. Exactly
how an "area" is defined is up to the application developer.
Names used by logbook should be descriptive and are intended for user
display, not for filtering. Filtering should happen based on the
context information instead.
A logger internally is a subclass of a
:class:`~logbook.base.RecordDispatcher` that implements the actual
logic. If you want to implement a custom logger class, have a look
at the interface of that class as well.
"""
| Logger |
python | allegroai__clearml | clearml/automation/controller.py | {
"start": 184560,
"end": 269686
} | class ____(PipelineController):
_added_decorator: List[dict] = []
_ref_lazy_loader_id_to_node_name: dict = {}
_singleton: Optional["PipelineDecorator"] = None
_eager_step_artifact = "eager_step"
_eager_execution_instance = False
_debug_execute_step_process = False
_debug_execute_step_function = False
_default_execution_queue = None
_multi_pipeline_instances = []
_multi_pipeline_call_counter = -1
_atexit_registered = False
def __init__(
self,
name: str,
project: str,
version: Optional[str] = None,
pool_frequency: float = 0.2,
add_pipeline_tags: bool = False,
target_project: Optional[str] = None,
abort_on_failure: bool = False,
add_run_number: bool = True,
retry_on_failure: Optional[
Union[int, Callable[[PipelineController, PipelineController.Node, int], bool]]
] = None, # noqa
docker: Optional[str] = None,
docker_args: Optional[str] = None,
docker_bash_setup_script: Optional[str] = None,
packages: Optional[Union[bool, str, Sequence[str]]] = None,
repo: Optional[str] = None,
repo_branch: Optional[str] = None,
repo_commit: Optional[str] = None,
artifact_serialization_function: Optional[Callable[[Any], Union[bytes, bytearray]]] = None,
artifact_deserialization_function: Optional[Callable[[bytes], Any]] = None,
output_uri: Optional[Union[str, bool]] = None,
skip_global_imports: bool = False,
working_dir: Optional[str] = None,
enable_local_imports: bool = True,
) -> None:
"""
Create a new pipeline controller. The newly created object will launch and monitor the new experiments.
:param name: Provide pipeline name (if main Task exists it overrides its name)
:param project: Provide project storing the pipeline (if main Task exists it overrides its project)
:param version: Pipeline version. This version allows to uniquely identify the pipeline
template execution. Examples for semantic versions: version='1.0.1' , version='23', version='1.2'.
If not set, find the latest version of the pipeline and increment it. If no such version is found,
default to '1.0.0'
:param float pool_frequency: The pooling frequency (in minutes) for monitoring experiments / states.
:param bool add_pipeline_tags: (default: False) if True, add `pipe: <pipeline_task_id>` tag to all
steps (Tasks) created by this pipeline.
:param str target_project: If provided, all pipeline steps are cloned into the target project
:param bool abort_on_failure: If False (default), failed pipeline steps will not cause the pipeline
to stop immediately, instead any step that is not connected (or indirectly connected) to the failed step,
will still be executed. Nonetheless, the pipeline itself will be marked failed, unless the failed step
was specifically defined with "continue_on_fail=True".
If True, any failed step will cause the pipeline to immediately abort, stop all running steps,
and mark the pipeline as failed.
:param add_run_number: If True (default), add the run number of the pipeline to the pipeline name.
Example, the second time we launch the pipeline "best pipeline", we rename it to "best pipeline #2"
:param retry_on_failure: Integer (number of retries) or Callback function that returns True to allow a retry
- Integer: In case of node failure, retry the node the number of times indicated by this parameter.
- Callable: A function called on node failure. Takes as parameters:
the PipelineController instance, the PipelineController.Node that failed and an int
representing the number of previous retries for the node that failed.
The function must return ``True`` if the node should be retried and ``False`` otherwise.
If True, the node will be re-queued and the number of retries left will be decremented by 1.
By default, if this callback is not specified, the function will be retried the number of
times indicated by `retry_on_failure`.
.. code-block:: py
def example_retry_on_failure_callback(pipeline, node, retries):
print(node.name, ' failed')
# allow up to 5 retries (total of 6 runs)
return retries < 5
:param docker: Select the docker image to be executed in by the remote session
:param docker_args: Add docker arguments, pass a single string
:param docker_bash_setup_script: Add bash script to be executed
inside the docker before setting up the Task's environment
:param packages: Manually specify a list of required packages or a local requirements.txt file.
Example: ["tqdm>=2.1", "scikit-learn"] or "./requirements.txt"
If not provided, packages are automatically added.
Use `False` to install requirements from "requirements.txt" inside your git repository
:param repo: Optional, specify a repository to attach to the pipeline controller, when remotely executing.
Allow users to execute the controller inside the specified repository, enabling them to load modules/script
from the repository. Notice the execution work directory will be the repository root folder.
Supports both git repo url link, and local repository path (automatically converted into the remote
git/commit as is currently checkout).
Example remote url: 'https://github.com/user/repo.git'
Example local repo copy: './repo' -> will automatically store the remote
repo url and commit ID based on the locally cloned copy
Use empty string ("") to disable any repository auto-detection
:param repo_branch: Optional, specify the remote repository branch (Ignored, if local repo path is used)
:param repo_commit: Optional, specify the repository commit ID (Ignored, if local repo path is used)
:param artifact_serialization_function: A serialization function that takes one
parameter of any type which is the object to be serialized. The function should return
a `bytes` or `bytearray` object, which represents the serialized object. All parameter/return
artifacts uploaded by the pipeline will be serialized using this function.
All relevant imports must be done in this function. For example:
.. code-block:: py
def serialize(obj):
import dill
return dill.dumps(obj)
:param artifact_deserialization_function: A deserialization function that takes one parameter of type `bytes`,
which represents the serialized object. This function should return the deserialized object.
All parameter/return artifacts fetched by the pipeline will be deserialized using this function.
All relevant imports must be done in this function. For example:
.. code-block:: py
def deserialize(bytes_):
import dill
return dill.loads(bytes_)
:param output_uri: The storage / output url for this pipeline. This is the default location for output
models and other artifacts. Check Task.init reference docs for more info (output_uri is a parameter).
The `output_uri` of this pipeline's steps will default to this value.
:param skip_global_imports: If True, global imports will not be included in the steps' execution, otherwise all
global imports will be automatically imported in a safe manner at the beginning of each step’s execution.
Default is False
:param working_dir: Working directory to launch the pipeline from.
:param enable_local_imports: If True, allow pipeline steps to import from local files
by appending to the PYTHONPATH of each step the directory the pipeline controller
script resides in (sys.path[0]).
If False, the directory won't be appended to PYTHONPATH. Default is True.
Ignored while running remotely.
"""
super(PipelineDecorator, self).__init__(
name=name,
project=project,
version=version,
pool_frequency=pool_frequency,
add_pipeline_tags=add_pipeline_tags,
target_project=target_project,
abort_on_failure=abort_on_failure,
add_run_number=add_run_number,
retry_on_failure=retry_on_failure,
docker=docker,
docker_args=docker_args,
docker_bash_setup_script=docker_bash_setup_script,
packages=packages,
repo=repo,
repo_branch=repo_branch,
repo_commit=repo_commit,
always_create_from_code=False,
artifact_serialization_function=artifact_serialization_function,
artifact_deserialization_function=artifact_deserialization_function,
output_uri=output_uri,
skip_global_imports=skip_global_imports,
working_dir=working_dir,
enable_local_imports=enable_local_imports,
)
# if we are in eager execution, make sure parent class knows it
if self._eager_execution_instance:
self._mock_execution = True
if PipelineDecorator._default_execution_queue:
super(PipelineDecorator, self).set_default_execution_queue(PipelineDecorator._default_execution_queue)
for n in self._added_decorator:
self._add_function_step(**n)
self._added_decorator.clear()
PipelineDecorator._singleton = self
self._reference_callback = []
# store launched nodes, in case we call the same function multiple times, and need renaming:
self._launched_step_names = set()
# map eager steps task id to the new step name
self._eager_steps_task_id: Dict[str, str] = {}
def _daemon(self) -> None:
"""
The main pipeline execution loop. This loop is executed on its own dedicated thread.
override the daemon function, we only need to update the state
:return:
"""
pooling_counter = 0
launched_nodes = set()
last_monitor_report = last_plot_report = time()
while self._stop_event:
# stop request
if self._stop_event.wait(self._pool_frequency if pooling_counter else 0.01):
break
pooling_counter += 1
# check the pipeline time limit
if self._pipeline_time_limit and (time() - self._start_time) > self._pipeline_time_limit:
break
self._update_progress()
self._update_nodes_status()
# check the state of all current jobs
# if no a job ended, continue
completed_jobs = []
nodes_failed_stop_pipeline = []
force_execution_plot_update = False
for j in self._running_nodes:
node = self._nodes[j]
if not node.job:
continue
if node.job.is_stopped(aborted_nonresponsive_as_running=True):
node_failed = node.job.is_failed()
if node_failed:
if self._call_retries_callback(node):
self._relaunch_node(node)
continue
else:
self._final_failure[node.name] = True
completed_jobs.append(j)
if node.job.is_aborted():
node.executed = node.job.task_id() if not node.skip_children_on_abort else False
elif node_failed:
node.executed = node.job.task_id() if not node.skip_children_on_fail else False
else:
node.executed = node.job.task_id()
if j in launched_nodes:
launched_nodes.remove(j)
# check if we need to stop all running steps
if node_failed and self._abort_running_steps_on_failure and not node.continue_on_fail:
nodes_failed_stop_pipeline.append(node.name)
elif node.timeout:
started = node.job.task.data.started
if (datetime.now().astimezone(started.tzinfo) - started).total_seconds() > node.timeout:
node.job.abort()
completed_jobs.append(j)
node.executed = node.job.task_id()
elif j in launched_nodes and node.job.is_running():
# make sure update the execution graph when the job started running
# (otherwise it will still be marked queued)
launched_nodes.remove(j)
force_execution_plot_update = True
# update running jobs
self._running_nodes = [j for j in self._running_nodes if j not in completed_jobs]
# nothing changed, we can sleep
if not completed_jobs and self._running_nodes:
# force updating the pipeline state (plot) at least every 5 min.
if force_execution_plot_update or time() - last_plot_report > self._update_execution_plot_interval:
last_plot_report = time()
last_monitor_report = time()
self.update_execution_plot()
elif time() - last_monitor_report > self._monitor_node_interval:
last_monitor_report = time()
self._scan_monitored_nodes()
continue
# callback on completed jobs
if self._experiment_completed_cb or self._post_step_callbacks:
for job in completed_jobs:
job_node = self._nodes.get(job)
if not job_node:
continue
if self._experiment_completed_cb:
self._experiment_completed_cb(self, job_node)
if self._post_step_callbacks.get(job_node.name):
self._post_step_callbacks[job_node.name](self, job_node)
# check if we need to stop the pipeline, and abort all running steps
if nodes_failed_stop_pipeline:
print(
"Aborting pipeline and stopping all running steps, node {} failed".format(
nodes_failed_stop_pipeline
)
)
break
# update current state (in configuration, so that we could later continue an aborted pipeline)
self._force_task_configuration_update()
# visualize pipeline state (plot)
self.update_execution_plot()
# stop all currently running jobs, protect against changes while iterating):
for node in list(self._nodes.values()):
if node.executed is False and not node.continue_on_fail:
self._pipeline_task_status_failed = True
if node.job and not node.job.is_stopped():
node.job.abort()
elif not node.job and not node.executed:
# mark Node as skipped if it has no Job object and it is not executed
node.skip_job = True
# if this is a standalone node, we need to remove it from the graph
if not node.parents:
# check if this node is anyone's parent
found_parent = False
for v in list(self._nodes.values()):
if node.name in (v.parents or []):
found_parent = True
break
if not found_parent:
self._nodes.pop(node.name, None)
# visualize pipeline state (plot)
self.update_execution_plot()
self._scan_monitored_nodes()
if self._stop_event:
# noinspection PyBroadException
try:
self._stop_event.set()
except Exception:
pass
def update_execution_plot(self) -> None:
"""
Update sankey diagram of the current pipeline
"""
with self._reporting_lock:
self._update_eager_generated_steps()
super(PipelineDecorator, self).update_execution_plot()
def _update_eager_generated_steps(self) -> None:
# noinspection PyProtectedMember
self._task.reload()
artifacts = self._task.data.execution.artifacts
# check if we have a new step on the DAG
eager_artifacts = []
for a in artifacts:
if a.key and a.key.startswith("{}:".format(self._eager_step_artifact)):
# expected value: '"eager_step":"parent-node-task-id":"eager-step-task-id'
eager_artifacts.append(a)
# verify we have the step, if we do not, add it.
delete_artifact_keys = []
for artifact in eager_artifacts:
_, parent_step_task_id, eager_step_task_id = artifact.key.split(":", 2)
# deserialize node definition
eager_node_def = json.loads(artifact.type_data.preview)
eager_node_name, eager_node_def = list(eager_node_def.items())[0]
# verify we do not have any new nodes on the DAG (i.e. a step generating a Node eagerly)
parent_node = None
for node in list(self._nodes.values()):
if not node.job and not node.executed:
continue
t_id = node.executed or node.job.task_id
if t_id == parent_step_task_id:
parent_node = node
break
if not parent_node:
# should not happen
continue
new_step_node_name = "{}_{}".format(parent_node.name, eager_node_name)
counter = 1
while new_step_node_name in self._nodes:
new_step_node_name = "{}_{}".format(new_step_node_name, counter)
counter += 1
eager_node_def["name"] = new_step_node_name
eager_node_def["parents"] = [parent_node.name]
is_cached = eager_node_def.pop("is_cached", None)
self._nodes[new_step_node_name] = self.Node(**eager_node_def)
self._nodes[new_step_node_name].job = RunningJob(existing_task=eager_step_task_id)
if is_cached:
self._nodes[new_step_node_name].job.force_set_is_cached(is_cached)
# make sure we will not rescan it.
delete_artifact_keys.append(artifact.key)
# remove all processed eager step artifacts
if delete_artifact_keys:
# noinspection PyProtectedMember
self._task._delete_artifacts(delete_artifact_keys)
self._force_task_configuration_update()
def _create_task_from_function(
self,
docker: Optional[str],
docker_args: Optional[str],
docker_bash_setup_script: Optional[str],
function: Callable,
function_input_artifacts: Dict[str, str],
function_kwargs: Dict[str, Any],
function_return: List[str],
auto_connect_frameworks: Optional[dict],
auto_connect_arg_parser: Optional[dict],
packages: Optional[Union[bool, str, Sequence[str]]],
project_name: Optional[str],
task_name: Optional[str],
task_type: Optional[str],
repo: Optional[str],
branch: Optional[str],
commit: Optional[str],
helper_functions: Optional[Sequence[Callable]],
output_uri: Optional[Union[str, bool]] = None,
working_dir: Optional[str] = None,
) -> dict:
def sanitize(function_source: str) -> str:
matched = re.match(r"[\s]*@[\w]*.component[\s\\]*\(", function_source)
if matched:
function_source = function_source[matched.span()[1] :]
# find the last ")"
open_parenthesis = 0
last_index = -1
for i, c in enumerate(function_source):
if not open_parenthesis and c == ")":
last_index = i
break
elif c == ")":
open_parenthesis -= 1
elif c == "(":
open_parenthesis += 1
if last_index >= 0:
function_source = function_source[last_index + 1 :].lstrip()
return function_source
task_definition = CreateFromFunction.create_task_from_function(
a_function=function,
function_kwargs=function_kwargs or None,
function_input_artifacts=function_input_artifacts,
function_return=function_return,
project_name=project_name,
task_name=task_name,
task_type=task_type,
auto_connect_frameworks=auto_connect_frameworks,
auto_connect_arg_parser=auto_connect_arg_parser,
repo=repo,
branch=branch,
commit=commit,
packages=packages,
docker=docker,
docker_args=docker_args,
docker_bash_setup_script=docker_bash_setup_script,
output_uri=output_uri,
helper_functions=helper_functions,
dry_run=True,
task_template_header=self._task_template_header,
_sanitize_function=sanitize,
artifact_serialization_function=self._artifact_serialization_function,
artifact_deserialization_function=self._artifact_deserialization_function,
skip_global_imports=self._skip_global_imports,
working_dir=working_dir,
)
return task_definition
def _find_executed_node_leaves(self) -> List[PipelineController.Node]:
all_parents = set([p for n in list(self._nodes.values()) if n.executed for p in n.parents])
executed_leaves = [name for name, n in list(self._nodes.items()) if n.executed and name not in all_parents]
return executed_leaves
def _adjust_task_hashing(self, task_hash: dict) -> dict:
"""
Fix the Task hashing so that parameters pointing to the current Task artifact are encoded using the
hash content of the artifact, instead of the Task.id
:param task_hash: Task representation dict
:return: Adjusted Task representation dict
"""
if task_hash.get("hyper_params"):
updated_params = {}
for k, v in task_hash["hyper_params"].items():
if k.startswith("{}/".format(CreateFromFunction.input_artifact_section)) and str(v).startswith(
"{}.".format(self._task.id)
):
task_id, artifact_name = str(v).split(".", 1)
if artifact_name in self._task.artifacts:
updated_params[k] = self._task.artifacts[artifact_name].hash
task_hash["hyper_params"].update(updated_params)
return task_hash
@classmethod
def _wait_for_node(cls, node: PipelineController.Node) -> None:
pool_period = 5.0 if cls._debug_execute_step_process else 20.0
while True:
if not node.job:
break
node.job.wait(pool_period=pool_period, aborted_nonresponsive_as_running=True)
job_status = str(node.job.status(force=True))
if (
(
job_status == str(Task.TaskStatusEnum.stopped)
and node.job.status_message() == cls._relaunch_status_message
)
or (job_status == str(Task.TaskStatusEnum.failed) and not cls._final_failure.get(node.name))
or not node.job.is_stopped()
):
sleep(pool_period)
else:
break
    @classmethod
    def component(
        cls,
        _func: Any = None,
        *,
        return_values: Union[str, Sequence[str]] = ("return_object",),
        name: Optional[str] = None,
        cache: bool = False,
        packages: Optional[Union[bool, str, Sequence[str]]] = None,
        parents: Optional[List[str]] = None,
        execution_queue: Optional[str] = None,
        continue_on_fail: bool = False,
        docker: Optional[str] = None,
        docker_args: Optional[str] = None,
        docker_bash_setup_script: Optional[str] = None,
        task_type: Optional[str] = None,
        auto_connect_frameworks: Optional[dict] = None,
        auto_connect_arg_parser: Optional[dict] = None,
        repo: Optional[str] = None,
        repo_branch: Optional[str] = None,
        repo_commit: Optional[str] = None,
        helper_functions: Optional[Sequence[Callable]] = None,
        monitor_metrics: Optional[List[Union[Tuple[str, str], Tuple]]] = None,
        monitor_artifacts: Optional[List[Union[str, Tuple[str, str]]]] = None,
        monitor_models: Optional[List[Union[str, Tuple[str, str]]]] = None,
        retry_on_failure: Optional[
            Union[int, Callable[[PipelineController, PipelineController.Node, int], bool]]
        ] = None,  # noqa
        pre_execute_callback: Optional[
            Callable[[PipelineController, PipelineController.Node, dict], bool]
        ] = None,  # noqa
        post_execute_callback: Optional[Callable[[PipelineController, PipelineController.Node], None]] = None,
        # noqa
        status_change_callback: Optional[
            Callable[[PipelineController, PipelineController.Node, str], None]
        ] = None,  # noqa
        tags: Optional[Union[str, Sequence[str]]] = None,
        output_uri: Optional[Union[str, bool]] = None,
        draft: Optional[bool] = False,
        working_dir: Optional[str] = None,
        continue_behaviour: Optional[dict] = None,
        stage: Optional[str] = None
    ) -> Callable:
        """
        pipeline component function to be executed remotely

        :param _func: wrapper function
        :param return_values: Provide a list of names for all the results.
            Notice! If not provided, no results will be stored as artifacts.
        :param name: Optional, set the name of the pipeline component task.
            If not provided, the wrapped function name is used as the pipeline component name
        :param cache: If True, before launching the new step,
            after updating with the latest configuration, check if an exact Task with the same parameter/code
            was already executed. If it was found, use it instead of launching a new Task. Default: False
        :param packages: Manually specify a list of required packages or a local requirements.txt file.
            Example: ["tqdm>=2.1", "scikit-learn"] or "./requirements.txt"
            If not provided, packages are automatically added based on the imports used inside the wrapped function.
            Use `False` to install requirements from "requirements.txt" inside your git repository
        :param parents: Optional list of parent nodes in the DAG.
            The current step in the pipeline will be sent for execution only after all the parent nodes
            have been executed successfully.
        :param execution_queue: Optional, the queue to use for executing this specific step.
            If not provided, the task will be sent to the pipeline's default execution queue
        :param continue_on_fail: (Deprecated, use `continue_behaviour` instead).
            If True, failed step will not cause the pipeline to stop
            (or marked as failed). Notice, that steps that are connected (or indirectly connected)
            to the failed step will be skipped. Defaults to False
        :param docker: Specify the docker image to be used when executing the pipeline step remotely
        :param docker_args: Add docker execution arguments for the remote execution
            (use single string for all docker arguments).
        :param docker_bash_setup_script: Add a bash script to be executed inside the docker before
            setting up the Task's environment
        :param task_type: Optional, The task type to be created. Supported values: 'training', 'testing', 'inference',
            'data_processing', 'application', 'monitor', 'controller', 'optimizer', 'service', 'qc', 'custom'
        :param auto_connect_frameworks: Control the frameworks auto connect, see `Task.init` auto_connect_frameworks
        :param auto_connect_arg_parser: Control the ArgParser auto connect, see `Task.init` auto_connect_arg_parser
        :param repo: Optional, specify a repository to attach to the function, when remotely executing.
            Allow users to execute the function inside the specified repository, enabling them to load modules/script
            from the repository. Notice the execution work directory will be the repository root folder.
            Supports both git repo url link, and local repository path (automatically converted into the remote
            git/commit as is currently checkout).
            Example remote url: 'https://github.com/user/repo.git'
            Example local repo copy: './repo' -> will automatically store the remote
            repo url and commit ID based on the locally cloned copy
        :param repo_branch: Optional, specify the remote repository branch (Ignored, if local repo path is used)
        :param repo_commit: Optional, specify the repository commit ID (Ignored, if local repo path is used)
        :param helper_functions: Optional, a list of helper functions to make available
            for the standalone pipeline step function Task. By default the pipeline step function has
            no access to any of the other functions, by specifying additional functions here, the remote pipeline step
            could call the additional functions.
            Example, assuming we have two functions parse_data(), and load_data(): [parse_data, load_data]
        :param monitor_metrics: Optional, Automatically log the step's reported metrics also on the pipeline Task.
            The expected format is a list of pairs metric (title, series) to log:
            ``[(step_metric_title, step_metric_series), ]``.
            For example: ``[('test', 'accuracy'), ]``.
            Or a list of tuple pairs, to specify a different target metric to use on the pipeline Task:
            ``[((step_metric_title, step_metric_series), (target_metric_title, target_metric_series)), ]``.
            For example: ``[[('test', 'accuracy'), ('model', 'accuracy')], ]``
        :param monitor_artifacts: Optional, Automatically log the step's artifacts on the pipeline Task.
            Provided a list of artifact names created by the step function, these artifacts will be logged
            automatically also on the Pipeline Task itself.
            Example: ``['processed_data', ]``
            (target artifact name on the Pipeline Task will have the same name as the original artifact).
            Alternatively, provide a list of pairs ``(source_artifact_name, target_artifact_name)``:
            where the first string is the artifact name as it appears on the component Task,
            and the second is the target artifact name to put on the Pipeline Task.
            Example: ``[('processed_data', 'final_processed_data'), ]``
        :param monitor_models: Optional, Automatically log the step's output models on the pipeline Task.
            Provided a list of model names created by the step's Task, they will also appear on the Pipeline itself.
            Example: ``['model_weights', ]``.
            To select the latest (lexicographic) model use "model_*", or the last created model with just "*".
            Example: ``['model_weights_*', ]``.
            Alternatively, provide a list of pairs ``(source_model_name, target_model_name)``:
            where the first string is the model name as it appears on the component Task,
            and the second is the target model name to put on the Pipeline Task.
            Example: ``[('model_weights', 'final_model_weights'), ]``
        :param retry_on_failure: Integer (number of retries) or Callback function that returns True to allow a retry

            - Integer: In case of node failure, retry the node the number of times indicated by this parameter.
            - Callable: A function called on node failure. Takes as parameters:
              the PipelineController instance, the PipelineController.Node that failed and an int
              representing the number of previous retries for the node that failed
              The function must return a `bool`: True if the node should be retried and False otherwise.
              If True, the node will be re-queued and the number of retries left will be decremented by 1.
              By default, if this callback is not specified, the function will be retried the number of
              times indicated by `retry_on_failure`.

              .. code-block:: py

                  def example_retry_on_failure_callback(pipeline, node, retries):
                      print(node.name, ' failed')
                      # allow up to 5 retries (total of 6 runs)
                      return retries < 5
        :param pre_execute_callback: Callback function, called when the step (Task) is created,
            and before it is sent for execution. Allows a user to modify the Task before launch.
            Use `node.job` to access the ClearmlJob object, or `node.job.task` to directly access the Task object.
            ``parameters`` are the configuration arguments passed to the ClearmlJob.

            If the callback returned value is `False`,
            the Node is skipped and so is any node in the DAG that relies on this node.

            Notice the `parameters` are already parsed,
            e.g. ``${step1.parameters.Args/param}`` is replaced with relevant value.

            .. code-block:: py

                def step_created_callback(
                    pipeline,  # type: PipelineController,
                    node,  # type: PipelineController.Node,
                    parameters,  # type: dict
                ):
                    pass
        :param post_execute_callback: Callback function, called when a step (Task) is completed
            and other jobs are going to be executed. Allows a user to modify the Task status after completion.

            .. code-block:: py

                def step_completed_callback(
                    pipeline,  # type: PipelineController,
                    node,  # type: PipelineController.Node,
                ):
                    pass
        :param status_change_callback: Callback function, called when the status of a step (Task) changes.
            Use `node.job` to access the ClearmlJob object, or `node.job.task` to directly access the Task object.
            The signature of the function must look the following way:

            .. code-block:: py

                def status_change_callback(
                    pipeline,  # type: PipelineController,
                    node,  # type: PipelineController.Node,
                    previous_status  # type: str
                ):
                    pass
        :param tags: A list of tags for the specific pipeline step.
            When executing a Pipeline remotely
            (i.e. launching the pipeline from the UI/enqueuing it), this method has no effect.
        :param output_uri: The storage / output url for this step. This is the default location for output
            models and other artifacts. Check Task.init reference docs for more info (output_uri is a parameter).
        :param draft: (default False). If True, the Task will be created as a draft task.
        :param working_dir: Working directory to launch the step from.
        :param continue_behaviour: Controls whether the pipeline will continue running after a step failed/was aborted.
            Different behaviours can be set using a dictionary of boolean options. Supported options are:

            - continue_on_fail - If True, the pipeline will continue even if the step failed.
              If False, the pipeline will stop
            - continue_on_abort - If True, the pipeline will continue even if the step was aborted.
              If False, the pipeline will stop
            - skip_children_on_fail - If True, the children of this step will be skipped if it failed.
              If False, the children will run even if this step failed.
              Any parameters passed from the failed step to its children will default to None
            - skip_children_on_abort - If True, the children of this step will be skipped if it was aborted.
              If False, the children will run even if this step was aborted.
              Any parameters passed from the failed step to its children will default to None
            - If the keys are not present in the dictionary, their values will default to True
        :param stage: Name of the stage. This parameter enables pipeline step grouping into stages

        :return: function wrapper
        """

        def decorator_wrap(func: Callable) -> Callable:
            if continue_on_fail:
                warnings.warn(
                    "`continue_on_fail` is deprecated. Use `continue_behaviour` instead",
                    DeprecationWarning,
                )

            # Unwrap chained decorators to reach the user's original function
            # noinspection PyProtectedMember
            unwrapped_func = CreateFromFunction._deep_extract_wrapped(func)
            _name = name or str(unwrapped_func.__name__)
            # Normalize return_values into a list of artifact names
            function_return = return_values if isinstance(return_values, (tuple, list)) else [return_values]

            inspect_func = inspect.getfullargspec(unwrapped_func)
            # add default argument values (left-pad with None for args without defaults)
            if inspect_func.args:
                default_values = list(inspect_func.defaults or [])
                default_values = ([None] * (len(inspect_func.args) - len(default_values))) + default_values
                function_kwargs = {k: v for k, v in zip(inspect_func.args, default_values)}
            else:
                function_kwargs = dict()

            # Full step specification, consumed by PipelineController._add_function_step
            add_step_spec = dict(
                name=_name,
                function=func,
                function_kwargs=function_kwargs,
                function_return=function_return,
                cache_executed_step=cache,
                packages=packages,
                parents=parents,
                execution_queue=execution_queue,
                continue_on_fail=continue_on_fail,
                docker=docker,
                docker_args=docker_args,
                docker_bash_setup_script=docker_bash_setup_script,
                auto_connect_frameworks=auto_connect_frameworks,
                auto_connect_arg_parser=auto_connect_arg_parser,
                task_type=task_type,
                repo=repo,
                repo_branch=repo_branch,
                repo_commit=repo_commit,
                helper_functions=helper_functions,
                monitor_metrics=monitor_metrics,
                monitor_models=monitor_models,
                monitor_artifacts=monitor_artifacts,
                pre_execute_callback=pre_execute_callback,
                post_execute_callback=post_execute_callback,
                status_change_callback=status_change_callback,
                tags=tags,
                output_uri=output_uri,
                draft=draft,
                working_dir=working_dir,
                continue_behaviour=continue_behaviour,
                stage=stage
            )

            # Register the step now if a pipeline controller singleton already exists,
            # otherwise queue it until the pipeline decorator creates one
            if cls._singleton:
                cls._singleton._add_function_step(**add_step_spec)
            else:
                cls._added_decorator.append(add_step_spec)

            @functools.wraps(func)
            def wrapper(*args: Any, **kwargs: Any) -> Union[LazyEvalWrapper, List[LazyEvalWrapper]]:
                # Debug mode: run the step as a plain local function call,
                # lazily evaluated the first time any of its results is needed
                if cls._debug_execute_step_function:
                    args = walk_nested_dict_tuple_list(
                        args,
                        lambda x: x._remoteref() if isinstance(x, LazyEvalWrapper) else x,
                    )
                    kwargs = walk_nested_dict_tuple_list(
                        kwargs,
                        lambda x: x._remoteref() if isinstance(x, LazyEvalWrapper) else x,
                    )

                    # Shared single-element cache so func() runs at most once
                    func_return = []

                    def result_wrapper(a_func_return: List[Any], return_index: Optional[int]) -> Any:
                        if not a_func_return:
                            a_func_return.append(func(*args, **kwargs))
                        a_func_return = a_func_return[0]
                        return a_func_return if return_index is None else a_func_return[return_index]

                    if len(function_return) == 1:
                        ret_val = LazyEvalWrapper(
                            callback=functools.partial(result_wrapper, func_return, None),
                            remote_reference=functools.partial(result_wrapper, func_return, None),
                        )
                        cls._ref_lazy_loader_id_to_node_name[id(ret_val)] = _name
                        return ret_val
                    else:
                        return_w = [
                            LazyEvalWrapper(
                                callback=functools.partial(result_wrapper, func_return, i),
                                remote_reference=functools.partial(result_wrapper, func_return, i),
                            )
                            for i, _ in enumerate(function_return)
                        ]
                        for i in return_w:
                            cls._ref_lazy_loader_id_to_node_name[id(i)] = _name
                        return return_w

                # resolve all lazy objects if we have any:
                kwargs_artifacts = {}

                # Fold positional args into kwargs; extra positionals beyond the
                # declared signature are keyed by their star-args index ("0", "1", ...)
                star_args_index = 0
                for i, v in enumerate(args):
                    if not inspect_func.args or i >= len(inspect_func.args):
                        kwargs[str(star_args_index)] = v
                        star_args_index += 1
                    else:
                        kwargs[inspect_func.args[i]] = v

                # We need to remember when a pipeline step's return value is evaluated by the pipeline
                # controller, but not when it's done here (as we would remember the step every time).
                # _add_to_evaluated_return_values protects that
                tid = current_thread().ident
                cls._add_to_evaluated_return_values[tid] = False
                kwargs_artifacts.update(
                    {
                        k: walk_nested_dict_tuple_list(
                            v,
                            lambda x: x._remoteref() if isinstance(x, LazyEvalWrapper) else x,
                        )
                        for k, v in kwargs.items()
                        if isinstance(v, LazyEvalWrapper)
                    }
                )
                cls._add_to_evaluated_return_values[tid] = True
                kwargs = {k: deepcopy(v) for k, v in kwargs.items() if not isinstance(v, LazyEvalWrapper)}

                # check if we have the singleton
                if not cls._singleton:
                    # todo: somehow make sure the generated tasks list the parent pipeline as parent
                    original_tags = (
                        Task.current_task().get_tags(),
                        Task.current_task().get_system_tags(),
                    )
                    # This is an adhoc (eager) pipeline step — spin up a temporary controller for it
                    PipelineDecorator._eager_execution_instance = True
                    a_pipeline = PipelineDecorator(
                        name=name,
                        project="DevOps",  # it will not actually be used
                        version="0.0.0",
                        pool_frequency=111,
                        add_pipeline_tags=False,
                        target_project=None,
                    )

                    target_queue = (
                        PipelineDecorator._default_execution_queue or Task.current_task().data.execution.queue
                    )
                    if target_queue:
                        PipelineDecorator.set_default_execution_queue(target_queue)
                    else:
                        # if we are not running from a queue, we are probably in debug mode
                        a_pipeline._clearml_job_class = LocalClearmlJob
                        a_pipeline._default_execution_queue = "mock"

                    # restore tags, the pipeline might add a few
                    Task.current_task().set_tags(original_tags[0])
                    Task.current_task().set_system_tags(original_tags[1])

                # get node name
                _node_name = _name

                # check if we are launching the same node twice
                if _node_name in cls._singleton._launched_step_names:
                    # if we already launched a JOB on the node, this means we are calling the same function/task
                    # twice inside the pipeline, this means we need to replicate the node.
                    _node = cls._singleton._nodes[_node_name].copy()
                    # reset parameters - there might be conflicts with the copied node and they are generated anyway
                    _node.parameters = {}
                    _node.parents = []
                    # find a new name
                    counter = 1

                    # Use nodes in `_singleton._nodes` that have not been launched.
                    # First check if we launched the node.

                    # If it wasn't launched we also need to check that the new name of `_node`
                    # points to the original code section it was meant to run.
                    # Note that for the first iteration (when `_node.name == _node_name`)
                    # we always increment the name, as the name is always in `_launched_step_names`
                    while _node.name in cls._singleton._launched_step_names or (
                        _node.name in cls._singleton._nodes
                        and cls._singleton._nodes[_node.name].job_code_section
                        != cls._singleton._nodes[_node_name].job_code_section
                    ):
                        _node.name = "{}_{}".format(_node_name, counter)
                        counter += 1
                    # Copy callbacks to the replicated node
                    if cls._singleton._pre_step_callbacks.get(_node_name):
                        cls._singleton._pre_step_callbacks[_node.name] = cls._singleton._pre_step_callbacks[_node_name]
                    if cls._singleton._post_step_callbacks.get(_node_name):
                        cls._singleton._post_step_callbacks[_node.name] = cls._singleton._post_step_callbacks[
                            _node_name
                        ]
                    if cls._singleton._status_change_callbacks.get(_node_name):
                        cls._singleton._status_change_callbacks[_node.name] = cls._singleton._status_change_callbacks[
                            _node_name
                        ]
                    _node_name = _node.name
                    if _node.name not in cls._singleton._nodes:
                        cls._singleton._nodes[_node.name] = _node

                # get the node and mark it as launched
                cls._singleton._launched_step_names.add(_node_name)
                _node = cls._singleton._nodes[_node_name]
                cls._retries[_node_name] = 0
                # Resolve the retry policy: explicit callback > int max-retries > pipeline default
                cls._retries_callbacks[_node_name] = (
                    retry_on_failure
                    if callable(retry_on_failure)
                    else (
                        functools.partial(
                            cls._singleton._default_retry_on_failure_callback,
                            max_retries=retry_on_failure,
                        )
                        if isinstance(retry_on_failure, int)
                        else cls._singleton._retry_on_failure_callback
                    )
                )

                # The actual launch is a bit slow, we run it in the background
                launch_thread = Thread(
                    target=cls._component_launch,
                    args=(
                        _node_name,
                        _node,
                        kwargs_artifacts,
                        kwargs,
                        current_thread().ident,
                    ),
                )

                def results_reference(return_name: str) -> str:
                    # Return a remote reference string "<task_id>.<artifact_name>" for the step result
                    # wait until launch is completed
                    if launch_thread and launch_thread.is_alive():
                        try:
                            launch_thread.join()
                        except:  # noqa
                            pass

                    cls._wait_for_node(_node)
                    if not _node.job:
                        if not _node.executed:
                            raise ValueError("Job was not created and is also not cached/executed")
                        return "{}.{}".format(_node.executed, return_name)

                    if _node.job.is_failed() and not _node.continue_on_fail:
                        raise ValueError(
                            'Pipeline step "{}", Task ID={} failed'.format(_node.name, _node.job.task_id())
                        )

                    _node.executed = _node.job.task_id()
                    return "{}.{}".format(_node.job.task_id(), return_name)

                def result_wrapper(return_name: str) -> Any:
                    # Fetch and deserialize the actual step result value (blocks on the step Task)
                    # wait until launch is completed
                    if launch_thread and launch_thread.is_alive():
                        try:
                            launch_thread.join()
                        except:  # noqa
                            pass

                    # skipped job
                    if not _node.job:
                        return None

                    cls._wait_for_node(_node)
                    if (_node.job.is_failed() and not _node.continue_on_fail) or (
                        _node.job.is_aborted() and not _node.continue_on_abort
                    ):
                        raise ValueError(
                            'Pipeline step "{}", Task ID={} failed'.format(_node.name, _node.job.task_id())
                        )

                    _node.executed = _node.job.task_id()

                    # make sure we mark the current state of the DAG execution tree
                    # so that later we can find the "parents" to the current node
                    _tid = current_thread().ident
                    if cls._add_to_evaluated_return_values.get(_tid, True):
                        if _tid not in cls._evaluated_return_values:
                            cls._evaluated_return_values[_tid] = []
                        cls._evaluated_return_values[_tid].append(_node.name)

                    # Prefer the uploaded artifact; fall back to the return-parameters section
                    task = Task.get_task(_node.job.task_id())
                    if return_name in task.artifacts:
                        return task.artifacts[return_name].get(
                            deserialization_function=cls._singleton._artifact_deserialization_function
                        )
                    return task.get_parameters(cast=True).get(CreateFromFunction.return_section + "/" + return_name)

                # One lazy wrapper per declared return value; evaluation blocks until the step is done
                return_w = [
                    LazyEvalWrapper(
                        callback=functools.partial(result_wrapper, n),
                        remote_reference=functools.partial(results_reference, n),
                    )
                    for n in function_return
                ]
                for i in return_w:
                    cls._ref_lazy_loader_id_to_node_name[id(i)] = _node_name

                # start the launch thread now
                launch_thread.start()

                return return_w[0] if len(return_w) == 1 else return_w

            return wrapper

        # Support both @component and @component(...) usage
        return decorator_wrap if _func is None else decorator_wrap(_func)
@classmethod
def pipeline(
cls,
_func: Any = None,
*, # noqa
name: str,
project: str,
version: Optional[str] = None,
return_value: Optional[str] = None,
default_queue: Optional[str] = None,
pool_frequency: float = 0.2,
add_pipeline_tags: bool = False,
target_project: Optional[str] = None,
abort_on_failure: bool = False,
pipeline_execution_queue: Optional[str] = "services",
multi_instance_support: bool = False,
add_run_number: bool = True,
args_map: Dict[str, List[str]] = None,
start_controller_locally: bool = False,
retry_on_failure: Optional[
Union[int, Callable[[PipelineController, PipelineController.Node, int], bool]]
] = None, # noqa
docker: Optional[str] = None,
docker_args: Optional[str] = None,
docker_bash_setup_script: Optional[str] = None,
packages: Optional[Union[bool, str, Sequence[str]]] = None,
repo: Optional[str] = None,
repo_branch: Optional[str] = None,
repo_commit: Optional[str] = None,
artifact_serialization_function: Optional[Callable[[Any], Union[bytes, bytearray]]] = None,
artifact_deserialization_function: Optional[Callable[[bytes], Any]] = None,
output_uri: Optional[Union[str, bool]] = None,
skip_global_imports: bool = False,
working_dir: Optional[str] = None,
enable_local_imports: bool = True,
) -> Callable:
"""
Decorate pipeline logic function.
:param name: Provide pipeline name (if main Task exists it overrides its name)
:param project: Provide project storing the pipeline (if main Task exists it overrides its project)
:param version: Pipeline version. This version allows to uniquely identify the pipeline
template execution. Examples for semantic versions: version='1.0.1' , version='23', version='1.2'.
If not set, find the latest version of the pipeline and increment it. If no such version is found,
default to '1.0.0'
:param return_value: Optional, Provide an artifact name to store the pipeline function return object
Notice, If not provided the pipeline will not store the pipeline function return value.
:param default_queue: default pipeline step queue
:param float pool_frequency: The pooling frequency (in minutes) for monitoring experiments / states.
:param bool add_pipeline_tags: (default: False) if True, add `pipe: <pipeline_task_id>` tag to all
steps (Tasks) created by this pipeline.
:param str target_project: If provided, all pipeline steps are cloned into the target project
:param bool abort_on_failure: If False (default), failed pipeline steps will not cause the pipeline
to stop immediately, instead any step that is not connected (or indirectly connected) to the failed step,
will still be executed. Nonetheless, the pipeline itself will be marked failed, unless the failed step
was specifically defined with "continue_on_fail=True".
If True, any failed step will cause the pipeline to immediately abort, stop all running steps,
and mark the pipeline as failed.
:param pipeline_execution_queue: remote pipeline execution queue (default 'services' queue).
If None is passed, execute the pipeline logic locally (pipeline steps are still executed remotely)
:param multi_instance_support: If True, allow multiple calls to the same pipeline function,
each call creating a new Pipeline Task. Notice it is recommended to create an additional Task on the
"main process" acting as a master pipeline, automatically collecting the execution plots.
If multi_instance_support=='parallel' then the pipeline calls are executed in parallel,
in the `parallel` case the function calls return None, to collect all pipeline results call
`PipelineDecorator.wait_for_multi_pipelines()`.
Default False, no multi instance pipeline support.
:param add_run_number: If True (default), add the run number of the pipeline to the pipeline name.
Example, the second time we launch the pipeline "best pipeline", we rename it to "best pipeline #2"
:param args_map: Map arguments to their specific configuration section. Arguments not included in this map
will default to `Args` section. For example, for the following code:
.. code-block:: py
@PipelineDecorator.pipeline(args_map={'sectionA':['paramA'], 'sectionB:['paramB','paramC']
def executing_pipeline(paramA, paramB, paramC, paramD):
pass
Parameters would be stored as:
- paramA: sectionA/paramA
- paramB: sectionB/paramB
- paramC: sectionB/paramC
- paramD: Args/paramD
:param start_controller_locally: If True, start the controller on the local machine. The steps will run
remotely if `PipelineDecorator.run_locally` or `PipelineDecorator.debug_pipeline` are not called.
Default: False
:param retry_on_failure: Integer (number of retries) or Callback function that returns True to allow a retry
- Integer: In case of node failure, retry the node the number of times indicated by this parameter.
- Callable: A function called on node failure. Takes as parameters:
the PipelineController instance, the PipelineController.Node that failed and an int
representing the number of previous retries for the node that failed.
The function must return ``True`` if the node should be retried and ``False`` otherwise.
If True, the node will be re-queued and the number of retries left will be decremented by 1.
By default, if this callback is not specified, the function will be retried the number of
times indicated by `retry_on_failure`.
.. code-block:: py
def example_retry_on_failure_callback(pipeline, node, retries):
print(node.name, ' failed')
# allow up to 5 retries (total of 6 runs)
return retries < 5
:param docker: Select the docker image to be executed in by the remote session
:param docker_args: Add docker arguments, pass a single string
:param docker_bash_setup_script: Add bash script to be executed
inside the docker before setting up the Task's environment
:param packages: Manually specify a list of required packages or a local requirements.txt file.
Example: ["tqdm>=2.1", "scikit-learn"] or "./requirements.txt"
If not provided, packages are automatically added based on the imports used in the function.
Use `False` to install requirements from "requirements.txt" inside your git repository
:param repo: Optional, specify a repository to attach to the function, when remotely executing.
Allow users to execute the function inside the specified repository, enabling them to load modules/script
from the repository. Notice the execution work directory will be the repository root folder.
Supports both git repo url link, and local repository path (automatically converted into the remote
git/commit as is currently checkout).
Example remote url: 'https://github.com/user/repo.git'
Example local repo copy: './repo' -> will automatically store the remote
repo url and commit ID based on the locally cloned copy
Use empty string ("") to disable any repository auto-detection
:param repo_branch: Optional, specify the remote repository branch (Ignored, if local repo path is used)
:param repo_commit: Optional, specify the repository commit ID (Ignored, if local repo path is used)
:param artifact_serialization_function: A serialization function that takes one
parameter of any type which is the object to be serialized. The function should return
a `bytes` or `bytearray` object, which represents the serialized object. All parameter/return
artifacts uploaded by the pipeline will be serialized using this function.
All relevant imports must be done in this function. For example:
.. code-block:: py
def serialize(obj):
import dill
return dill.dumps(obj)
:param artifact_deserialization_function: A deserialization function that takes one parameter of type `bytes`,
which represents the serialized object. This function should return the deserialized object.
All parameter/return artifacts fetched by the pipeline will be deserialized using this function.
All relevant imports must be done in this function. For example:
.. code-block:: py
def deserialize(bytes_):
import dill
return dill.loads(bytes_)
:param output_uri: The storage / output url for this pipeline. This is the default location for output
models and other artifacts. Check Task.init reference docs for more info (output_uri is a parameter).
The `output_uri` of this pipeline's steps will default to this value.
:param skip_global_imports: If True, global imports will not be included in the steps' execution, otherwise all
global imports will be automatically imported in a safe manner at the beginning of each step’s execution.
Default is False
:param working_dir: Working directory to launch the pipeline from.
:param enable_local_imports: If True, allow pipeline steps to import from local files
by appending to the PYTHONPATH of each step the directory the pipeline controller
script resides in (sys.path[0]).
If False, the directory won't be appended to PYTHONPATH. Default is True.
Ignored while running remotely.
"""
def decorator_wrap(func: Callable) -> Callable:
def internal_decorator(*args: Any, **kwargs: Any) -> Any:
pipeline_kwargs = dict(**(kwargs or {}))
pipeline_kwargs_types = dict()
inspect_func = inspect.getfullargspec(func)
if args:
if not inspect_func.args:
raise ValueError("Could not parse function arguments")
pipeline_kwargs.update({inspect_func.args[i]: v for i, v in enumerate(args)})
# add default function arguments if we have defaults for all arguments
if inspect_func.args:
default_values = list(inspect_func.defaults or [])
default_values = ([None] * (len(inspect_func.args) - len(default_values))) + default_values
default_kwargs = {k: v for k, v in zip(inspect_func.args, default_values)}
default_kwargs.update(pipeline_kwargs)
pipeline_kwargs = default_kwargs
if inspect_func.annotations:
pipeline_kwargs_types = {str(k): inspect_func.annotations[k] for k in inspect_func.annotations}
# run the entire pipeline locally, as python functions
if cls._debug_execute_step_function:
a_pipeline = PipelineDecorator(
name=name,
project=project,
version=version,
pool_frequency=pool_frequency,
add_pipeline_tags=add_pipeline_tags,
target_project=target_project,
abort_on_failure=abort_on_failure,
add_run_number=add_run_number,
retry_on_failure=retry_on_failure,
docker=docker,
docker_args=docker_args,
docker_bash_setup_script=docker_bash_setup_script,
packages=packages,
repo=repo,
repo_branch=repo_branch,
repo_commit=repo_commit,
artifact_serialization_function=artifact_serialization_function,
artifact_deserialization_function=artifact_deserialization_function,
output_uri=output_uri,
skip_global_imports=skip_global_imports,
working_dir=working_dir,
enable_local_imports=enable_local_imports,
)
ret_val = func(**pipeline_kwargs)
LazyEvalWrapper.trigger_all_remote_references()
a_pipeline._task.close()
return ret_val
# check if we are in a multi pipeline
force_single_multi_pipeline_call = False
if multi_instance_support and cls._multi_pipeline_call_counter >= 0:
# check if we are running remotely
if not Task.running_locally():
# get the main Task property
t = Task.get_task(task_id=get_remote_task_id())
if str(t.task_type) == str(Task.TaskTypes.controller):
# noinspection PyBroadException
try:
# noinspection PyProtectedMember
multi_pipeline_call_counter = int(
t._get_runtime_properties().get("multi_pipeline_counter", None)
)
# NOTICE! if this is not our call we LEAVE immediately
# check if this is our call to start, if not we will wait for the next one
if multi_pipeline_call_counter != cls._multi_pipeline_call_counter:
return
except Exception:
# this is not the one, so we should just run the first
# instance and leave immediately
force_single_multi_pipeline_call = True
if default_queue:
cls.set_default_execution_queue(default_queue)
a_pipeline = PipelineDecorator(
name=name,
project=project,
version=version,
pool_frequency=pool_frequency,
add_pipeline_tags=add_pipeline_tags,
target_project=target_project,
abort_on_failure=abort_on_failure,
add_run_number=add_run_number,
retry_on_failure=retry_on_failure,
docker=docker,
docker_args=docker_args,
docker_bash_setup_script=docker_bash_setup_script,
packages=packages,
repo=repo,
repo_branch=repo_branch,
repo_commit=repo_commit,
artifact_serialization_function=artifact_serialization_function,
artifact_deserialization_function=artifact_deserialization_function,
output_uri=output_uri,
skip_global_imports=skip_global_imports,
working_dir=working_dir,
enable_local_imports=enable_local_imports,
)
a_pipeline._args_map = args_map or {}
if PipelineDecorator._debug_execute_step_process:
a_pipeline._clearml_job_class = LocalClearmlJob
a_pipeline._default_execution_queue = "mock"
a_pipeline._clearml_job_class.register_hashing_callback(a_pipeline._adjust_task_hashing)
# add pipeline arguments
for k in pipeline_kwargs:
a_pipeline.add_parameter(
name=k,
default=pipeline_kwargs.get(k),
param_type=pipeline_kwargs_types.get(k),
)
# sync multi-pipeline call counter (so we know which one to skip)
if Task.running_locally() and multi_instance_support and cls._multi_pipeline_call_counter >= 0:
# noinspection PyProtectedMember
a_pipeline._task._set_runtime_properties(
dict(multi_pipeline_counter=str(cls._multi_pipeline_call_counter))
)
# run the actual pipeline
if (
not start_controller_locally
and not PipelineDecorator._debug_execute_step_process
and pipeline_execution_queue
):
# rerun the pipeline on a remote machine
a_pipeline._task.execute_remotely(queue_name=pipeline_execution_queue)
# when we get here it means we are running remotely
# this will also deserialize the pipeline and arguments
a_pipeline._start(wait=False)
# sync arguments back (post deserialization and casting back)
for k in pipeline_kwargs.keys():
if k in a_pipeline.get_parameters():
pipeline_kwargs[k] = a_pipeline.get_parameters()[k]
# this time the pipeline is executed only on the remote machine
try:
pipeline_result = func(**pipeline_kwargs)
except Exception:
a_pipeline.stop(mark_failed=True)
raise
triggered_exception = None
try:
LazyEvalWrapper.trigger_all_remote_references()
except Exception as ex:
triggered_exception = ex
# make sure we wait for all nodes to finish
waited = True
while waited:
waited = False
for node in list(a_pipeline._nodes.values()):
if node.executed or not node.job or node.job.is_stopped(aborted_nonresponsive_as_running=True):
continue
cls._wait_for_node(node)
waited = True
# store the pipeline result of we have any:
if return_value and pipeline_result is not None:
a_pipeline._upload_pipeline_artifact(
artifact_name=str(return_value), artifact_object=pipeline_result
)
# now we can stop the pipeline
a_pipeline.stop()
# now we can raise the exception
if triggered_exception:
raise triggered_exception
# Make sure that if we do not need to run all pipelines we forcefully leave the process
if force_single_multi_pipeline_call:
leave_process()
# we will never get here
return pipeline_result
if multi_instance_support:
return cls._multi_pipeline_wrapper(
func=internal_decorator,
parallel=bool(multi_instance_support == "parallel"),
)
return internal_decorator
return decorator_wrap if _func is None else decorator_wrap(_func)
@classmethod
def set_default_execution_queue(cls, default_execution_queue: Optional[str]) -> None:
"""
Set the default execution queue if pipeline step does not specify an execution queue
:param default_execution_queue: The execution queue to use if no execution queue is provided
"""
cls._default_execution_queue = str(default_execution_queue) if default_execution_queue else None
@classmethod
def run_locally(cls) -> None:
"""
Set local mode, run all functions locally as subprocess
Run the full pipeline DAG locally, where steps are executed as sub-processes Tasks
Notice: running the DAG locally assumes the local code execution (i.e. it will not clone & apply git diff)
"""
cls._debug_execute_step_process = True
cls._debug_execute_step_function = False
@classmethod
def debug_pipeline(cls) -> None:
"""
Set debugging mode, run all functions locally as functions (serially)
Run the full pipeline DAG locally, where steps are executed as functions
.. note::
Running the DAG locally assumes local code execution (i.e. it will not clone & apply git diff).
Pipeline steps are executed as functions (no Task will be created).
"""
cls._debug_execute_step_process = True
cls._debug_execute_step_function = True
@classmethod
def get_current_pipeline(cls) -> "PipelineDecorator":
"""
Return the currently running pipeline instance
"""
return cls._singleton
@classmethod
def wait_for_multi_pipelines(cls) -> List[Any]:
# type () -> List[object]
"""
Wait until all background multi pipeline execution is completed.
Returns all the pipeline results in call order (first pipeline call at index 0)
:return: List of return values from executed pipeline, based on call order.
"""
return cls._wait_for_multi_pipelines()
@classmethod
def _component_launch(
cls,
node_name: str,
node: PipelineController.Node,
kwargs_artifacts: Dict[str, Any],
kwargs: Dict[str, Any],
tid: int,
) -> None:
_node_name = node_name
_node = node
# update artifacts kwargs
for k, v in kwargs_artifacts.items():
if k in kwargs:
kwargs.pop(k, None)
_node.parameters.pop("{}/{}".format(CreateFromFunction.kwargs_section, k), None)
_node.parameters["{}/{}".format(CreateFromFunction.input_artifact_section, k)] = v
if v and "." in str(v):
parent_id, _ = str(v).split(".", 1)
# find parent and push it into the _node.parents
for n, node in sorted(list(cls._singleton._nodes.items()), reverse=True):
if n != _node.name and node.executed and node.executed == parent_id:
if n not in _node.parents:
_node.parents.append(n)
break
leaves = cls._singleton._find_executed_node_leaves()
_node.parents = (_node.parents or []) + [x for x in cls._evaluated_return_values.get(tid, []) if x in leaves]
if not cls._singleton._abort_running_steps_on_failure:
for parent in _node.parents:
parent = cls._singleton._nodes[parent]
if (
parent.status == "failed"
and parent.skip_children_on_fail
or parent.status == "aborted"
and parent.skip_children_on_abort
or parent.status == "skipped"
):
_node.skip_job = True
return
for k, v in kwargs.items():
if v is None or isinstance(v, (float, int, bool, six.string_types)):
_node.parameters["{}/{}".format(CreateFromFunction.kwargs_section, k)] = v
else:
# we need to create an artifact
artifact_name = "result_{}_{}".format(re.sub(r"\W+", "", _node.name), k)
cls._singleton._upload_pipeline_artifact(artifact_name=artifact_name, artifact_object=v)
_node.parameters["{}/{}".format(CreateFromFunction.input_artifact_section, k)] = "{}.{}".format(
cls._singleton._task.id, artifact_name
)
# verify the new step
cls._singleton._verify_node(_node)
# launch the new step
cls._singleton._launch_node(_node)
# check if we generated the pipeline we need to update the new eager step
if PipelineDecorator._eager_execution_instance and _node.job:
# check if we need to add the pipeline tag on the new node
pipeline_tags = [t for t in Task.current_task().get_tags() or [] if str(t).startswith(cls._node_tag_prefix)]
if pipeline_tags and _node.job and _node.job.task:
pipeline_tags = list(set((_node.job.task.get_tags() or []) + pipeline_tags))
_node.job.task.set_tags(pipeline_tags)
# force parent task as pipeline
_node.job.task._edit(parent=Task.current_task().parent)
# store the new generated node, so we can later serialize it
pipeline_dag = cls._singleton._serialize()
# check if node is cached
if _node.job.is_cached_task():
pipeline_dag[_node_name]["is_cached"] = True
# store entire definition on the parent pipeline
from clearml.backend_api.services import tasks
artifact = tasks.Artifact(
key="{}:{}:{}".format(
cls._eager_step_artifact,
Task.current_task().id,
_node.job.task_id(),
),
type="json",
mode="output",
type_data=tasks.ArtifactTypeData(
preview=json.dumps({_node_name: pipeline_dag[_node_name]}),
content_type="application/pipeline",
),
)
req = tasks.AddOrUpdateArtifactsRequest(task=Task.current_task().parent, artifacts=[artifact], force=True)
res = Task.current_task().send(req, raise_on_errors=False)
if not res or not res.response or not res.response.updated:
pass
# update pipeline execution graph
cls._singleton.update_execution_plot()
@classmethod
def _multi_pipeline_wrapper(
cls,
func: Callable = None,
parallel: bool = False,
) -> Callable:
"""
Add support for multiple pipeline function calls,
enabling execute multiple instances of the same pipeline from a single script.
.. code-block:: py
@PipelineDecorator.pipeline(
multi_instance_support=True, name="custom pipeline logic", project="examples", version="1.0")
def pipeline(parameter=1):
print(f"running with parameter={parameter}")
# run both pipeline (if multi_instance_support=='parallel', run pipelines in parallel)
pipeline(parameter=1)
pipeline(parameter=2)
:param parallel: If True, the pipeline is running in the background, which implies calling
the pipeline twice means running the pipelines in parallel.
Default: False, pipeline function returns when pipeline completes
:return: Return wrapped pipeline function.
Notice the return value of the pipeline wrapped function:
if parallel==True, return will be None, otherwise expect the return of the pipeline wrapped function
"""
def internal_decorator(*args: Any, **kwargs: Any) -> Any:
cls._multi_pipeline_call_counter += 1
# if this is a debug run just call the function (no parallelization).
if cls._debug_execute_step_function:
return func(*args, **kwargs)
def sanitized_env(a_queue: Queue, *a_args: Any, **a_kwargs: Any) -> Any:
os.environ.pop("CLEARML_PROC_MASTER_ID", None)
os.environ.pop("TRAINS_PROC_MASTER_ID", None)
os.environ.pop("CLEARML_TASK_ID", None)
os.environ.pop("TRAINS_TASK_ID", None)
if Task.current_task():
# noinspection PyProtectedMember
Task.current_task()._reset_current_task_obj()
a_result = func(*a_args, **a_kwargs)
if a_queue is not None:
task_id = Task.current_task().id if Task.current_task() else None
a_queue.put((task_id, a_result))
return a_result
queue = Queue()
p = Process(target=sanitized_env, args=(queue,) + args, kwargs=kwargs)
# make sure we wait for the subprocess.
p.daemon = False
p.start()
if parallel and Task.running_locally():
cls._multi_pipeline_instances.append((p, queue))
return
else:
p.join()
# noinspection PyBroadException
try:
pipeline_task, result = queue.get_nowait()
except Exception:
return None
# we should update the master Task plot:
if pipeline_task and Task.current_task():
cls._add_pipeline_plots(pipeline_task)
return result
if parallel and not cls._atexit_registered:
cls._atexit_registered = True
atexit.register(cls._wait_for_multi_pipelines)
return internal_decorator
@classmethod
def _wait_for_multi_pipelines(cls) -> List[Any]:
results = []
if not cls._multi_pipeline_instances:
return results
print("Waiting for background pipelines to finish")
for p, queue in cls._multi_pipeline_instances:
try:
p.join()
except: # noqa
pass
# noinspection PyBroadException
try:
pipeline_task, result = queue.get_nowait()
results.append(result)
cls._add_pipeline_plots(pipeline_task)
except Exception:
pass
cls._multi_pipeline_instances = []
return results
@classmethod
def _add_pipeline_plots(cls, pipeline_task_id: str) -> None:
if not Task.current_task():
return
from clearml.backend_api.services import events
res = Task.current_task().send(
events.GetTaskPlotsRequest(task=pipeline_task_id, iters=1),
raise_on_errors=False,
ignore_errors=True,
)
execution_flow = None
execution_details = None
for p in res.response.plots:
try:
if (
p["metric"] == cls._report_plot_execution_flow["title"]
and p["variant"] == cls._report_plot_execution_flow["series"]
):
execution_flow = json.loads(p["plot_str"])
elif (
p["metric"] == cls._report_plot_execution_details["title"]
and p["variant"] == cls._report_plot_execution_details["series"]
):
execution_details = json.loads(p["plot_str"])
execution_details["layout"]["name"] += " - " + str(pipeline_task_id)
except Exception as ex:
getLogger("clearml.automation.controller").warning("Multi-pipeline plot update failed: {}".format(ex))
if execution_flow:
Task.current_task().get_logger().report_plotly(
title=cls._report_plot_execution_flow["title"],
series="{} - {}".format(cls._report_plot_execution_flow["series"], pipeline_task_id),
iteration=0,
figure=execution_flow,
)
if execution_details:
Task.current_task().get_logger().report_plotly(
title=cls._report_plot_execution_details["title"],
series="{} - {}".format(cls._report_plot_execution_details["series"], pipeline_task_id),
iteration=0,
figure=execution_details,
)
| PipelineDecorator |
python | allegroai__clearml | clearml/backend_interface/task/repo/detectors.py | {
"start": 440,
"end": 898
} | class ____(object):
""" " Repository information as queried by a detector"""
url = attr.ib(default="")
branch = attr.ib(default="")
commit = attr.ib(default="")
root = attr.ib(default="")
status = attr.ib(default="")
diff = attr.ib(default="")
modified = attr.ib(default=False, type=bool, converter=bool)
def is_empty(self) -> bool:
return not any(attr.asdict(self).values())
@six.add_metaclass(abc.ABCMeta)
| Result |
python | geekcomputers__Python | venv/Lib/site-packages/pip/_vendor/distlib/database.py | {
"start": 1928,
"end": 11675
} | class ____(object):
"""
Represents a set of distributions installed on a path (typically sys.path).
"""
def __init__(self, path=None, include_egg=False):
"""
Create an instance from a path, optionally including legacy (distutils/
setuptools/distribute) distributions.
:param path: The path to use, as a list of directories. If not specified,
sys.path is used.
:param include_egg: If True, this instance will look for and return legacy
distributions as well as those based on PEP 376.
"""
if path is None:
path = sys.path
self.path = path
self._include_dist = True
self._include_egg = include_egg
self._cache = _Cache()
self._cache_egg = _Cache()
self._cache_enabled = True
self._scheme = get_scheme('default')
def _get_cache_enabled(self):
return self._cache_enabled
def _set_cache_enabled(self, value):
self._cache_enabled = value
cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
def clear_cache(self):
"""
Clears the internal cache.
"""
self._cache.clear()
self._cache_egg.clear()
def _yield_distributions(self):
"""
Yield .dist-info and/or .egg(-info) distributions.
"""
# We need to check if we've seen some resources already, because on
# some Linux systems (e.g. some Debian/Ubuntu variants) there are
# symlinks which alias other files in the environment.
seen = set()
for path in self.path:
finder = resources.finder_for_path(path)
if finder is None:
continue
r = finder.find('')
if not r or not r.is_container:
continue
rset = sorted(r.resources)
for entry in rset:
r = finder.find(entry)
if not r or r.path in seen:
continue
try:
if self._include_dist and entry.endswith(DISTINFO_EXT):
possible_filenames = [
METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME
]
for metadata_filename in possible_filenames:
metadata_path = posixpath.join(
entry, metadata_filename)
pydist = finder.find(metadata_path)
if pydist:
break
else:
continue
with contextlib.closing(pydist.as_stream()) as stream:
metadata = Metadata(fileobj=stream,
scheme='legacy')
logger.debug('Found %s', r.path)
seen.add(r.path)
yield new_dist_class(r.path,
metadata=metadata,
env=self)
elif self._include_egg and entry.endswith(
('.egg-info', '.egg')):
logger.debug('Found %s', r.path)
seen.add(r.path)
yield old_dist_class(r.path, self)
except Exception as e:
msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s'
logger.warning(msg, r.path, e)
import warnings
warnings.warn(msg % (r.path, e), stacklevel=2)
def _generate_cache(self):
"""
Scan the path for distributions and populate the cache with
those that are found.
"""
gen_dist = not self._cache.generated
gen_egg = self._include_egg and not self._cache_egg.generated
if gen_dist or gen_egg:
for dist in self._yield_distributions():
if isinstance(dist, InstalledDistribution):
self._cache.add(dist)
else:
self._cache_egg.add(dist)
if gen_dist:
self._cache.generated = True
if gen_egg:
self._cache_egg.generated = True
@classmethod
def distinfo_dirname(cls, name, version):
"""
The *name* and *version* parameters are converted into their
filename-escaped form, i.e. any ``'-'`` characters are replaced
with ``'_'`` other than the one in ``'dist-info'`` and the one
separating the name from the version number.
:parameter name: is converted to a standard distribution name by replacing
any runs of non- alphanumeric characters with a single
``'-'``.
:type name: string
:parameter version: is converted to a standard version string. Spaces
become dots, and all other non-alphanumeric characters
(except dots) become dashes, with runs of multiple
dashes condensed to a single dash.
:type version: string
:returns: directory name
:rtype: string"""
name = name.replace('-', '_')
return '-'.join([name, version]) + DISTINFO_EXT
def get_distributions(self):
"""
Provides an iterator that looks for distributions and returns
:class:`InstalledDistribution` or
:class:`EggInfoDistribution` instances for each one of them.
:rtype: iterator of :class:`InstalledDistribution` and
:class:`EggInfoDistribution` instances
"""
if not self._cache_enabled:
for dist in self._yield_distributions():
yield dist
else:
self._generate_cache()
for dist in self._cache.path.values():
yield dist
if self._include_egg:
for dist in self._cache_egg.path.values():
yield dist
def get_distribution(self, name):
"""
Looks for a named distribution on the path.
This function only returns the first result found, as no more than one
value is expected. If nothing is found, ``None`` is returned.
:rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
or ``None``
"""
result = None
name = name.lower()
if not self._cache_enabled:
for dist in self._yield_distributions():
if dist.key == name:
result = dist
break
else:
self._generate_cache()
if name in self._cache.name:
result = self._cache.name[name][0]
elif self._include_egg and name in self._cache_egg.name:
result = self._cache_egg.name[name][0]
return result
def provides_distribution(self, name, version=None):
"""
Iterates over all distributions to find which distributions provide *name*.
If a *version* is provided, it will be used to filter the results.
This function only returns the first result found, since no more than
one values are expected. If the directory is not found, returns ``None``.
:parameter version: a version specifier that indicates the version
required, conforming to the format in ``PEP-345``
:type name: string
:type version: string
"""
matcher = None
if version is not None:
try:
matcher = self._scheme.matcher('%s (%s)' % (name, version))
except ValueError:
raise DistlibException('invalid name or version: %r, %r' %
(name, version))
for dist in self.get_distributions():
# We hit a problem on Travis where enum34 was installed and doesn't
# have a provides attribute ...
if not hasattr(dist, 'provides'):
logger.debug('No "provides": %s', dist)
else:
provided = dist.provides
for p in provided:
p_name, p_ver = parse_name_and_version(p)
if matcher is None:
if p_name == name:
yield dist
break
else:
if p_name == name and matcher.match(p_ver):
yield dist
break
def get_file_path(self, name, relative_path):
"""
Return the path to a resource file.
"""
dist = self.get_distribution(name)
if dist is None:
raise LookupError('no distribution named %r found' % name)
return dist.get_resource_path(relative_path)
def get_exported_entries(self, category, name=None):
"""
Return all of the exported entries in a particular category.
:param category: The category to search for entries.
:param name: If specified, only entries with that name are returned.
"""
for dist in self.get_distributions():
r = dist.exports
if category in r:
d = r[category]
if name is not None:
if name in d:
yield d[name]
else:
for v in d.values():
yield v
| DistributionPath |
python | ray-project__ray | doc/source/serve/doc_code/http_guide/disconnects.py | {
"start": 1615,
"end": 2646
} | class ____:
async def snore(self):
await asyncio.sleep(1)
print("ZZZ")
async def __call__(self):
try:
print("SnoringSleeper received request!")
# Prevent the snore() method from being cancelled
await asyncio.shield(self.snore())
except asyncio.CancelledError:
print("SnoringSleeper's request was cancelled!")
app = SnoringSleeper.bind()
# __end_shielded_disconnect__
serve.run(app)
import requests
from requests.exceptions import Timeout
# Intentionally time out request to test cancellation behavior
try:
requests.get("http://localhost:8000", timeout=0.5)
except Timeout:
pass
wait_for_condition(
lambda: {
"SnoringSleeper received request!",
"SnoringSleeper's request was cancelled!",
"ZZZ",
}
== set(ray.get(print_storage_handle.get.remote())),
timeout=5,
)
sys.stdout.write(f"{ray.get(print_storage_handle.get.remote())}\n")
ray.get(print_storage_handle.clear.remote())
| SnoringSleeper |
python | pandas-dev__pandas | pandas/tests/indexing/test_loc.py | {
"start": 92684,
"end": 97419
} | class ____:
def test_loc_setitem_bool_mask_timedeltaindex(self):
# GH#14946
df = DataFrame({"x": range(10)})
df.index = to_timedelta(range(10), unit="s")
conditions = [df["x"] > 3, df["x"] == 3, df["x"] < 3]
expected_data = [
[0, 1, 2, 3, 10, 10, 10, 10, 10, 10],
[0, 1, 2, 10, 4, 5, 6, 7, 8, 9],
[10, 10, 10, 3, 4, 5, 6, 7, 8, 9],
]
for cond, data in zip(conditions, expected_data):
result = df.copy()
result.loc[cond, "x"] = 10
expected = DataFrame(
data,
index=to_timedelta(range(10), unit="s"),
columns=["x"],
dtype="int64",
)
tm.assert_frame_equal(expected, result)
@pytest.mark.parametrize("tz", [None, "UTC"])
def test_loc_setitem_mask_with_datetimeindex_tz(self, tz):
# GH#16889
# support .loc with alignment and tz-aware DatetimeIndex
mask = np.array([True, False, True, False])
idx = date_range("20010101", periods=4, tz=tz)
df = DataFrame({"a": np.arange(4)}, index=idx).astype("float64")
result = df.copy()
result.loc[mask, :] = df.loc[mask, :]
tm.assert_frame_equal(result, df)
result = df.copy()
result.loc[mask] = df.loc[mask]
tm.assert_frame_equal(result, df)
def test_loc_setitem_mask_and_label_with_datetimeindex(self):
# GH#9478
# a datetimeindex alignment issue with partial setting
df = DataFrame(
np.arange(6.0).reshape(3, 2),
columns=list("AB"),
index=date_range("1/1/2000", periods=3, freq="1h"),
)
expected = df.copy()
expected["C"] = [expected.index[0]] + [pd.NaT, pd.NaT]
mask = df.A < 1
df.loc[mask, "C"] = df.loc[mask].index
tm.assert_frame_equal(df, expected)
def test_loc_setitem_mask_td64_series_value(self):
# GH#23462 key list of bools, value is a Series
td1 = Timedelta(0)
td2 = Timedelta(28767471428571405)
df = DataFrame({"col": Series([td1, td2])})
df_copy = df.copy()
ser = Series([td1])
expected = df["col"].iloc[1]._value
df.loc[[True, False]] = ser
result = df["col"].iloc[1]._value
assert expected == result
tm.assert_frame_equal(df, df_copy)
def test_loc_setitem_boolean_and_column(self, float_frame):
expected = float_frame.copy()
mask = float_frame["A"] > 0
float_frame.loc[mask, "B"] = 0
values = expected.values.copy()
values[mask.values, 1] = 0
expected = DataFrame(values, index=expected.index, columns=expected.columns)
tm.assert_frame_equal(float_frame, expected)
def test_loc_setitem_ndframe_values_alignment(self):
# GH#45501
df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
df.loc[[False, False, True], ["a"]] = DataFrame(
{"a": [10, 20, 30]}, index=[2, 1, 0]
)
expected = DataFrame({"a": [1, 2, 10], "b": [4, 5, 6]})
tm.assert_frame_equal(df, expected)
# same thing with Series RHS
df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
df.loc[[False, False, True], ["a"]] = Series([10, 11, 12], index=[2, 1, 0])
tm.assert_frame_equal(df, expected)
# same thing but setting "a" instead of ["a"]
df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
df.loc[[False, False, True], "a"] = Series([10, 11, 12], index=[2, 1, 0])
tm.assert_frame_equal(df, expected)
df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
df_orig = df.copy()
ser = df["a"]
ser.loc[[False, False, True]] = Series([10, 11, 12], index=[2, 1, 0])
tm.assert_frame_equal(df, df_orig)
def test_loc_indexer_empty_broadcast(self):
# GH#51450
df = DataFrame({"a": [], "b": []}, dtype=object)
expected = df.copy()
df.loc[np.array([], dtype=np.bool_), ["a"]] = df["a"].copy()
tm.assert_frame_equal(df, expected)
def test_loc_indexer_all_false_broadcast(self):
# GH#51450
df = DataFrame({"a": ["x"], "b": ["y"]}, dtype=object)
expected = df.copy()
df.loc[np.array([False], dtype=np.bool_), ["a"]] = df["b"].copy()
tm.assert_frame_equal(df, expected)
def test_loc_indexer_length_one(self):
# GH#51435
df = DataFrame({"a": ["x"], "b": ["y"]}, dtype=object)
expected = DataFrame({"a": ["y"], "b": ["y"]}, dtype=object)
df.loc[np.array([True], dtype=np.bool_), ["a"]] = df["b"].copy()
tm.assert_frame_equal(df, expected)
| TestLocBooleanMask |
python | astropy__astropy | astropy/table/tests/test_init_table.py | {
"start": 371,
"end": 910
} | class ____(Mapping):
"""A minimal mapping-like object that does not subclass dict.
This is used to test code that expects dict-like but without actually
inheriting from dict.
"""
def __init__(self, *args, **kwargs):
self._data = dict(*args, **kwargs)
def __getitem__(self, item):
return self._data[item]
def __setitem__(self, item, value):
self._data[item] = value
def __iter__(self):
return iter(self._data)
def __len__(self):
return len(self._data)
| DictLike |
python | tornadoweb__tornado | tornado/test/auth_test.py | {
"start": 7603,
"end": 8318
} | class ____(TwitterClientHandler):
@gen.coroutine
def get(self):
# TODO: would be nice to go through the login flow instead of
# cheating with a hard-coded access token.
try:
response = yield self.twitter_request(
"/users/show/%s" % self.get_argument("name"),
access_token=dict(key="hjkl", secret="vbnm"),
)
except HTTPClientError:
# TODO(bdarnell): Should we catch HTTP errors and
# transform some of them (like 403s) into AuthError?
self.set_status(500)
self.finish("error from twitter request")
else:
self.finish(response)
| TwitterClientShowUserHandler |
python | huggingface__transformers | src/transformers/models/visual_bert/modeling_visual_bert.py | {
"start": 21092,
"end": 29510
} | class ____(VisualBertPreTrainedModel):
def __init__(self, config, add_pooling_layer=True):
r"""
add_pooling_layer (bool, *optional*, defaults to `True`):
Whether to add a pooling layer
"""
super().__init__(config)
self.config = config
self.embeddings = VisualBertEmbeddings(config)
self.encoder = VisualBertEncoder(config)
self.pooler = VisualBertPooler(config) if add_pooling_layer else None
self.bypass_transformer = config.bypass_transformer
if self.bypass_transformer:
self.additional_layer = VisualBertLayer(config)
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.embeddings.word_embeddings
def set_input_embeddings(self, value):
self.embeddings.word_embeddings = value
@auto_docstring
def forward(
    self,
    input_ids: Optional[torch.LongTensor] = None,
    attention_mask: Optional[torch.LongTensor] = None,
    token_type_ids: Optional[torch.LongTensor] = None,
    position_ids: Optional[torch.LongTensor] = None,
    inputs_embeds: Optional[torch.FloatTensor] = None,
    visual_embeds: Optional[torch.FloatTensor] = None,
    visual_attention_mask: Optional[torch.LongTensor] = None,
    visual_token_type_ids: Optional[torch.LongTensor] = None,
    image_text_alignment: Optional[torch.LongTensor] = None,
    output_attentions: Optional[bool] = None,
    output_hidden_states: Optional[bool] = None,
    return_dict: Optional[bool] = None,
) -> Union[tuple[torch.Tensor], BaseModelOutputWithPooling]:
    r"""
    visual_embeds (`torch.FloatTensor` of shape `(batch_size, visual_seq_length, visual_embedding_dim)`, *optional*):
        The embedded representation of the visual inputs, generally derived using an object detector.
    visual_attention_mask (`torch.FloatTensor` of shape `(batch_size, visual_seq_length)`, *optional*):
        Mask to avoid performing attention on visual embeddings. Mask values selected in `[0, 1]`:

        - 1 for tokens that are **not masked**,
        - 0 for tokens that are **masked**.

        [What are attention masks?](../glossary#attention-mask)
    visual_token_type_ids (`torch.LongTensor` of shape `(batch_size, visual_seq_length)`, *optional*):
        Segment token indices to indicate different portions of the visual embeds.

        [What are token type IDs?](../glossary#token-type-ids) The authors of VisualBERT set the
        *visual_token_type_ids* to *1* for all tokens.
    image_text_alignment (`torch.LongTensor` of shape `(batch_size, visual_seq_length, alignment_number)`, *optional*):
        Image-Text alignment used to decide the position IDs of the visual embeddings.

    Example:

    ```python
    # Assumption: *get_visual_embeddings(image)* gets the visual embeddings of the image.
    from transformers import AutoTokenizer, VisualBertModel
    import torch

    tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")
    model = VisualBertModel.from_pretrained("uclanlp/visualbert-vqa-coco-pre")

    inputs = tokenizer("The capital of France is Paris.", return_tensors="pt")
    visual_embeds = get_visual_embeddings(image).unsqueeze(0)

    visual_token_type_ids = torch.ones(visual_embeds.shape[:-1], dtype=torch.long)
    visual_attention_mask = torch.ones(visual_embeds.shape[:-1], dtype=torch.float)

    inputs.update(
        {
            "visual_embeds": visual_embeds,
            "visual_token_type_ids": visual_token_type_ids,
            "visual_attention_mask": visual_attention_mask,
        }
    )

    outputs = model(**inputs)

    last_hidden_states = outputs.last_hidden_state
    ```"""
    # Fall back to the model config for any output option the caller left unset.
    output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
    output_hidden_states = (
        output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
    )
    return_dict = return_dict if return_dict is not None else self.config.use_return_dict

    # Exactly one of input_ids / inputs_embeds must be provided.
    if input_ids is not None and inputs_embeds is not None:
        raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
    elif input_ids is not None:
        self.warn_if_padding_and_no_attention_mask(input_ids, attention_mask)
        input_shape = input_ids.size()
    elif inputs_embeds is not None:
        input_shape = inputs_embeds.size()[:-1]
    else:
        raise ValueError("You have to specify either input_ids or inputs_embeds")

    batch_size, seq_length = input_shape
    device = input_ids.device if input_ids is not None else inputs_embeds.device

    if visual_embeds is not None:
        visual_input_shape = visual_embeds.size()[:-1]

    # Default masks attend to every textual / visual position.
    if attention_mask is None:
        attention_mask = torch.ones(input_shape, device=device)
    if visual_embeds is not None and visual_attention_mask is None:
        visual_attention_mask = torch.ones(visual_input_shape, device=device)

    # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
    # ourselves in which case we just need to make it broadcastable to all heads.
    if visual_embeds is not None:
        # Text and visual masks are concatenated along the sequence axis before
        # being expanded/broadcast for the attention heads.
        combined_attention_mask = torch.cat((attention_mask, visual_attention_mask), dim=-1)
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(
            combined_attention_mask, (batch_size, input_shape + visual_input_shape)
        )
    else:
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(
            attention_mask, (batch_size, input_shape)
        )

    # Embeddings module fuses the textual and (optional) visual inputs into one sequence.
    embedding_output = self.embeddings(
        input_ids=input_ids,
        position_ids=position_ids,
        token_type_ids=token_type_ids,
        inputs_embeds=inputs_embeds,
        visual_embeds=visual_embeds,
        visual_token_type_ids=visual_token_type_ids,
        image_text_alignment=image_text_alignment,
    )

    if self.bypass_transformer and visual_embeds is not None:
        # "Bypass" mode: only the text tokens go through the full encoder; the
        # visual embeddings are appended afterwards and processed by a single
        # additional layer.
        text_length = input_ids.size(1)
        text_embedding_output = embedding_output[:, :text_length, :]
        visual_embedding_output = embedding_output[:, text_length:, :]

        # NOTE(review): the third index is the scalar `text_length`, not a
        # slice — this mirrors the upstream implementation; confirm the
        # intended mask rank if this branch is exercised.
        text_extended_attention_mask = extended_attention_mask[:, :, text_length, :text_length]

        encoded_outputs = self.encoder(
            text_embedding_output,
            attention_mask=text_extended_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoded_outputs[0]
        # Re-attach the visual embeddings and run them through one extra layer.
        concatenated_input = torch.cat((sequence_output, visual_embedding_output), dim=1)
        sequence_output = self.additional_layer(concatenated_input, extended_attention_mask)
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None
    else:
        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None

    # NOTE(review): both return paths read `encoder_outputs`, which is only
    # bound in the non-bypass branch (upstream behaves the same) — verify the
    # bypass path is never combined with these return forms.
    if not return_dict:
        return (sequence_output, pooled_output) + encoder_outputs[1:]

    return BaseModelOutputWithPooling(
        last_hidden_state=sequence_output,
        pooler_output=pooled_output,
        hidden_states=encoder_outputs.hidden_states,
        attentions=encoder_outputs.attentions,
    )
@auto_docstring(
custom_intro="""
VisualBert Model with two heads on top as done during the pretraining: a `masked language modeling` head and a
`sentence-image prediction (classification)` head.
"""
)
| VisualBertModel |
python | pydantic__pydantic | pydantic/v1/errors.py | {
"start": 5587,
"end": 5727
class ____(UrlError):
    """Raised when trailing characters follow an otherwise valid URL."""

    msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}'
    code = 'url.extra'
| UrlExtraError |
python | realpython__materials | tic-tac-toe-ai-python/source_code_bonus/tic-tac-toe/frontends/browser/renderers.py | {
"start": 121,
"end": 1168
class ____(Renderer):
    """Render the tic-tac-toe game state into the browser DOM."""

    def render(self, game_state: GameState) -> None:
        """Sync every board cell and the status line with *game_state*."""
        # Redraw each cell, clearing any stale win highlight first.
        for index, mark in enumerate(game_state.grid.cells):
            cell_text = document.querySelector(f"[data-id='{index}'] text")
            cell_text.classList.remove("win")
            cell_text.innerHTML = mark if mark != " " else " "
        status = document.querySelector("#status")
        if not game_state.game_over:
            document.querySelector("#status").innerHTML = ""
            return
        # Game over: reveal the replay button and re-enable the selectors.
        document.querySelector("#replay").classList.remove("hidden")
        for dropdown in document.querySelectorAll("select"):
            dropdown.removeAttribute("disabled")
        if game_state.winner:
            status.innerHTML = f"{game_state.winner} wins \N{PARTY POPPER}"
            for index in game_state.winning_cells:
                document.querySelector(f"[data-id='{index}'] text").classList.add("win")
        elif game_state.tie:
            status.innerHTML = "Tie \N{NEUTRAL FACE}"
| BrowserRenderer |
python | tornadoweb__tornado | tornado/test/concurrent_test.py | {
"start": 2555,
"end": 2946
class ____(TCPServer):
    """Line server that upper-cases input, or reports an error if it already is."""

    @gen.coroutine
    def handle_stream(self, stream, address):
        raw = yield stream.read_until(b"\n")
        line = to_unicode(raw)
        capitalized = line.upper()
        if line == capitalized:
            stream.write(b"error\talready capitalized\n")
        else:
            # `line` still carries its trailing "\n", so none is appended here.
            stream.write(utf8("ok\t%s" % capitalized))
        stream.close()
| CapServer |
python | celery__celery | celery/loaders/default.py | {
"start": 493,
"end": 1520
class ____(BaseLoader):
    """The loader used by the default app."""

    def setup_settings(self, settingsdict):
        # Wrap the plain dict so settings are reachable as attributes.
        return DictAttribute(settingsdict)

    def read_configuration(self, fail_silently=True):
        """Read configuration from :file:`celeryconfig.py`."""
        module_name = os.environ.get('CELERY_CONFIG_MODULE',
                                     DEFAULT_CONFIG_MODULE)
        try:
            user_config = self._import_config_module(module_name)
        except ImportError:
            if not fail_silently:
                raise
            # billiard sets this if forked using execv
            if C_WNOCONF and not os.environ.get('FORKED_BY_MULTIPROCESSING'):
                warnings.warn(NotConfigured(
                    'No {module} module found! Please make sure it exists and '
                    'is available to Python.'.format(module=module_name)))
            return self.setup_settings({})
        # Import succeeded: mark the loader configured and expose the module.
        self.configured = True
        return self.setup_settings(user_config)
| Loader |
python | rq__rq | tests/fixtures.py | {
"start": 2914,
"end": 3107
class ____:
    """Test fixture wrapping a single numeric value."""

    def __init__(self, value):
        self.value = value

    def div(self, y):
        """Return the wrapped value divided by *y*."""
        return self.value / y

    @classmethod
    def divide(cls, x, y):
        # NOTE(review): despite the name this returns the *product*; the test
        # suite appears to depend on that, so behavior is preserved as-is.
        return x * y
| Number |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.