language stringclasses 1 value | repo stringclasses 346 values | path stringlengths 6 201 | class_span dict | source stringlengths 21 2.38M | target stringlengths 1 96 |
|---|---|---|---|---|---|
python | walkccc__LeetCode | solutions/3294. Convert Doubly Linked List to Array II/3294.py | {
"start": 0,
"end": 231
} | class ____:
def toArray(self, node: 'Optional[Node]') -> list[int]:
ans = []
curr = node
while curr.prev:
curr = curr.prev
while curr:
ans.append(curr.val)
curr = curr.next
return ans
| Solution |
python | dagster-io__dagster | python_modules/libraries/dagster-dbt/dagster_dbt/cloud/asset_defs.py | {
"start": 1560,
"end": 29311
} | class ____(CacheableAssetsDefinition):
def __init__(
self,
dbt_cloud_resource_def: Union[DbtCloudClientResource, ResourceDefinition],
job_id: int,
node_info_to_asset_key: Callable[[Mapping[str, Any]], AssetKey],
node_info_to_group_fn: Callable[[Mapping[str, Any]], Optional[str]],
node_info_to_freshness_policy_fn: Callable[
[Mapping[str, Any]], Optional[LegacyFreshnessPolicy]
],
node_info_to_auto_materialize_policy_fn: Callable[
[Mapping[str, Any]], Optional[AutoMaterializePolicy]
],
partitions_def: Optional[PartitionsDefinition] = None,
partition_key_to_vars_fn: Optional[Callable[[str], Mapping[str, Any]]] = None,
):
self._dbt_cloud_resource_def: ResourceDefinition = (
dbt_cloud_resource_def.get_resource_definition()
if isinstance(dbt_cloud_resource_def, DbtCloudClientResource)
else dbt_cloud_resource_def
)
self._dbt_cloud: DbtCloudClient = (
dbt_cloud_resource_def.process_config_and_initialize().get_dbt_client()
if isinstance(dbt_cloud_resource_def, DbtCloudClientResource)
else dbt_cloud_resource_def(build_init_resource_context())
)
self._job_id = job_id
self._project_id: int
self._has_generate_docs: bool
self._job_commands: list[str]
self._job_materialization_command_step: int
self._node_info_to_asset_key = node_info_to_asset_key
self._node_info_to_group_fn = node_info_to_group_fn
self._node_info_to_freshness_policy_fn = node_info_to_freshness_policy_fn
self._node_info_to_auto_materialize_policy_fn = node_info_to_auto_materialize_policy_fn
self._partitions_def = partitions_def
self._partition_key_to_vars_fn = partition_key_to_vars_fn
super().__init__(unique_id=f"dbt-cloud-{job_id}")
def compute_cacheable_data(self) -> Sequence[AssetsDefinitionCacheableData]:
manifest_json, executed_unique_ids = self._get_manifest_json_and_executed_unique_ids()
return [self._build_dbt_cloud_assets_cacheable_data(manifest_json, executed_unique_ids)]
def build_definitions(
self, data: Sequence[AssetsDefinitionCacheableData]
) -> Sequence[AssetsDefinition]:
return with_resources(
[
self._build_dbt_cloud_assets_from_cacheable_data(assets_definition_metadata)
for assets_definition_metadata in data
],
{"dbt_cloud": self._dbt_cloud_resource_def},
)
@staticmethod
def parse_dbt_command(dbt_command: str) -> Namespace:
from dbt.cli.flags import Flags, args_to_context
args = shlex.split(dbt_command)[1:]
# nasty hack to get dbt to parse the args, profiles-dir must be set to an existing directory
return Namespace(**vars(Flags(args_to_context(args + ["--profiles-dir", "."]))))
@staticmethod
def get_job_materialization_command_step(execute_steps: list[str]) -> int:
materialization_command_filter = [
DbtCloudCacheableAssetsDefinition.parse_dbt_command(command).which in ["run", "build"]
for command in execute_steps
]
if sum(materialization_command_filter) != 1:
raise DagsterDbtCloudJobInvariantViolationError(
"The dbt Cloud job must have a single `dbt run` or `dbt build` in its commands. "
f"Received commands: {execute_steps}."
)
return materialization_command_filter.index(True)
@staticmethod
def get_compile_filters(parsed_args: Namespace) -> list[str]:
dbt_compile_options: list[str] = []
selected_models = parsed_args.select or []
if selected_models:
dbt_compile_options.append(f"--select {' '.join(selected_models)}")
excluded_models = parsed_args.exclude or []
if excluded_models:
dbt_compile_options.append(f"--exclude {' '.join(excluded_models)}")
selector = getattr(parsed_args, "selector_name", None) or getattr(
parsed_args, "selector", None
)
if selector:
dbt_compile_options.append(f"--selector {selector}")
return dbt_compile_options
def _get_cached_compile_dbt_cloud_job_run(self, compile_run_id: int) -> tuple[int, int]:
# If the compile run is ongoing, allow it a grace period of 10 minutes to finish.
with suppress(Exception):
self._dbt_cloud.poll_run(run_id=compile_run_id, poll_timeout=600)
compile_run = self._dbt_cloud.get_run(
run_id=compile_run_id, include_related=["trigger", "run_steps"]
)
compile_run_status: str = compile_run["status_humanized"]
if compile_run_status != DbtCloudRunStatus.SUCCESS:
raise DagsterDbtCloudJobInvariantViolationError(
f"The cached dbt Cloud job run `{compile_run_id}` must have a status of"
f" `{DbtCloudRunStatus.SUCCESS}`. Received status: `{compile_run_status}. You can"
f" view the full status of your dbt Cloud run at {compile_run['href']}. Once it has"
" successfully completed, reload your Dagster definitions. If your run has failed,"
" you must manually refresh the cache using the `dagster-dbt"
" cache-compile-references` CLI."
)
compile_run_has_generate_docs = compile_run["trigger"]["generate_docs_override"]
compile_job_materialization_command_step = len(compile_run["run_steps"])
if compile_run_has_generate_docs:
compile_job_materialization_command_step -= 1
return compile_run_id, compile_job_materialization_command_step
def _compile_dbt_cloud_job(self, dbt_cloud_job: Mapping[str, Any]) -> tuple[int, int]:
# Retrieve the filters options from the dbt Cloud job's materialization command.
#
# There are three filters: `--select`, `--exclude`, and `--selector`.
materialization_command = self._job_commands[self._job_materialization_command_step]
parsed_args = DbtCloudCacheableAssetsDefinition.parse_dbt_command(materialization_command)
dbt_compile_options = DbtCloudCacheableAssetsDefinition.get_compile_filters(
parsed_args=parsed_args
)
# Add the partition variable as a variable to the dbt Cloud job command.
#
# If existing variables passed through the dbt Cloud job's command, an error will be
# raised. Since these are static variables anyways, they can be moved to the
# `dbt_project.yml` without loss of functionality.
#
# Since we're only doing this to generate the dependency structure, just use an arbitrary
# partition key (e.g. the last one) to retrieve the partition variable.
if parsed_args.vars and parsed_args.vars != "{}":
raise DagsterDbtCloudJobInvariantViolationError(
f"The dbt Cloud job '{dbt_cloud_job['name']}' ({dbt_cloud_job['id']}) must not have"
" variables defined from `--vars` in its `dbt run` or `dbt build` command."
" Instead, declare the variables in the `dbt_project.yml` file. Received commands:"
f" {self._job_commands}."
)
if self._partitions_def and self._partition_key_to_vars_fn:
last_partition_key = self._partitions_def.get_last_partition_key()
if last_partition_key is None:
check.failed("PartitionsDefinition has no partitions")
partition_var = self._partition_key_to_vars_fn(last_partition_key)
dbt_compile_options.append(f"--vars '{json.dumps(partition_var)}'")
# We need to retrieve the dependency structure for the assets in the dbt Cloud project.
# However, we can't just use the dependency structure from the latest run, because
# this historical structure may not be up-to-date with the current state of the project.
#
# By always doing a compile step, we can always get the latest dependency structure.
# This incurs some latency, but at least it doesn't run through the entire materialization
# process.
dbt_compile_command = f"dbt compile {' '.join(dbt_compile_options)}"
compile_run_dbt_output = self._dbt_cloud.run_job_and_poll(
job_id=self._job_id,
cause="Generating software-defined assets for Dagster.",
steps_override=[dbt_compile_command],
)
# Target the compile execution step when retrieving run artifacts, rather than assuming
# that the last step is the correct target.
#
# Here, we ignore the `dbt docs generate` step.
compile_job_materialization_command_step = len(
compile_run_dbt_output.run_details.get("run_steps", [])
)
if self._has_generate_docs:
compile_job_materialization_command_step -= 1
return compile_run_dbt_output.run_id, compile_job_materialization_command_step
def _get_manifest_json_and_executed_unique_ids(
self,
) -> tuple[Mapping[str, Any], frozenset[str]]:
"""For a given dbt Cloud job, fetch the latest run's dependency structure of executed nodes."""
# Fetch information about the job.
job = self._dbt_cloud.get_job(job_id=self._job_id)
self._project_id = job["project_id"]
self._has_generate_docs = job["generate_docs"]
# We constraint the kinds of dbt Cloud jobs that we support running.
#
# A simple constraint is that we only support jobs that run multiple steps,
# but it must contain one of either `dbt run` or `dbt build`.
#
# As a reminder, `dbt deps` is automatically run before the job's configured commands.
# And if the settings are enabled, `dbt docs generate` and `dbt source freshness` can
# automatically run after the job's configured commands.
#
# These commands that execute before and after the job's configured commands do not count
# towards the single command constraint.
self._job_commands = job["execute_steps"]
self._job_materialization_command_step = (
DbtCloudCacheableAssetsDefinition.get_job_materialization_command_step(
execute_steps=self._job_commands
)
)
# Determine whether to use a cached compile run. This should only be set up if the user is
# using a GitHub action along with their dbt project.
dbt_cloud_job_env_vars = self._dbt_cloud.get_job_environment_variables(
project_id=self._project_id, job_id=self._job_id
)
compile_run_id = (
dbt_cloud_job_env_vars.get(DAGSTER_DBT_COMPILE_RUN_ID_ENV_VAR, {})
.get("job", {})
.get("value")
)
compile_run_id, compile_job_materialization_command_step = (
# If a compile run is cached, then use it.
self._get_cached_compile_dbt_cloud_job_run(compile_run_id=int(compile_run_id))
if compile_run_id
# Otherwise, compile the dbt Cloud project in an ad-hoc manner.
else self._compile_dbt_cloud_job(dbt_cloud_job=job)
)
manifest_json = self._dbt_cloud.get_manifest(
run_id=compile_run_id, step=compile_job_materialization_command_step
)
run_results_json = self._dbt_cloud.get_run_results(
run_id=compile_run_id, step=compile_job_materialization_command_step
)
# Filter the manifest to only include the nodes that were executed.
executed_node_ids: set[str] = set(
result["unique_id"] for result in run_results_json["results"]
)
# If there are no executed nodes, then there are no assets to generate.
# Inform the user to inspect their dbt Cloud job's command.
if not executed_node_ids:
raise DagsterDbtCloudJobInvariantViolationError(
f"The dbt Cloud job '{job['name']}' ({job['id']}) does not generate any "
"software-defined assets. Ensure that your dbt project has nodes to execute, "
"and that your dbt Cloud job's materialization command has the proper filter "
f"options applied. Received commands: {self._job_commands}."
)
# sort to stabilize job snapshots
return manifest_json, frozenset(sorted(executed_node_ids))
def _build_dbt_cloud_assets_cacheable_data(
self, manifest_json: Mapping[str, Any], executed_unique_ids: frozenset[str]
) -> AssetsDefinitionCacheableData:
"""Given all of the nodes and dependencies for a dbt Cloud job, build the cacheable
representation that generate the asset definition for the job.
"""
class CustomDagsterDbtTranslator(DagsterDbtTranslator):
@classmethod
def get_asset_key(cls, dbt_resource_props): # pyright: ignore[reportIncompatibleMethodOverride]
return self._node_info_to_asset_key(dbt_resource_props)
@classmethod
def get_description(cls, dbt_resource_props): # pyright: ignore[reportIncompatibleMethodOverride]
# We shouldn't display the raw sql. Instead, inspect if dbt docs were generated,
# and attach metadata to link to the docs.
return default_description_fn(dbt_resource_props, display_raw_sql=False)
@classmethod
def get_group_name(cls, dbt_resource_props): # pyright: ignore[reportIncompatibleMethodOverride]
return self._node_info_to_group_fn(dbt_resource_props)
@classmethod
def get_auto_materialize_policy(cls, dbt_resource_props): # pyright: ignore[reportIncompatibleMethodOverride]
return self._node_info_to_auto_materialize_policy_fn(dbt_resource_props)
# generate specs for each executed node
specs = build_dbt_asset_specs(
manifest=manifest_json,
dagster_dbt_translator=CustomDagsterDbtTranslator(),
select=" ".join(
f"fqn:{'.'.join(get_node(manifest_json, unique_id)['fqn'])}"
for unique_id in executed_unique_ids
),
)
return AssetsDefinitionCacheableData(
# TODO: In the future, we should allow additional upstream assets to be specified.
keys_by_output_name={spec.key.to_python_identifier(): spec.key for spec in specs},
internal_asset_deps={
spec.key.to_python_identifier(): {dep.asset_key for dep in spec.deps}
for spec in specs
},
metadata_by_output_name={
spec.key.to_python_identifier(): self._build_dbt_cloud_assets_metadata(
get_node(
manifest_json,
spec.metadata[DAGSTER_DBT_UNIQUE_ID_METADATA_KEY],
)
)
for spec in specs
},
extra_metadata={
"job_id": self._job_id,
"job_commands": self._job_commands,
"job_materialization_command_step": self._job_materialization_command_step,
"group_names_by_output_name": {
spec.key.to_python_identifier(): spec.group_name for spec in specs
},
"fqns_by_output_name": {
spec.key.to_python_identifier(): get_node(
manifest_json,
spec.metadata[DAGSTER_DBT_UNIQUE_ID_METADATA_KEY],
)["fqn"]
for spec in specs
},
},
auto_materialize_policies_by_output_name={
spec.key.to_python_identifier(): spec.auto_materialize_policy
for spec in specs
if spec.auto_materialize_policy
},
)
def _build_dbt_cloud_assets_metadata(
self, resource_props: Mapping[str, Any]
) -> RawMetadataMapping:
metadata = {
"dbt Cloud Job": MetadataValue.url(
self._dbt_cloud.build_url_for_job(
project_id=self._project_id,
job_id=self._job_id,
)
),
}
if self._has_generate_docs:
metadata["dbt Cloud Documentation"] = MetadataValue.url(
self._dbt_cloud.build_url_for_cloud_docs(
job_id=self._job_id,
resource_type=resource_props["resource_type"],
unique_id=resource_props["unique_id"],
)
)
return metadata
def _rebuild_specs(self, cacheable_data: AssetsDefinitionCacheableData) -> Sequence[AssetSpec]:
specs = []
for id, key in (cacheable_data.keys_by_output_name or {}).items():
specs.append(
AssetSpec(
key=key,
group_name=(cacheable_data.extra_metadata or {})[
"group_names_by_output_name"
].get(id),
deps=(cacheable_data.internal_asset_deps or {}).get(id),
metadata=(cacheable_data.metadata_by_output_name or {}).get(id),
legacy_freshness_policy=(
cacheable_data.legacy_freshness_policies_by_output_name or {}
).get(id),
auto_materialize_policy=(
cacheable_data.auto_materialize_policies_by_output_name or {}
).get(id),
skippable=False,
)
)
return specs
def _build_dbt_cloud_assets_from_cacheable_data(
self, assets_definition_cacheable_data: AssetsDefinitionCacheableData
) -> AssetsDefinition:
metadata = cast("Mapping[str, Any]", assets_definition_cacheable_data.extra_metadata)
job_id = cast("int", metadata["job_id"])
job_commands = cast("list[str]", list(metadata["job_commands"]))
job_materialization_command_step = cast("int", metadata["job_materialization_command_step"])
fqns_by_output_name = cast("Mapping[str, list[str]]", metadata["fqns_by_output_name"])
@multi_asset(
name=f"dbt_cloud_job_{job_id}",
specs=self._rebuild_specs(assets_definition_cacheable_data),
partitions_def=self._partitions_def,
can_subset=True,
required_resource_keys={"dbt_cloud"},
compute_kind="dbt",
)
def _assets(context: AssetExecutionContext):
dbt_cloud = cast("DbtCloudClient", context.resources.dbt_cloud)
# Add the partition variable as a variable to the dbt Cloud job command.
dbt_options: list[str] = []
if context.has_partition_key and self._partition_key_to_vars_fn:
partition_var = self._partition_key_to_vars_fn(context.partition_key)
dbt_options.append(f"--vars '{json.dumps(partition_var)}'")
# Prepare the materialization step to be overriden with the selection filter
materialization_command = job_commands[job_materialization_command_step]
# Map the selected outputs to dbt models that should be materialized.
#
# From version 1.5.0 dbt allows multiple select args to be used in command,
# so we cannot just add our arg as last one to be used and need to remove
# both command-native --select args and --selector arg to run dagster-generated
# subset of models
#
# See https://docs.getdbt.com/reference/node-selection/syntax for details.
if context.is_subset:
selected_models = [
".".join(fqns_by_output_name[output_name])
for output_name in context.op_execution_context.selected_output_names
# outputs corresponding to asset checks from dbt tests won't be in this dict
if output_name in fqns_by_output_name
]
dbt_options.append(f"--select {' '.join(sorted(selected_models))}")
parser = ArgumentParser(description="Parse selection args from dbt command")
# Select arg should have nargs="+", but we probably want dbt itself to deal with it
parser.add_argument("-s", "--select", nargs="*", action="append")
parser.add_argument("--selector", nargs="*")
split_materialization_command = shlex.split(materialization_command)
_, non_selection_command_parts = parser.parse_known_args(
split_materialization_command
)
materialization_command = " ".join(non_selection_command_parts)
job_commands[job_materialization_command_step] = (
f"{materialization_command} {' '.join(dbt_options)}".strip()
)
# Run the dbt Cloud job to rematerialize the assets.
dbt_cloud_output = dbt_cloud.run_job_and_poll(
job_id=job_id,
cause=f"Materializing software-defined assets in Dagster run {context.run.run_id[:8]}",
steps_override=job_commands,
)
# Target the materialization step when retrieving run artifacts, rather than assuming
# that the last step is the correct target.
#
# We ignore the commands in front of the materialization command. And again, we ignore
# the `dbt docs generate` step.
materialization_command_step = len(dbt_cloud_output.run_details.get("run_steps", []))
materialization_command_step -= len(job_commands) - job_materialization_command_step - 1
if dbt_cloud_output.run_details.get("job", {}).get("generate_docs"):
materialization_command_step -= 1
# TODO: Assume the run completely fails or completely succeeds.
# In the future, we can relax this assumption.
manifest_json = dbt_cloud.get_manifest(
run_id=dbt_cloud_output.run_id, step=materialization_command_step
)
run_results_json = self._dbt_cloud.get_run_results(
run_id=dbt_cloud_output.run_id, step=materialization_command_step
)
for result in run_results_json.get("results", []):
yield from result_to_events(
result=result,
docs_url=dbt_cloud_output.docs_url,
node_info_to_asset_key=self._node_info_to_asset_key,
manifest_json=manifest_json,
# TODO: In the future, allow arbitrary mappings to Dagster output metadata from
# the dbt metadata.
extra_metadata=None,
generate_asset_outputs=True,
)
return _assets
@beta
@beta_param(param="partitions_def")
@beta_param(param="partition_key_to_vars_fn")
def load_assets_from_dbt_cloud_job(
dbt_cloud: Union[DbtCloudClientResource, ResourceDefinition],
job_id: int,
node_info_to_asset_key: Callable[[Mapping[str, Any]], AssetKey] = default_asset_key_fn,
node_info_to_group_fn: Callable[
[Mapping[str, Any]], Optional[str]
] = default_group_from_dbt_resource_props,
node_info_to_auto_materialize_policy_fn: Callable[
[Mapping[str, Any]], Optional[AutoMaterializePolicy]
] = default_auto_materialize_policy_fn,
partitions_def: Optional[PartitionsDefinition] = None,
partition_key_to_vars_fn: Optional[Callable[[str], Mapping[str, Any]]] = None,
) -> CacheableAssetsDefinition:
"""Loads a set of dbt models, managed by a dbt Cloud job, into Dagster assets. In order to
determine the set of dbt models, the project is compiled to generate the necessary artifacts
that define the dbt models and their dependencies.
One Dagster asset is created for each dbt model.
Args:
dbt_cloud (ResourceDefinition): The dbt Cloud resource to use to connect to the dbt Cloud API.
job_id (int): The ID of the dbt Cloud job to load assets from.
node_info_to_asset_key: (Mapping[str, Any] -> AssetKey): A function that takes a dictionary
of dbt metadata and returns the AssetKey that you want to represent a given model or
source. By default: dbt model -> AssetKey([model_name]) and
dbt source -> AssetKey([source_name, table_name])
node_info_to_group_fn (Dict[str, Any] -> Optional[str]): A function that takes a
dictionary of dbt node info and returns the group that this node should be assigned to.
node_info_to_auto_materialize_policy_fn (Dict[str, Any] -> Optional[AutoMaterializePolicy]):
A function that takes a dictionary of dbt node info and optionally returns a AutoMaterializePolicy
that should be applied to this node. By default, AutoMaterializePolicies will be created from
config applied to dbt models, i.e.:
`dagster_auto_materialize_policy={"type": "lazy"}` will result in that model being assigned
`AutoMaterializePolicy.lazy()`
node_info_to_definition_metadata_fn (Dict[str, Any] -> Optional[Dict[str, RawMetadataMapping]]):
A function that takes a dictionary of dbt node info and optionally returns a dictionary
of metadata to be attached to the corresponding definition. This is added to the default
metadata assigned to the node, which consists of the node's schema (if present).
partitions_def (Optional[PartitionsDefinition]): Defines the set of partition keys that
compose the dbt assets.
partition_key_to_vars_fn (Optional[str -> Dict[str, Any]]): A function to translate a given
partition key (e.g. '2022-01-01') to a dictionary of vars to be passed into the dbt
invocation (e.g. {"run_date": "2022-01-01"})
Returns:
CacheableAssetsDefinition: A definition for the loaded assets.
Examples:
.. code-block:: python
from dagster import repository
from dagster_dbt import dbt_cloud_resource, load_assets_from_dbt_cloud_job
DBT_CLOUD_JOB_ID = 1234
dbt_cloud = dbt_cloud_resource.configured(
{
"auth_token": {"env": "DBT_CLOUD_API_TOKEN"},
"account_id": {"env": "DBT_CLOUD_ACCOUNT_ID"},
}
)
dbt_cloud_assets = load_assets_from_dbt_cloud_job(
dbt_cloud=dbt_cloud, job_id=DBT_CLOUD_JOB_ID
)
@repository
def dbt_cloud_sandbox():
return [dbt_cloud_assets]
"""
if partition_key_to_vars_fn:
check.invariant(
partitions_def is not None,
"Cannot supply a `partition_key_to_vars_fn` without a `partitions_def`.",
)
return DbtCloudCacheableAssetsDefinition(
dbt_cloud_resource_def=dbt_cloud,
job_id=job_id,
node_info_to_asset_key=node_info_to_asset_key,
node_info_to_group_fn=node_info_to_group_fn,
node_info_to_freshness_policy_fn=lambda _: None,
node_info_to_auto_materialize_policy_fn=node_info_to_auto_materialize_policy_fn,
partitions_def=partitions_def,
partition_key_to_vars_fn=partition_key_to_vars_fn,
)
| DbtCloudCacheableAssetsDefinition |
python | pallets__quart | tests/test_app.py | {
"start": 874,
"end": 12463
} | class ____(Exception):
pass
def test_endpoint_overwrite() -> None:
app = Quart(__name__)
def route() -> str:
return ""
def route2() -> str:
return ""
async def route3() -> str:
return ""
app.add_url_rule("/a", "index", route, methods=["GET"])
app.add_url_rule(
"/a/a", "index", route, methods=["GET"]
) # Should not assert, as same view func
with pytest.raises(AssertionError):
app.add_url_rule("/a/b", "index", route2, methods=["GET"])
app.add_url_rule("/b", "async", route3, methods=["GET"])
app.add_url_rule(
"/b/a", "async", route3, methods=["GET"]
) # Should not assert, as same view func
with pytest.raises(AssertionError):
app.add_url_rule("/b/b", "async", route2, methods=["GET"])
@pytest.mark.parametrize(
"methods, required_methods, automatic_options",
[
({}, {}, False),
({}, {}, True),
({"GET", "PUT"}, {}, False),
({"GET", "PUT"}, {}, True),
({}, {"GET", "PUT"}, False),
({}, {"GET", "PUT"}, True),
],
)
def test_add_url_rule_methods(
methods: set[str], required_methods: set[str], automatic_options: bool
) -> None:
app = Quart(__name__)
def route() -> str:
return ""
route.methods = methods # type: ignore
route.required_methods = required_methods # type: ignore
non_func_methods = {"PATCH"} if not methods else None
app.add_url_rule(
"/",
"end",
route,
methods=non_func_methods,
provide_automatic_options=automatic_options,
)
result = {"PATCH"} if not methods else set()
result.update(methods)
result.update(required_methods)
if "GET" in result:
result.add("HEAD")
assert app.url_map._rules_by_endpoint["end"][0].methods == result
@pytest.mark.parametrize(
"methods, arg_automatic, func_automatic, expected_methods, expected_automatic",
[
({"GET"}, True, None, {"HEAD", "GET"}, True),
({"GET"}, None, None, {"HEAD", "GET", "OPTIONS"}, True),
({"GET"}, None, True, {"HEAD", "GET"}, True),
({"GET", "OPTIONS"}, None, None, {"HEAD", "GET", "OPTIONS"}, False),
({"GET"}, False, True, {"HEAD", "GET"}, False),
({"GET"}, None, False, {"HEAD", "GET"}, False),
],
)
def test_add_url_rule_automatic_options(
methods: set[str],
arg_automatic: bool | None,
func_automatic: bool | None,
expected_methods: set[str],
expected_automatic: bool,
) -> None:
app = Quart(__name__)
def route() -> str:
return ""
route.provide_automatic_options = func_automatic # type: ignore
app.add_url_rule(
"/", "end", route, methods=methods, provide_automatic_options=arg_automatic
)
assert app.url_map._rules_by_endpoint["end"][0].methods == expected_methods
assert (
app.url_map._rules_by_endpoint["end"][0].provide_automatic_options # type: ignore
== expected_automatic
)
async def test_host_matching() -> None:
app = Quart(__name__, static_host="quart.com", host_matching=True)
@app.route("/", host="quart.com")
async def route() -> str:
return ""
test_client = app.test_client()
response = await test_client.get("/", headers={"host": "quart.com"})
assert response.status_code == 200
response = await test_client.get("/", headers={"host": "localhost"})
assert response.status_code == 404
async def test_subdomain() -> None:
app = Quart(__name__, subdomain_matching=True)
app.config["SERVER_NAME"] = "quart.com"
@app.route("/", subdomain="<subdomain>")
async def route(subdomain: str) -> str:
return subdomain
test_client = app.test_client()
response = await test_client.get("/", headers={"host": "sub.quart.com"})
assert (await response.get_data(as_text=True)) == "sub"
@pytest.mark.parametrize(
"result, expected, raises",
[
(None, None, True),
((None, 201), None, True),
(TEST_RESPONSE, TEST_RESPONSE, False),
(
("hello", {"X-Header": "bob"}),
Response("hello", headers={"X-Header": "bob"}),
False,
),
(("hello", 201), Response("hello", 201), False),
(
("hello", 201, {"X-Header": "bob"}),
Response("hello", 201, headers={"X-Header": "bob"}),
False,
),
(
(WerkzeugResponse("hello"), 201, {"X-Header": "bob"}),
WerkzeugResponse("hello", 201, {"X-Header": "bob"}),
False,
),
(InternalServerError(), InternalServerError().get_response(), False),
((val for val in "abcd"), Response(val for val in "abcd"), False),
(int, None, True),
],
)
async def test_make_response(
result: ResponseReturnValue, expected: Response | WerkzeugResponse, raises: bool
) -> None:
app = Quart(__name__)
app.config["RESPONSE_TIMEOUT"] = None
try:
response = await app.make_response(result)
except TypeError:
if not raises:
raise
else:
assert set(response.headers.keys()) == set(expected.headers.keys())
assert response.status_code == expected.status_code
if isinstance(response, Response):
assert (await response.get_data()) == (await expected.get_data()) # type: ignore
elif isinstance(response, WerkzeugResponse):
assert response.get_data() == expected.get_data()
@pytest.fixture(name="basic_app")
def _basic_app() -> Quart:
app = Quart(__name__)
@app.route("/")
def route() -> str:
return ""
@app.route("/exception/")
def exception() -> str:
raise Exception()
return app
async def test_app_route_exception(basic_app: Quart) -> None:
test_client = basic_app.test_client()
response = await test_client.get("/exception/")
assert response.status_code == 500
async def test_app_before_request_exception(basic_app: Quart) -> None:
@basic_app.before_request
def before() -> None:
raise Exception()
test_client = basic_app.test_client()
response = await test_client.get("/")
assert response.status_code == 500
async def test_app_after_request_exception(basic_app: Quart) -> None:
@basic_app.after_request
def after(_: ResponseTypes) -> None:
raise Exception()
test_client = basic_app.test_client()
response = await test_client.get("/")
assert response.status_code == 500
async def test_app_after_request_handler_exception(basic_app: Quart) -> None:
@basic_app.after_request
def after(_: ResponseTypes) -> None:
raise Exception()
test_client = basic_app.test_client()
response = await test_client.get("/exception/")
assert response.status_code == 500
async def test_app_handle_request_asyncio_cancelled_error(
http_scope: HTTPScope,
) -> None:
app = Quart(__name__)
@app.route("/")
async def index() -> NoReturn:
raise asyncio.CancelledError()
request = app.request_class(
"GET",
"http",
"/",
b"",
Headers([("host", "quart.com")]),
"",
"1.1",
http_scope,
send_push_promise=no_op_push,
)
with pytest.raises(asyncio.CancelledError):
await app.handle_request(request)
async def test_app_handle_websocket_asyncio_cancelled_error(
websocket_scope: WebsocketScope,
) -> None:
app = Quart(__name__)
@app.websocket("/")
async def index() -> NoReturn:
raise asyncio.CancelledError()
websocket = app.websocket_class(
"/",
b"",
"wss",
Headers([("host", "quart.com")]),
"",
"1.1",
None,
None,
None,
None,
None,
websocket_scope,
)
with pytest.raises(asyncio.CancelledError):
await app.handle_websocket(websocket)
@pytest.fixture(name="session_app", scope="function")
def _session_app() -> Quart:
app = Quart(__name__)
app.session_interface = AsyncMock(spec=SessionInterface)
app.session_interface.open_session.return_value = SecureCookieSession()
app.session_interface.is_null_session.return_value = False
@app.route("/")
async def route() -> str:
session["a"] = "b"
return ""
@app.websocket("/ws/")
async def ws() -> None:
session["a"] = "b"
await websocket.accept()
await websocket.send("")
@app.websocket("/ws_return/")
async def ws_return() -> str:
session["a"] = "b"
return ""
return app
async def test_app_session(session_app: Quart) -> None:
test_client = session_app.test_client()
await test_client.get("/")
session_app.session_interface.open_session.assert_called() # type: ignore
session_app.session_interface.save_session.assert_called() # type: ignore
async def test_app_session_websocket(session_app: Quart) -> None:
test_client = session_app.test_client()
async with test_client.websocket("/ws/") as test_websocket:
await test_websocket.receive()
session_app.session_interface.open_session.assert_called() # type: ignore
session_app.session_interface.save_session.assert_called() # type: ignore
async def test_app_session_websocket_return(session_app: Quart) -> None:
test_client = session_app.test_client()
async with test_client.websocket("/ws_return/") as test_websocket:
with pytest.raises(WebsocketResponseError):
await test_websocket.receive()
session_app.session_interface.open_session.assert_called() # type: ignore
session_app.session_interface.save_session.assert_called() # type: ignore
@pytest.mark.parametrize(
"debug, testing, raises",
[
(False, False, False),
(True, False, True),
(False, True, True),
(True, True, True),
],
)
async def test_propagation(
debug: bool, testing: bool, raises: bool, http_scope: HTTPScope
) -> None:
app = Quart(__name__)
@app.route("/")
async def exception() -> ResponseReturnValue:
raise SimpleError()
app.debug = debug
app.testing = testing
test_client = app.test_client()
if raises:
with pytest.raises(SimpleError):
await app.handle_request(
Request(
"GET",
"http",
"/",
b"",
Headers(),
"",
"1.1",
http_scope,
send_push_promise=no_op_push,
)
)
else:
response = await test_client.get("/")
assert response.status_code == 500
async def test_test_app() -> None:
startup = False
shutdown = False
serving = []
app = Quart(__name__)
@app.before_serving
async def before() -> None:
nonlocal startup
startup = True
@app.after_serving
async def after() -> None:
nonlocal shutdown
shutdown = True
@app.while_serving
async def lifespan() -> AsyncGenerator[None, None]:
nonlocal serving
serving.append(1)
yield
serving.append(2)
@app.route("/")
async def index() -> str:
return ""
async with app.test_app() as test_app:
assert startup
test_client = test_app.test_client()
await test_client.get("/")
assert not shutdown
assert serving == [1]
assert shutdown
assert serving == [1, 2]
| SimpleError |
python | doocs__leetcode | solution/0900-0999/0941.Valid Mountain Array/Solution.py | {
"start": 0,
"end": 324
} | class ____:
def validMountainArray(self, arr: List[int]) -> bool:
n = len(arr)
if n < 3:
return False
i, j = 0, n - 1
while i + 1 < n - 1 and arr[i] < arr[i + 1]:
i += 1
while j - 1 > 0 and arr[j - 1] > arr[j]:
j -= 1
return i == j
| Solution |
python | kamyu104__LeetCode-Solutions | Python/arranging-coins.py | {
"start": 46,
"end": 269
} | class ____(object):
def arrangeCoins(self, n):
"""
:type n: int
:rtype: int
"""
return int((math.sqrt(8*n+1)-1) / 2) # sqrt is O(logn) time.
# Time: O(logn)
# Space: O(1)
| Solution |
python | scipy__scipy | scipy/stats/tests/test_multivariate.py | {
"start": 78094,
"end": 86104
} | class ____:
def test_frozen_dirichlet(self):
rng = np.random.default_rng(2846)
n = rng.integers(1, 32)
alpha = rng.uniform(10e-10, 100, n)
d = dirichlet(alpha)
assert_equal(d.var(), dirichlet.var(alpha))
assert_equal(d.mean(), dirichlet.mean(alpha))
assert_equal(d.entropy(), dirichlet.entropy(alpha))
num_tests = 10
for i in range(num_tests):
x = rng.uniform(10e-10, 100, n)
x /= np.sum(x)
assert_equal(d.pdf(x[:-1]), dirichlet.pdf(x[:-1], alpha))
assert_equal(d.logpdf(x[:-1]), dirichlet.logpdf(x[:-1], alpha))
def test_numpy_rvs_shape_compatibility(self):
rng = np.random.default_rng(2846)
alpha = np.array([1.0, 2.0, 3.0])
x = rng.dirichlet(alpha, size=7)
assert_equal(x.shape, (7, 3))
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
dirichlet.pdf(x.T, alpha)
dirichlet.pdf(x.T[:-1], alpha)
dirichlet.logpdf(x.T, alpha)
dirichlet.logpdf(x.T[:-1], alpha)
def test_alpha_with_zeros(self):
rng = np.random.default_rng(2846)
alpha = [1.0, 0.0, 3.0]
# don't pass invalid alpha to np.random.dirichlet
x = rng.dirichlet(np.maximum(1e-9, alpha), size=7).T
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_alpha_with_negative_entries(self):
rng = np.random.default_rng(2846)
alpha = [1.0, -2.0, 3.0]
# don't pass invalid alpha to np.random.dirichlet
x = rng.dirichlet(np.maximum(1e-9, alpha), size=7).T
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_data_with_zeros(self):
alpha = np.array([1.0, 2.0, 3.0, 4.0])
x = np.array([0.1, 0.0, 0.2, 0.7])
dirichlet.pdf(x, alpha)
dirichlet.logpdf(x, alpha)
alpha = np.array([1.0, 1.0, 1.0, 1.0])
assert_almost_equal(dirichlet.pdf(x, alpha), 6)
assert_almost_equal(dirichlet.logpdf(x, alpha), np.log(6))
def test_data_with_zeros_and_small_alpha(self):
alpha = np.array([1.0, 0.5, 3.0, 4.0])
x = np.array([0.1, 0.0, 0.2, 0.7])
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_data_with_negative_entries(self):
alpha = np.array([1.0, 2.0, 3.0, 4.0])
x = np.array([0.1, -0.1, 0.3, 0.7])
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_data_with_too_large_entries(self):
alpha = np.array([1.0, 2.0, 3.0, 4.0])
x = np.array([0.1, 1.1, 0.3, 0.7])
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_data_too_deep_c(self):
alpha = np.array([1.0, 2.0, 3.0])
x = np.full((2, 7, 7), 1 / 14)
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_alpha_too_deep(self):
alpha = np.array([[1.0, 2.0], [3.0, 4.0]])
x = np.full((2, 2, 7), 1 / 4)
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_alpha_correct_depth(self):
alpha = np.array([1.0, 2.0, 3.0])
x = np.full((3, 7), 1 / 3)
dirichlet.pdf(x, alpha)
dirichlet.logpdf(x, alpha)
def test_non_simplex_data(self):
alpha = np.array([1.0, 2.0, 3.0])
x = np.full((3, 7), 1 / 2)
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_data_vector_too_short(self):
alpha = np.array([1.0, 2.0, 3.0, 4.0])
x = np.full((2, 7), 1 / 2)
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_data_vector_too_long(self):
alpha = np.array([1.0, 2.0, 3.0, 4.0])
x = np.full((5, 7), 1 / 5)
assert_raises(ValueError, dirichlet.pdf, x, alpha)
assert_raises(ValueError, dirichlet.logpdf, x, alpha)
def test_mean_var_cov(self):
# Reference values calculated by hand and confirmed with Mathematica, e.g.
# `Covariance[DirichletDistribution[{ 1, 0.8, 0.2, 10^-300}]]`
alpha = np.array([1., 0.8, 0.2])
d = dirichlet(alpha)
expected_mean = [0.5, 0.4, 0.1]
expected_var = [1. / 12., 0.08, 0.03]
expected_cov = [
[ 1. / 12, -1. / 15, -1. / 60],
[-1. / 15, 2. / 25, -1. / 75],
[-1. / 60, -1. / 75, 3. / 100],
]
assert_array_almost_equal(d.mean(), expected_mean)
assert_array_almost_equal(d.var(), expected_var)
assert_array_almost_equal(d.cov(), expected_cov)
def test_scalar_values(self):
alpha = np.array([0.2])
d = dirichlet(alpha)
# For alpha of length 1, mean and var should be scalar instead of array
assert_equal(d.mean().ndim, 0)
assert_equal(d.var().ndim, 0)
assert_equal(d.pdf([1.]).ndim, 0)
assert_equal(d.logpdf([1.]).ndim, 0)
def test_K_and_K_minus_1_calls_equal(self):
# Test that calls with K and K-1 entries yield the same results.
rng = np.random.default_rng(2846)
n = rng.integers(1, 32)
alpha = rng.uniform(10e-10, 100, n)
d = dirichlet(alpha)
num_tests = 10
for i in range(num_tests):
x = rng.uniform(10e-10, 100, n)
x /= np.sum(x)
assert_almost_equal(d.pdf(x[:-1]), d.pdf(x))
def test_multiple_entry_calls(self):
# Test that calls with multiple x vectors as matrix work
rng = np.random.default_rng(2846)
n = rng.integers(1, 32)
alpha = rng.uniform(10e-10, 100, n)
d = dirichlet(alpha)
num_tests = 10
num_multiple = 5
xm = None
for i in range(num_tests):
for m in range(num_multiple):
x = rng.uniform(10e-10, 100, n)
x /= np.sum(x)
if xm is not None:
xm = np.vstack((xm, x))
else:
xm = x
rm = d.pdf(xm.T)
rs = None
for xs in xm:
r = d.pdf(xs)
if rs is not None:
rs = np.append(rs, r)
else:
rs = r
assert_array_almost_equal(rm, rs)
def test_2D_dirichlet_is_beta(self):
rng = np.random.default_rng(2846)
alpha = rng.uniform(10e-10, 100, 2)
d = dirichlet(alpha)
b = beta(alpha[0], alpha[1])
num_tests = 10
for i in range(num_tests):
x = rng.uniform(10e-10, 100, 2)
x /= np.sum(x)
assert_almost_equal(b.pdf(x), d.pdf([x]))
assert_almost_equal(b.mean(), d.mean()[0])
assert_almost_equal(b.var(), d.var()[0])
def test_multivariate_normal_dimensions_mismatch():
# Regression test for GH #3493. Check that setting up a PDF with a mean of
# length M and a covariance matrix of size (N, N), where M != N, raises a
# ValueError with an informative error message.
mu = np.array([0.0, 0.0])
sigma = np.array([[1.0]])
assert_raises(ValueError, multivariate_normal, mu, sigma)
# A simple check that the right error message was passed along. Checking
# that the entire message is there, word for word, would be somewhat
# fragile, so we just check for the leading part.
try:
multivariate_normal(mu, sigma)
except ValueError as e:
msg = "Dimension mismatch"
assert_equal(str(e)[:len(msg)], msg)
| TestDirichlet |
python | has2k1__plotnine | plotnine/geoms/geom_qq_line.py | {
"start": 77,
"end": 428
} | class ____(geom_path):
"""
Quantile-Quantile Line plot
{usage}
Parameters
----------
{common_parameters}
See Also
--------
plotnine.stat_qq_line : The default `stat` for this `geom`.
"""
DEFAULT_PARAMS = {
"stat": "qq_line",
"position": "identity",
"na_rm": False,
}
| geom_qq_line |
python | getsentry__sentry | tests/acceptance/test_teams_list.py | {
"start": 152,
"end": 1566
} | class ____(AcceptanceTestCase):
def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
self.team = self.create_team(organization=self.org, name="Mariachi Band")
self.project = self.create_project(organization=self.org, teams=[self.team], name="Bengal")
self.create_member(user=self.user, organization=self.org, role="owner", teams=[self.team])
self.login_as(self.user)
# this should redirect to /settings/{}/teams/
self.path = f"/organizations/{self.org.slug}/teams/"
def test_simple(self) -> None:
self.project.update(first_event=timezone.now())
self.browser.get(self.path)
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
self.browser.wait_until_test_id("team-list")
# team details link
self.browser.click('a[data-test-id="team-link"]')
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
# Click projects tab
self.browser.click('[role="tablist"] li:nth-child(2) a')
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
# Click notifications tab
self.browser.click('[role="tablist"] li:nth-child(3) a')
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
| TeamsListTest |
python | redis__redis-py | redis/auth/token_manager.py | {
"start": 3553,
"end": 12018
} | class ____:
def __init__(
self, identity_provider: IdentityProviderInterface, config: TokenManagerConfig
):
self._idp = identity_provider
self._config = config
self._next_timer = None
self._listener = None
self._init_timer = None
self._retries = 0
def __del__(self):
logger.info("Token manager are disposed")
self.stop()
def start(
self,
listener: CredentialsListener,
skip_initial: bool = False,
) -> Callable[[], None]:
self._listener = listener
try:
loop = asyncio.get_running_loop()
except RuntimeError:
# Run loop in a separate thread to unblock main thread.
loop = asyncio.new_event_loop()
thread = threading.Thread(
target=_start_event_loop_in_thread, args=(loop,), daemon=True
)
thread.start()
# Event to block for initial execution.
init_event = asyncio.Event()
self._init_timer = loop.call_later(
0, self._renew_token, skip_initial, init_event
)
logger.info("Token manager started")
# Blocks in thread-safe manner.
asyncio.run_coroutine_threadsafe(init_event.wait(), loop).result()
return self.stop
async def start_async(
self,
listener: CredentialsListener,
block_for_initial: bool = False,
initial_delay_in_ms: float = 0,
skip_initial: bool = False,
) -> Callable[[], None]:
self._listener = listener
loop = asyncio.get_running_loop()
init_event = asyncio.Event()
# Wraps the async callback with async wrapper to schedule with loop.call_later()
wrapped = _async_to_sync_wrapper(
loop, self._renew_token_async, skip_initial, init_event
)
self._init_timer = loop.call_later(initial_delay_in_ms / 1000, wrapped)
logger.info("Token manager started")
if block_for_initial:
await init_event.wait()
return self.stop
def stop(self):
if self._init_timer is not None:
self._init_timer.cancel()
if self._next_timer is not None:
self._next_timer.cancel()
def acquire_token(self, force_refresh=False) -> TokenResponse:
try:
token = self._idp.request_token(force_refresh)
except RequestTokenErr as e:
if self._retries < self._config.get_retry_policy().get_max_attempts():
self._retries += 1
sleep(self._config.get_retry_policy().get_delay_in_ms() / 1000)
return self.acquire_token(force_refresh)
else:
raise e
self._retries = 0
return TokenResponse(token)
async def acquire_token_async(self, force_refresh=False) -> TokenResponse:
try:
token = self._idp.request_token(force_refresh)
except RequestTokenErr as e:
if self._retries < self._config.get_retry_policy().get_max_attempts():
self._retries += 1
await asyncio.sleep(
self._config.get_retry_policy().get_delay_in_ms() / 1000
)
return await self.acquire_token_async(force_refresh)
else:
raise e
self._retries = 0
return TokenResponse(token)
def _calculate_renewal_delay(self, expire_date: float, issue_date: float) -> float:
delay_for_lower_refresh = self._delay_for_lower_refresh(expire_date)
delay_for_ratio_refresh = self._delay_for_ratio_refresh(expire_date, issue_date)
delay = min(delay_for_ratio_refresh, delay_for_lower_refresh)
return 0 if delay < 0 else delay / 1000
def _delay_for_lower_refresh(self, expire_date: float):
return (
expire_date
- self._config.get_lower_refresh_bound_millis()
- (datetime.now(timezone.utc).timestamp() * 1000)
)
def _delay_for_ratio_refresh(self, expire_date: float, issue_date: float):
token_ttl = expire_date - issue_date
refresh_before = token_ttl - (
token_ttl * self._config.get_expiration_refresh_ratio()
)
return (
expire_date
- refresh_before
- (datetime.now(timezone.utc).timestamp() * 1000)
)
def _renew_token(
self, skip_initial: bool = False, init_event: asyncio.Event = None
):
"""
Task to renew token from identity provider.
Schedules renewal tasks based on token TTL.
"""
try:
token_res = self.acquire_token(force_refresh=True)
delay = self._calculate_renewal_delay(
token_res.get_token().get_expires_at_ms(),
token_res.get_token().get_received_at_ms(),
)
if token_res.get_token().is_expired():
raise TokenRenewalErr("Requested token is expired")
if self._listener.on_next is None:
logger.warning(
"No registered callback for token renewal task. Renewal cancelled"
)
return
if not skip_initial:
try:
self._listener.on_next(token_res.get_token())
except Exception as e:
raise TokenRenewalErr(e)
if delay <= 0:
return
loop = asyncio.get_running_loop()
self._next_timer = loop.call_later(delay, self._renew_token)
logger.info(f"Next token renewal scheduled in {delay} seconds")
return token_res
except Exception as e:
if self._listener.on_error is None:
raise e
self._listener.on_error(e)
finally:
if init_event:
init_event.set()
async def _renew_token_async(
self, skip_initial: bool = False, init_event: asyncio.Event = None
):
"""
Async task to renew tokens from identity provider.
Schedules renewal tasks based on token TTL.
"""
try:
token_res = await self.acquire_token_async(force_refresh=True)
delay = self._calculate_renewal_delay(
token_res.get_token().get_expires_at_ms(),
token_res.get_token().get_received_at_ms(),
)
if token_res.get_token().is_expired():
raise TokenRenewalErr("Requested token is expired")
if self._listener.on_next is None:
logger.warning(
"No registered callback for token renewal task. Renewal cancelled"
)
return
if not skip_initial:
try:
await self._listener.on_next(token_res.get_token())
except Exception as e:
raise TokenRenewalErr(e)
if delay <= 0:
return
loop = asyncio.get_running_loop()
wrapped = _async_to_sync_wrapper(loop, self._renew_token_async)
logger.info(f"Next token renewal scheduled in {delay} seconds")
loop.call_later(delay, wrapped)
except Exception as e:
if self._listener.on_error is None:
raise e
await self._listener.on_error(e)
finally:
if init_event:
init_event.set()
def _async_to_sync_wrapper(loop, coro_func, *args, **kwargs):
"""
Wraps an asynchronous function so it can be used with loop.call_later.
:param loop: The event loop in which the coroutine will be executed.
:param coro_func: The coroutine function to wrap.
:param args: Positional arguments to pass to the coroutine function.
:param kwargs: Keyword arguments to pass to the coroutine function.
:return: A regular function suitable for loop.call_later.
"""
def wrapped():
# Schedule the coroutine in the event loop
asyncio.ensure_future(coro_func(*args, **kwargs), loop=loop)
return wrapped
def _start_event_loop_in_thread(event_loop: asyncio.AbstractEventLoop):
"""
Starts event loop in a thread.
Used to be able to schedule tasks using loop.call_later.
:param event_loop:
:return:
"""
asyncio.set_event_loop(event_loop)
event_loop.run_forever()
| TokenManager |
python | geekcomputers__Python | venv/Lib/site-packages/pip/_internal/cli/index_command.py | {
"start": 1361,
"end": 4508
} | class ____(CommandContextMixIn):
"""
A class mixin for command classes needing _build_session().
"""
def __init__(self) -> None:
super().__init__()
self._session: Optional["PipSession"] = None
@classmethod
def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
"""Return a list of index urls from user-provided options."""
index_urls = []
if not getattr(options, "no_index", False):
url = getattr(options, "index_url", None)
if url:
index_urls.append(url)
urls = getattr(options, "extra_index_urls", None)
if urls:
index_urls.extend(urls)
# Return None rather than an empty list
return index_urls or None
def get_default_session(self, options: Values) -> "PipSession":
"""Get a default-managed session."""
if self._session is None:
self._session = self.enter_context(self._build_session(options))
# there's no type annotation on requests.Session, so it's
# automatically ContextManager[Any] and self._session becomes Any,
# then https://github.com/python/mypy/issues/7696 kicks in
assert self._session is not None
return self._session
def _build_session(
self,
options: Values,
retries: Optional[int] = None,
timeout: Optional[int] = None,
) -> "PipSession":
from pip._internal.network.session import PipSession
cache_dir = options.cache_dir
assert not cache_dir or os.path.isabs(cache_dir)
if "legacy-certs" not in options.deprecated_features_enabled:
ssl_context = _create_truststore_ssl_context()
else:
ssl_context = None
session = PipSession(
cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
index_urls=self._get_index_urls(options),
ssl_context=ssl_context,
)
# Handle custom ca-bundles from the user
if options.cert:
session.verify = options.cert
# Handle SSL client certificate
if options.client_cert:
session.cert = options.client_cert
# Handle timeouts
if options.timeout or timeout:
session.timeout = timeout if timeout is not None else options.timeout
# Handle configured proxies
if options.proxy:
session.proxies = {
"http": options.proxy,
"https": options.proxy,
}
session.trust_env = False
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
session.auth.keyring_provider = options.keyring_provider
return session
def _pip_self_version_check(session: "PipSession", options: Values) -> None:
from pip._internal.self_outdated_check import pip_self_version_check as check
check(session, options)
| SessionCommandMixin |
python | getsentry__sentry | src/sentry/shared_integrations/client/base.py | {
"start": 971,
"end": 13700
} | class ____:
base_url: str = ""
allow_redirects: bool | None = None
integration_type: str # abstract
logger = logging.getLogger(__name__)
metrics_prefix: str | None = None
cache_time = 900
page_size: int = 100
page_number_limit = 10
integration_name: str
# Timeout for both the connect and the read timeouts.
# See: https://requests.readthedocs.io/en/latest/user/advanced/#timeouts
timeout: int = 30
@property
def name(self) -> str:
return getattr(self, f"{self.integration_type}_name")
def __init__(
self,
integration_id: int | None = None,
verify_ssl: bool = True,
logging_context: Mapping[str, Any] | None = None,
) -> None:
self.verify_ssl = verify_ssl
self.logging_context = logging_context
self.integration_id = integration_id
def __enter__(self) -> Self:
return self
def __exit__(self, exc_type: type[Exception], exc_value: Exception, traceback: Any) -> None:
# TODO(joshuarli): Look into reusing a SafeSession, and closing it here.
# Don't want to make the change until I completely understand urllib3
# machinery + how we override it, possibly do this along with urllib3
# upgrade.
pass
def track_response_data(
self,
code: str | int,
error: Exception | None = None,
resp: Response | None = None,
extra: Mapping[str, str] | None = None,
) -> None:
metrics.incr(
f"{self.metrics_prefix}.http_response",
sample_rate=1.0,
tags={self.integration_type: self.name, "status": code},
)
log_params = {
**(extra or {}),
"status_string": str(code),
"error": str(error)[:256] if error else None,
}
if self.integration_type:
log_params[self.integration_type] = self.name
log_params.update(getattr(self, "logging_context", None) or {})
self.logger.info("%s.http_response", self.integration_type, extra=log_params)
def get_cache_prefix(self) -> str:
return f"{self.integration_type}.{self.name}.client:"
def build_url(self, path: str) -> str:
if path.startswith("/"):
if not self.base_url:
raise ValueError(f"Invalid URL: {path}")
base_url = self.base_url.rstrip("/")
path = path.lstrip("/")
return f"{base_url}/{path}"
return path
def finalize_request(self, prepared_request: PreparedRequest) -> PreparedRequest:
"""
Allows subclasses to add hooks before sending requests out
"""
return prepared_request
def is_response_fatal(self, resp: Response) -> bool:
return False
def is_response_error(self, resp: Response) -> bool:
if resp.status_code:
if resp.status_code >= 400 and resp.status_code != 429 and resp.status_code < 500:
return True
return False
def is_response_success(self, resp: Response) -> bool:
if resp.status_code:
if resp.status_code < 300:
return True
return False
def is_error_fatal(self, error: Exception) -> bool:
return False
def build_session(self) -> SafeSession:
"""
Generates a safe Requests session for the API client to use.
"""
return build_session()
@overload
def _request(
self,
method: str,
path: str,
headers: Mapping[str, str] | None = None,
data: Mapping[str, str] | None = None,
params: Mapping[str, str] | None = None,
auth: tuple[str, str] | None = None,
json: bool = True,
allow_text: bool = False,
allow_redirects: bool | None = None,
timeout: int | None = None,
ignore_webhook_errors: bool = False,
prepared_request: PreparedRequest | None = None,
raw_response: Literal[True] = ...,
) -> Response: ...
@overload
def _request(
self,
method: str,
path: str,
headers: Mapping[str, str] | None = None,
data: Mapping[str, str] | None = None,
params: Mapping[str, str] | None = None,
auth: str | None = None,
json: bool = True,
allow_text: bool = False,
allow_redirects: bool | None = None,
timeout: int | None = None,
ignore_webhook_errors: bool = False,
prepared_request: PreparedRequest | None = None,
raw_response: bool = ...,
) -> Any: ...
def _request(
self,
method: str,
path: str,
headers: Mapping[str, str] | None = None,
data: Mapping[str, str] | None = None,
params: Mapping[str, str] | None = None,
auth: tuple[str, str] | str | None = None,
json: bool = True,
allow_text: bool = False,
allow_redirects: bool | None = None,
timeout: int | None = None,
ignore_webhook_errors: bool = False,
prepared_request: PreparedRequest | None = None,
raw_response: bool = False,
) -> Any | Response:
if allow_redirects is None:
allow_redirects = self.allow_redirects
if allow_redirects is None: # is still None
allow_redirects = method.upper() == "GET"
if timeout is None:
timeout = self.timeout
full_url = self.build_url(path)
metrics.incr(
f"{self.metrics_prefix}.http_request",
sample_rate=1.0,
tags={self.integration_type: self.name},
)
if self.integration_type:
sentry_sdk.get_isolation_scope().set_tag(self.integration_type, self.name)
request = Request(
method=method.upper(),
url=full_url,
headers=headers,
json=data if json else None,
data=data if not json else None,
params=params,
auth=auth,
)
_prepared_request = prepared_request if prepared_request is not None else request.prepare()
extra = {"url": full_url}
# It shouldn't be possible for integration_type to be null.
if self.integration_type:
extra[self.integration_type] = self.name
try:
with self.build_session() as session:
finalized_request = self.finalize_request(_prepared_request)
environment_settings = session.merge_environment_settings(
url=finalized_request.url,
proxies={},
stream=None,
verify=self.verify_ssl,
cert=None,
)
session_settings: SessionSettings = {
"timeout": timeout,
"allow_redirects": allow_redirects,
**environment_settings,
}
resp: Response = session.send(finalized_request, **session_settings)
if raw_response:
return resp
resp.raise_for_status()
except RestrictedIPAddress as e:
self.track_response_data("restricted_ip_address", e, extra=extra)
raise ApiHostError.from_exception(e) from e
except ConnectionError as e:
self.track_response_data("connection_error", e, extra=extra)
raise ApiHostError.from_exception(e) from e
except Timeout as e:
self.track_response_data("timeout", e, extra=extra)
raise ApiTimeoutError.from_exception(e) from e
except RetryError as e:
self.track_response_data("max_retries", e, extra=extra)
raise ApiRetryError.from_exception(e) from e
except HTTPError as e:
error_resp = e.response
if error_resp is None:
self.track_response_data("unknown", e, extra=extra)
self.logger.exception("request.error", extra=extra)
raise ApiError("Internal Error", url=full_url) from e
self.track_response_data(error_resp.status_code, e, resp=error_resp, extra=extra)
raise ApiError.from_response(error_resp, url=full_url) from e
except Exception as e:
# Sometimes a ConnectionResetError shows up two or three deep in an exception
# chain, and you end up with an exception like
# `ChunkedEncodingError("Connection broken: ConnectionResetError(104, 'Connection reset by peer')",
# ConnectionResetError(104, 'Connection reset by peer'))`,
# which is a ChunkedEncodingError caused by a ProtocolError caused by a ConnectionResetError.
# Rather than worrying about what the other layers might be, we just stringify to detect this.
if "ConnectionResetError" in str(e):
self.track_response_data("connection_reset_error", e, extra=extra)
raise ApiConnectionResetError("Connection reset by peer", url=full_url) from e
# The same thing can happen with an InvalidChunkLength exception, which is a subclass of HTTPError
if "InvalidChunkLength" in str(e):
self.track_response_data("invalid_chunk_length", e, extra=extra)
raise ApiError("Connection broken: invalid chunk length", url=full_url) from e
# If it's not something we recognize, let the caller deal with it
raise
self.track_response_data(resp.status_code, None, resp, extra=extra)
if resp.status_code == 204:
return {}
return BaseApiResponse.from_response(
resp, allow_text=allow_text, ignore_webhook_errors=ignore_webhook_errors
)
# subclasses should override ``request``
def request(self, *args: Any, **kwargs: Any) -> Any:
return self._request(*args, **kwargs)
def delete(self, *args: Any, **kwargs: Any) -> Any:
return self.request("DELETE", *args, **kwargs)
def get_cache_key(self, path: str, method: str, query: str = "", data: str | None = "") -> str:
if not data:
return (
self.get_cache_prefix() + md5_text(self.build_url(path), method, query).hexdigest()
)
return (
self.get_cache_prefix()
+ md5_text(self.build_url(path), method, query, data).hexdigest()
)
def check_cache(self, cache_key: str) -> Any | None:
return cache.get(cache_key)
def set_cache(self, cache_key: str, result: Any, cache_time: int) -> None:
cache.set(cache_key, result, cache_time)
def _get_cached(self, path: str, method: str, *args: Any, **kwargs: Any) -> Any:
data = kwargs.get("data", None)
query = ""
if kwargs.get("params", None):
query = json.dumps(kwargs.get("params"))
key = self.get_cache_key(path, method, query, data)
result = self.check_cache(key)
if result is None:
cache_time = kwargs.pop("cache_time", None) or self.cache_time
result = self.request(method, path, *args, **kwargs)
self.set_cache(key, result, cache_time)
return result
def get_cached(self, path: str, *args: Any, **kwargs: Any) -> Any:
return self._get_cached(path, "GET", *args, **kwargs)
def get(self, *args: Any, **kwargs: Any) -> Any:
return self.request("GET", *args, **kwargs)
def patch(self, *args: Any, **kwargs: Any) -> Any:
return self.request("PATCH", *args, **kwargs)
def post(self, *args: Any, **kwargs: Any) -> Any:
return self.request("POST", *args, **kwargs)
def put(self, *args: Any, **kwargs: Any) -> Any:
return self.request("PUT", *args, **kwargs)
def head(self, *args: Any, **kwargs: Any) -> Any:
return self.request("HEAD", *args, **kwargs)
def head_cached(self, path: str, *args: Any, **kwargs: Any) -> Any:
return self._get_cached(path, "HEAD", *args, **kwargs)
def get_with_pagination(
self,
path: str,
gen_params: Callable[..., Any],
get_results: Callable[..., Any],
*args: Any,
**kwargs: Any,
) -> list[Any]:
page_size = self.page_size
output = []
for i in range(self.page_number_limit):
resp = self.get(path, params=gen_params(i, page_size))
results = get_results(resp)
num_results = len(results)
output += results
# if the number is lower than our page_size, we can quit
if num_results < page_size:
return output
return output
| BaseApiClient |
python | imageio__imageio | imageio/plugins/bsdf.py | {
"start": 3590,
"end": 3623
} | class ____(Image):
pass
| Image2D |
python | ray-project__ray | python/ray/data/tests/test_util.py | {
"start": 8071,
"end": 14347
} | class ____:
def __init__(self):
self.concurrency = 0
self.max_concurrency = 0
def inc(self):
self.concurrency += 1
if self.concurrency > self.max_concurrency:
self.max_concurrency = self.concurrency
return self.concurrency
def decr(self):
self.concurrency -= 1
return self.concurrency
def get_max_concurrency(self):
return self.max_concurrency
def test_iterate_with_retry():
has_raised_error = False
class MockIterable:
"""Iterate over the numbers 0, 1, 2, and raise an error on the first iteration
attempt.
"""
def __init__(self, fail_at_index=3):
self._index = -1
self._fail_at_index = fail_at_index
def __iter__(self):
return self
def __next__(self):
self._index += 1
if self._index >= 10:
raise StopIteration
nonlocal has_raised_error
if self._index == self._fail_at_index and not has_raised_error:
has_raised_error = True
raise RuntimeError("Transient error")
return self._index
expected = list(range(10))
assert list(iterate_with_retry(MockIterable, description="get item")) == expected
has_raised_error = False
assert (
list(iterate_with_retry(MockIterable, description="get item", max_attempts=2))
== expected
)
def test_find_partition_index_single_column_ascending():
table = pa.table({"value": [1, 2, 2, 3, 5]})
sort_key = SortKey(key=["value"], descending=[False])
assert find_partition_index(table, (0,), sort_key) == 0 # all entries > 0
assert find_partition_index(table, (2,), sort_key) == 1 # first match index
assert find_partition_index(table, (4,), sort_key) == 4 # belongs after 3, before 5
assert find_partition_index(table, (6,), sort_key) == 5 # all entries < 6
def test_find_partition_index_single_column_descending():
table = pa.table({"value": [5, 3, 2, 2, 1]})
sort_key = SortKey(key=["value"], descending=[True])
assert find_partition_index(table, (6,), sort_key) == 0 # belongs before 5
assert find_partition_index(table, (3,), sort_key) == 2 # after the last 3
assert find_partition_index(table, (2,), sort_key) == 4 # after the last 2
assert find_partition_index(table, (0,), sort_key) == 5 # all entries > 0
def test_find_partition_index_multi_column():
# Table sorted by col1 asc, then col2 desc.
table = pa.table({"col1": [1, 1, 1, 2, 2], "col2": [3, 2, 1, 2, 1]})
sort_key = SortKey(key=["col1", "col2"], descending=[False, True])
# Insert value (1,3) -> belongs before (1,2)
assert find_partition_index(table, (1, 3), sort_key) == 0
# Insert value (1,2) -> belongs after the first (1,3) and before (1,2)
# because col1 ties, col2 descending
assert find_partition_index(table, (1, 2), sort_key) == 1
# Insert value (2,2) -> belongs right before (2,2) that starts at index 3
assert find_partition_index(table, (2, 2), sort_key) == 3
# Insert value (0, 4) -> belongs at index 0 (all col1 > 0)
assert find_partition_index(table, (0, 4), sort_key) == 0
# Insert value (2,0) -> belongs after (2,1)
assert find_partition_index(table, (2, 0), sort_key) == 5
def test_find_partition_index_with_nulls():
# _NullSentinel is sorted greater, so they appear after all real values.
table = pa.table({"value": [1, 2, 3, None, None]})
sort_key = SortKey(key=["value"], descending=[False])
# Insert (2,) -> belongs after 1, before 2 => index 1
# (But the actual find_partition_index uses the table as-is.)
assert find_partition_index(table, (2,), sort_key) == 1
# Insert (4,) -> belongs before any null => index 3
assert find_partition_index(table, (4,), sort_key) == 3
# Insert (None,) -> always belongs at the end
assert find_partition_index(table, (None,), sort_key) == 3
def test_find_partition_index_duplicates():
table = pa.table({"value": [2, 2, 2, 2, 2]})
sort_key = SortKey(key=["value"], descending=[False])
# Insert (2,) in a table of all 2's -> first matching index is 0
assert find_partition_index(table, (2,), sort_key) == 0
# Insert (1,) -> belongs at index 0
assert find_partition_index(table, (1,), sort_key) == 0
# Insert (3,) -> belongs at index 5
assert find_partition_index(table, (3,), sort_key) == 5
def test_find_partition_index_duplicates_descending():
table = pa.table({"value": [2, 2, 2, 2, 2]})
sort_key = SortKey(key=["value"], descending=[True])
# Insert (2,) in a table of all 2's -> belongs at index 5
assert find_partition_index(table, (2,), sort_key) == 5
# Insert (1,) -> belongs at index 5
assert find_partition_index(table, (1,), sort_key) == 5
# Insert (3,) -> belongs at index 0
assert find_partition_index(table, (3,), sort_key) == 0
def test_merge_resources_to_ray_remote_args():
ray_remote_args = {}
ray_remote_args = merge_resources_to_ray_remote_args(1, 1, 1, ray_remote_args)
assert ray_remote_args == {"num_cpus": 1, "num_gpus": 1, "memory": 1}
ray_remote_args = {"other_resource": 1}
ray_remote_args = merge_resources_to_ray_remote_args(1, 1, 1, ray_remote_args)
assert ray_remote_args == {
"num_cpus": 1,
"num_gpus": 1,
"memory": 1,
"other_resource": 1,
}
@pytest.mark.parametrize(
"actual, expected, expected_equal",
[
(pd.DataFrame({"a": [1]}), pd.DataFrame({"a": [1]}), True),
# Different value.
(pd.DataFrame({"a": [1]}), pd.DataFrame({"a": [2]}), False),
# Extra column.
(pd.DataFrame({"a": [1]}), pd.DataFrame({"a": [1], "b": [2]}), False),
# Different number of rows.
(pd.DataFrame({"a": [1]}), pd.DataFrame({"a": [1, 1]}), False),
# Same rows, but different order.
(pd.DataFrame({"a": [1, 2]}), pd.DataFrame({"a": [2, 1]}), True),
],
)
def test_rows_same(actual: pd.DataFrame, expected: pd.DataFrame, expected_equal: bool):
assert rows_same(actual, expected) == expected_equal
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", __file__]))
| ConcurrencyCounter |
python | lxml__lxml | src/lxml/tests/test_io.py | {
"start": 12128,
"end": 14761
} | class ____(_IOTestCaseBase):
etree = etree
@needs_feature('zlib')
def test_parse_gzip_file_decompress(self):
XMLParser = self.etree.XMLParser
parse = self.etree.parse
tostring = self.etree.tostring
data = b'<a>' + b'<b/>' * 200 + b'</a>'
parser = XMLParser(decompress=True)
with tempfile.TemporaryDirectory() as temp_dir:
gzfile = pathlib.Path(temp_dir) / "input.xml.gz"
with gzip.GzipFile(gzfile, mode='wb') as outfile:
outfile.write(data)
root = parse(str(gzfile), parser=parser)
self.assertEqual(tostring(root), data)
@needs_feature('zlib')
def test_parse_gzip_file_default_no_unzip(self):
parse = self.etree.parse
tostring = self.etree.tostring
data = b'<a>' + b'<b/>' * 200 + b'</a>'
with tempfile.TemporaryDirectory() as temp_dir:
gzfile = pathlib.Path(temp_dir) / "input.xml.gz"
with gzip.GzipFile(gzfile, mode='wb') as outfile:
outfile.write(data)
try:
root = parse(str(gzfile))
except self.etree.XMLSyntaxError:
pass # self.assertGreaterEqual(self.etree.LIBXML_VERSION, (2, 15))
else:
pass # self.assertLess(self.etree.LIBXML_VERSION, (2, 15))
output = tostring(root)
self.assertEqual(output, data)
def test_write_compressed_text(self):
Element = self.etree.Element
SubElement = self.etree.SubElement
ElementTree = self.etree.ElementTree
text = _str("qwrtioüöä")
root = Element('root')
root.text = text
child = SubElement(root, 'sub')
child.text = 'TEXT'
child.tail = 'TAIL'
SubElement(root, 'sub').text = text
tree = ElementTree(root)
out = BytesIO()
tree.write(out, method='text', encoding='utf8', compression=9)
out.seek(0)
f = gzip.GzipFile(fileobj=out)
try:
result = f.read().decode('utf8')
finally:
f.close()
self.assertEqual(text+'TEXTTAIL'+text, result)
if ElementTree:
class ElementTreeIOTestCase(_IOTestCaseBase):
etree = ElementTree
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.defaultTestLoader.loadTestsFromTestCase(ETreeIOTestCase)])
if ElementTree:
suite.addTests([unittest.defaultTestLoader.loadTestsFromTestCase(ElementTreeIOTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
| ETreeIOTestCase |
python | readthedocs__readthedocs.org | readthedocs/projects/admin.py | {
"start": 13407,
"end": 13843
} | class ____(admin.ModelAdmin):
list_display = (
"domain",
"project",
"canonical",
"https",
"count",
"ssl_status",
"created",
"modified",
)
inlines = (HTTPHeaderInline,)
search_fields = ("domain", "project__slug")
raw_id_fields = ("project",)
list_filter = ("canonical", "https", "ssl_status")
model = Domain
@admin.register(HTTPHeader)
| DomainAdmin |
python | tensorflow__tensorflow | tensorflow/python/framework/op_callbacks_test.py | {
"start": 30224,
"end": 31939
} | class ____(test_util.TensorFlowTestCase):
def tearDown(self):
op_callbacks.clear_op_callbacks()
super(OpCallbacksErrorConditionsTest, self).tearDown()
def testNonCallableObjectArgErrors(self):
with self.assertRaisesRegex(ValueError, r"is expected to be callable"):
op_callbacks.add_op_callback(1337)
def testRemoveUnregisteredCallbackLeadsToError(self):
instrument = _NumpyFunctionCallback()
with self.assertRaisesRegex(KeyError, r"has not been registered"):
op_callbacks.remove_op_callback(instrument.callback)
def testRemovingCallbackTwiceLeadsToError(self):
instrument = _NumpyFunctionCallback()
op_callbacks.add_op_callback(instrument.callback)
op_callbacks.remove_op_callback(instrument.callback)
with self.assertRaisesRegex(KeyError, r"has not been registered"):
op_callbacks.remove_op_callback(instrument.callback)
def testOverridingWithWrongNumberOfTensorOutputsErrors(self):
def wrong_outputs_callback(op_type,
inputs,
attrs,
outputs,
op_name=None,
graph=None):
del op_type, inputs, attrs, op_name, graph # Unused.
return outputs[0], math_ops.negative(outputs[0])
@def_function.function
def log1p(x):
return math_ops.log(1.0 + x)
x = constant_op.constant(3.0)
op_callbacks.add_op_callback(wrong_outputs_callback)
with self.assertRaisesRegex(
ValueError,
r"returned 2 tensors, .* does not match .* \(1\)"):
log1p(x)
if __name__ == "__main__":
ops.enable_eager_execution()
test.main()
| OpCallbacksErrorConditionsTest |
python | great-expectations__great_expectations | great_expectations/datasource/fluent/aurora_datasource.py | {
"start": 358,
"end": 1011
} | class ____(SQLDatasource):
"""Adds an aurora datasource to the data context.
Args:
name: The name of this aurora datasource.
connection_string: The connection string used to connect to the postgres database.
For example: "postgresql+psycopg2://<username>:<password>@<cluster-endpoint>.amazonaws.com:<port>/<database_name>"
assets: An optional dictionary whose keys are TableAsset or QueryAsset names and whose
values are TableAsset or QueryAsset objects.
"""
type: Literal["aurora"] = "aurora" # type: ignore[assignment]
connection_string: Union[ConfigStr, PostgresDsn]
| AuroraDatasource |
python | pennersr__django-allauth | tests/apps/socialaccount/providers/ynab/tests.py | {
"start": 652,
"end": 2252
} | class ____(OAuth2TestsMixin, TestCase):
provider_id = YNABProvider.id
def get_mocked_response(self):
return MockedResponse(
HTTPStatus.OK,
"""
{"data": {
"user":{
"id": "abcd1234xyz5678"
}
}
}
""",
)
def get_expected_to_str(self):
return "YNAB"
def test_ynab_compelete_login_401(self):
from allauth.socialaccount.providers.ynab.views import YNABOAuth2Adapter
class LessMockedResponse(MockedResponse):
def raise_for_status(self):
if self.status_code != HTTPStatus.OK:
raise HTTPError(None)
request = RequestFactory().get(
reverse(self.provider.id + "_login"), dict(process="login")
)
adapter = YNABOAuth2Adapter(request)
app = adapter.get_provider().app
token = SocialToken(token="some_token")
response_with_401 = LessMockedResponse(
HTTPStatus.UNAUTHORIZED,
"""
{"error": {
"errors": [{
"domain": "global",
"reason": "authError",
"message": "Invalid Credentials",
"locationType": "header",
"location": "Authorization" } ],
"code": 401,
"message": "Invalid Credentials" }
}""",
)
with mocked_response(response_with_401):
with self.assertRaises(HTTPError):
adapter.complete_login(request, app, token)
| YNABTests |
python | fastai__fastai | fastai/callback/schedule.py | {
"start": 704,
"end": 3661
} | class ____:
def __init__(self, f, start, end): store_attr('f,start,end')
def __call__(self, pos): return self.f(self.start, self.end, pos)
# %% ../../nbs/14_callback.schedule.ipynb 9
def annealer(f):
"Decorator to make `f` return itself partially applied."
@functools.wraps(f)
def _inner(start, end): return _Annealer(f, start, end)
return _inner
# %% ../../nbs/14_callback.schedule.ipynb 11
#TODO Jeremy, make this pickle
#@annealer
#def SchedLin(start, end, pos): return start + pos*(end-start)
#@annealer
#def SchedCos(start, end, pos): return start + (1 + math.cos(math.pi*(1-pos))) * (end-start) / 2
#@annealer
#def SchedNo (start, end, pos): return start
#@annealer
#def SchedExp(start, end, pos): return start * (end/start) ** pos
#
#SchedLin.__doc__ = "Linear schedule function from `start` to `end`"
#SchedCos.__doc__ = "Cosine schedule function from `start` to `end`"
#SchedNo .__doc__ = "Constant schedule function with `start` value"
#SchedExp.__doc__ = "Exponential schedule function from `start` to `end`"
# %% ../../nbs/14_callback.schedule.ipynb 12
def sched_lin(start, end, pos): return start + pos*(end-start)
def sched_cos(start, end, pos): return start + (1 + math.cos(math.pi*(1-pos))) * (end-start) / 2
def sched_no (start, end, pos): return start
def sched_exp(start, end, pos): return start * (end/start) ** pos
def SchedLin(start, end): return _Annealer(sched_lin, start, end)
def SchedCos(start, end): return _Annealer(sched_cos, start, end)
def SchedNo (start, end): return _Annealer(sched_no, start, end)
def SchedExp(start, end): return _Annealer(sched_exp, start, end)
SchedLin.__doc__ = "Linear schedule function from `start` to `end`"
SchedCos.__doc__ = "Cosine schedule function from `start` to `end`"
SchedNo .__doc__ = "Constant schedule function with `start` value"
SchedExp.__doc__ = "Exponential schedule function from `start` to `end`"
# %% ../../nbs/14_callback.schedule.ipynb 15
def SchedPoly(start, end, power):
"Polynomial schedule (of `power`) function from `start` to `end`"
def _inner(pos): return start + (end - start) * pos ** power
return _inner
# %% ../../nbs/14_callback.schedule.ipynb 28
def combine_scheds(pcts, scheds):
"Combine `scheds` according to `pcts` in one function"
assert sum(pcts) == 1.
pcts = tensor([0] + L(pcts))
assert torch.all(pcts >= 0)
pcts = torch.cumsum(pcts, 0)
pct_lim = len(pcts) - 2
def _inner(pos):
idx = min((pos >= pcts).nonzero().max(), pct_lim)
actual_pos = (pos-pcts[idx]) / (pcts[idx+1]-pcts[idx])
return scheds[idx](actual_pos.item())
return _inner
# %% ../../nbs/14_callback.schedule.ipynb 33
def combined_cos(pct, start, middle, end):
"Return a scheduler with cosine annealing from `start`→`middle` & `middle`→`end`"
return combine_scheds([pct,1-pct], [SchedCos(start, middle), SchedCos(middle, end)])
# %% ../../nbs/14_callback.schedule.ipynb 38
@docs
| _Annealer |
python | qdrant__qdrant-client | tools/async_client_generator/transformers/import_from_transformer.py | {
"start": 41,
"end": 840
} | class ____(ast.NodeTransformer):
def __init__(self, import_replace_map: Optional[dict[str, str]] = None):
self.import_replace_map = import_replace_map if import_replace_map is not None else {}
def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.AST:
# update module name
for old_value, new_value in self.import_replace_map.items():
if node.module is not None:
node.module = node.module.replace(old_value, new_value)
# update imported item name
for alias in node.names:
if hasattr(alias, "name"):
for old_value, new_value in self.import_replace_map.items():
alias.name = alias.name.replace(old_value, new_value)
return self.generic_visit(node)
| ImportFromTransformer |
python | openai__openai-python | src/openai/types/shared/compound_filter.py | {
"start": 344,
"end": 581
} | class ____(BaseModel):
filters: List[Filter]
"""Array of filters to combine.
Items can be `ComparisonFilter` or `CompoundFilter`.
"""
type: Literal["and", "or"]
"""Type of operation: `and` or `or`."""
| CompoundFilter |
python | neetcode-gh__leetcode | python/0061-rotate-list.py | {
"start": 0,
"end": 667
} | class ____:
def rotateRight(self, head: Optional[ListNode], k: int) -> Optional[ListNode]:
if not head or not head.next or k == 0:
return head
old_head = head
curr, size = head, 0
while curr:
curr, size = curr.next, size + 1
if k % size == 0:
return head
k %= size
slow = fast = head
while fast and fast.next:
if k <= 0:
slow = slow.next
fast = fast.next
k -= 1
new_tail, new_head, old_tail = slow, slow.next, fast
new_tail.next, old_tail.next = None, old_head
return new_head
| Solution |
python | allegroai__clearml | clearml/backend_api/services/v2_23/workers.py | {
"start": 70435,
"end": 74459
} | class ____(Response):
"""
Response of workers.get_stats endpoint.
:param workers: List of the requested workers with their statistics
:type workers: Sequence[WorkerStats]
"""
_service = "workers"
_action = "get_stats"
_version = "2.23"
_schema = {
"definitions": {
"aggregation_stats": {
"properties": {
"aggregation": {
"oneOf": [
{"$ref": "#/definitions/aggregation_type"},
{"type": "null"},
]
},
"values": {
"description": "List of values corresponding to the dates in metric statistics",
"items": {"type": "number"},
"type": ["array", "null"],
},
},
"type": "object",
},
"aggregation_type": {
"description": "Metric aggregation type",
"enum": ["avg", "min", "max"],
"type": "string",
},
"metric_stats": {
"properties": {
"dates": {
"description": "List of timestamps (in seconds from epoch) in the acceding order. The timestamps are separated by the requested interval. Timestamps where no workers activity was recorded are omitted.",
"items": {"type": "integer"},
"type": ["array", "null"],
},
"metric": {
"description": "Name of the metric (cpu_usage, memory_used etc.)",
"type": ["string", "null"],
},
"stats": {
"description": "Statistics data by type",
"items": {"$ref": "#/definitions/aggregation_stats"},
"type": ["array", "null"],
},
"variant": {
"description": "Name of the metric component. Set only if 'split_by_variant' was set in the request",
"type": ["string", "null"],
},
},
"type": "object",
},
"worker_stats": {
"properties": {
"metrics": {
"description": "List of the metrics statistics for the worker",
"items": {"$ref": "#/definitions/metric_stats"},
"type": ["array", "null"],
},
"worker": {
"description": "ID of the worker",
"type": ["string", "null"],
},
},
"type": "object",
},
},
"properties": {
"workers": {
"description": "List of the requested workers with their statistics",
"items": {"$ref": "#/definitions/worker_stats"},
"type": ["array", "null"],
}
},
"type": "object",
}
def __init__(self, workers: Optional[List[Any]] = None, **kwargs: Any) -> None:
super(GetStatsResponse, self).__init__(**kwargs)
self.workers = workers
@schema_property("workers")
def workers(self) -> Optional[List[Any]]:
return self._property_workers
@workers.setter
def workers(self, value: Optional[List[Any]]) -> None:
if value is None:
self._property_workers = None
return
self.assert_isinstance(value, "workers", (list, tuple))
if any((isinstance(v, dict) for v in value)):
value = [WorkerStats.from_dict(v) if isinstance(v, dict) else v for v in value]
else:
self.assert_isinstance(value, "workers", WorkerStats, is_array=True)
self._property_workers = value
| GetStatsResponse |
python | huggingface__transformers | tests/models/rt_detr/test_image_processing_rt_detr.py | {
"start": 1270,
"end": 3043
} | class ____:
def __init__(
self,
parent,
batch_size=4,
num_channels=3,
do_resize=True,
size=None,
do_rescale=True,
rescale_factor=1 / 255,
do_normalize=False,
do_pad=False,
return_tensors="pt",
):
self.parent = parent
self.batch_size = batch_size
self.num_channels = num_channels
self.do_resize = do_resize
self.size = size if size is not None else {"height": 640, "width": 640}
self.do_rescale = do_rescale
self.rescale_factor = rescale_factor
self.do_normalize = do_normalize
self.do_pad = do_pad
self.return_tensors = return_tensors
def prepare_image_processor_dict(self):
return {
"do_resize": self.do_resize,
"size": self.size,
"do_rescale": self.do_rescale,
"rescale_factor": self.rescale_factor,
"do_normalize": self.do_normalize,
"do_pad": self.do_pad,
"return_tensors": self.return_tensors,
}
def get_expected_values(self):
return self.size["height"], self.size["width"]
def expected_output_image_shape(self, images):
height, width = self.get_expected_values()
return self.num_channels, height, width
def prepare_image_inputs(self, equal_resolution=False, numpify=False, torchify=False):
return prepare_image_inputs(
batch_size=self.batch_size,
num_channels=self.num_channels,
min_resolution=30,
max_resolution=400,
equal_resolution=equal_resolution,
numpify=numpify,
torchify=torchify,
)
@require_torch
@require_vision
| RTDetrImageProcessingTester |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/enum1.py | {
"start": 6222,
"end": 6394
} | class ____(metaclass=CustomEnumMeta1):
pass
TestEnum14 = TestEnum13("TestEnum14", "A, B, C")
reveal_type(TestEnum14.A, expected_text="Literal[TestEnum14.A]")
| TestEnum13 |
python | tensorflow__tensorflow | tensorflow/python/ops/batch_ops_test.py | {
"start": 1863,
"end": 24121
} | class ____(test.TestCase):
"""Tests for batch_ops.{un,}batch."""
# Test for only non eager mode as batching in eager context as a functionality
# is TBD.
def testBasicBatch(self):
"""Tests that a single batched tensor executes together and only once."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
batched, index, _ = batch_ops.batch(
[inp], num_batch_threads=1, max_batch_size=2,
batch_timeout_micros=36000000, grad_timeout_micros=0,
batching_queue="")
thread_results = []
def worker():
thread_results.extend(
sess.run([batched, index], feed_dict={inp: [1]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([batched, index], feed_dict={inp: [2]})
worker_thread.join()
# At this point either the thread or the main did the batch and the other
# should have empty results.
if list(thread_results[0][0]):
batch_t = thread_results[0][0]
index_t = thread_results[1]
empty_b = main_results[0][0]
empty_m = main_results[1]
else:
batch_t = main_results[0][0]
index_t = main_results[1]
empty_b = thread_results[0][0]
empty_m = thread_results[1]
# Check that both the inputs made it out exactly once.
self.assertAllEqual(sorted(batch_t), (1, 2))
# Check that we get 2 rows in the index tensor.
self.assertEqual(len(index_t), 2)
# Check that the other ones are empty.
self.assertEqual(len(empty_b), 0)
self.assertEqual(len(empty_m), 0)
def testBatchWithPadding(self):
"""Test that batching with padding up to an allowed batch size works."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[2])
batched, index, _ = batch_ops.batch(
[inp], num_batch_threads=1, max_batch_size=10,
batch_timeout_micros=100000, # 100ms
allowed_batch_sizes=[5, 10],
grad_timeout_micros=0, batching_queue="")
thread_results = []
def worker():
thread_results.extend(
sess.run([batched, index], feed_dict={inp: [1, 3]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([batched, index], feed_dict={inp: [2, 4]})
worker_thread.join()
# At this point either the thread or the main did the batch and the other
# should have empty results.
if list(thread_results[0][0]):
batch_t = thread_results[0][0]
else:
batch_t = main_results[0][0]
# Check that the batch tensor incorporates the padding.
self.assertEqual(len(batch_t), 5)
def testMultipleBatch(self):
"""Tests that multiple batched tensors execute together."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp0 = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
inp1 = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
batched, _, _ = batch_ops.batch(
[inp0, inp1],
num_batch_threads=1,
max_batch_size=2,
batch_timeout_micros=36000000,
grad_timeout_micros=0,
batching_queue="")
thread_results = []
def worker():
thread_results.extend(
sess.run([batched], feed_dict={inp0: [1],
inp1: [2]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([batched], feed_dict={inp0: [2], inp1: [3]})
worker_thread.join()
# At this point either the thread or the main did the batch and the other
# should have empty results.
if list(thread_results[0][0]):
batch_t = thread_results[0]
empty_t = main_results[0]
else:
batch_t = main_results[0]
empty_t = thread_results[0]
# Assert that the tensors were batched together.
self.assertAllEqual(sorted(batch_t[0]), [1, 2])
self.assertAllEqual(sorted(batch_t[1]), [2, 3])
self.assertAllEqual(empty_t[0], [])
self.assertAllEqual(empty_t[1], [])
def testIllegalBatchDifferentDim0Sizes(self):
"""Tests illegally feeding tensors with different dim0 sizes."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp0 = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
inp1 = array_ops.placeholder(dtype=dtypes.int32, shape=[2])
batched, index, _ = batch_ops.batch(
[inp0, inp1], num_batch_threads=1, max_batch_size=2,
batch_timeout_micros=0, grad_timeout_micros=0, batching_queue="")
with self.assertRaises(Exception) as raised:
_ = sess.run([batched, index], feed_dict={inp0: [0], inp1: [1, 2]})
self.assertGreater(
raised.exception.message.find("must have equal 0th-dimension size"),
0)
def testBasicUnbatch(self):
"""Tests that batch and unbatch work together."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
batched, index, id_t = batch_ops.batch(
[inp], num_batch_threads=1, max_batch_size=10,
batch_timeout_micros=100000, # 100ms
allowed_batch_sizes=[3, 10],
grad_timeout_micros=0, batching_queue="")
computation = batched[0] + 1
result = batch_ops.unbatch(computation, index, id_t,
timeout_micros=1000000, shared_name="unbatch")
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [1]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [2]})
worker_thread.join()
self.assertEqual(thread_results[0], [2])
self.assertEqual(main_results[0], [3])
def testBasicUnbatchDecorated(self):
"""Tests that the batch_function decorator works."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
# TODO(apassos): Removing this line causes test flakiness! Ideally should
# be investigated.
default_inp = array_ops.placeholder_with_default(2, shape=[]) # pylint: disable=unused-variable
@batch_ops.batch_function(1, 10, 100000)
def computation(in_t):
self.assertTrue(in_t.shape is not None)
return in_t + 1
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
result = computation(inp)
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [1]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [2]})
worker_thread.join()
self.assertEqual(thread_results[0], [2])
self.assertEqual(main_results[0], [3])
def testUnbatchInvalidIdArg(self):
"""Tests that unbatch work together."""
if context.executing_eagerly():
batched_tensor = constant_op.constant(
value=np.random.random(size=(3, 3, 1)), dtype=dtypes.float64)
batched_index = constant_op.constant(
value=np.random.randint(0, 100, size=(3, 3, 1)), dtype=dtypes.int64)
arg_id = constant_op.constant(
value=np.random.randint(0, 100, size=(3, 3, 1)), dtype=dtypes.int64)
with self.assertRaisesRegex(errors.InvalidArgumentError,
"Input id should be scalar;"):
batch_ops.unbatch(
batched_tensor=batched_tensor,
batch_index=batched_index,
id=arg_id,
timeout_micros=50,
container="",
shared_name="")
def testBatchDecoratedWithCapturedInput(self):
"""Tests that the batch_function decorator works."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
captured_inp0 = array_ops.placeholder_with_default(2., shape=[])
captured_inp1 = resource_variable_ops.ResourceVariable(3.)
with ops.device("/cpu:0"):
captured_inp2 = resource_variable_ops.ResourceVariable(4.)
@batch_ops.batch_function(1, 10, 100000)
def computation(in_t):
return in_t + captured_inp0 + captured_inp1 + captured_inp2
inp = array_ops.placeholder(dtype=dtypes.float32, shape=[1])
result = computation(inp)
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [1]}))
sess.run(variables.global_variables_initializer())
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [2]})
worker_thread.join()
self.assertEqual(thread_results[0], [10])
self.assertEqual(main_results[0], [11])
@test_util.disable_xla("DeviceIndex returns sentinel value with XLA")
def testBatchDecoratedGpu(self):
if context.executing_eagerly():
return
with self.cached_session() as sess:
@batch_ops.batch_function(1, 10, 100000)
def computation(in_t):
# index is 0 on CPU and 1 on GPU
index = gen_functional_ops.DeviceIndex(device_names=["CPU", "GPU"])
return in_t + math_ops.cast(index, dtypes.float32)
inp = array_ops.placeholder(dtype=dtypes.float32, shape=[1])
result = computation(inp)
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [10.]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [20.]})
worker_thread.join()
self.assertEqual(thread_results[0], [10 + test_util.is_gpu_available()])
self.assertEqual(main_results[0], [20 + test_util.is_gpu_available()])
def testParallelRunsWithCpuAndGpu(self):
# Run multiple instances of a batch function in parallel. This is a
# regression test: this used to fail because _Send nodes for one call would
# send the tensor to the _Recv node for a different call.
if context.executing_eagerly():
return
@batch_ops.batch_function(1, 2, 1)
def f(x):
with ops.device("/GPU:0"):
x = x + 1.
with ops.device("/CPU:0"):
return x + 1
num_calls = 10
placeholders = [array_ops.placeholder(dtypes.float32, shape=(1,))
for _ in range(num_calls)]
results = []
for p in placeholders:
result = f(p)
results.append(result)
inputs = [[float(i)] for i in range(num_calls)]
expected = [[float(i + 2)] for i in range(num_calls)]
with self.session() as sess:
outputs = sess.run(results, feed_dict=dict(zip(placeholders, inputs)))
self.assertAllEqual(outputs, expected)
def testSoftPlacement(self):
if context.executing_eagerly():
return
@batch_ops.batch_function(1, 10, 100000)
def computation(in_t):
with ops.device("/GPU:0"):
return in_t + 1.
inp = array_ops.placeholder(dtype=dtypes.float32, shape=[1])
result = computation(inp)
# With soft placement, the function will run even without a GPU
config = config_pb2.ConfigProto(allow_soft_placement=True)
with self.session(config=config) as sess:
sess.run([result], feed_dict={inp: [20.]})
# Without soft placement, the function fails without a GPU due to the
# addition explicitly being placed on the GPU
config.allow_soft_placement = False
with self.session(config=config) as sess:
if test_util.is_gpu_available():
sess.run([result], feed_dict={inp: [20.]})
else:
with self.assertRaisesRegex(InvalidArgumentError,
"Cannot assign a device for operation"):
sess.run([result], feed_dict={inp: [20.]})
def testBatchFunctionOp(self):
"""Tests that the batch_function op works."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
@function.Defun(dtypes.int32)
def computation(in_t):
return in_t + 1
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
result = gen_batch_ops.batch_function(
[inp],
num_batch_threads=1,
max_batch_size=10,
batch_timeout_micros=100000,
Tout=[dtypes.int32],
f=computation,
captured_tensors=computation.captured_inputs)
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [1]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [2]})
worker_thread.join()
self.assertEqual(thread_results[0], [2])
self.assertEqual(main_results[0], [3])
def testBatchFunctionOpWithCapturedInput(self):
"""Tests that batch_function op works with captured input."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
captured_inp0 = array_ops.placeholder_with_default(2, shape=[])
captured_inp1 = array_ops.placeholder_with_default(1, shape=[])
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
@function.Defun(dtypes.int32)
def computation(inp):
return inp + captured_inp0 - captured_inp1
result = gen_batch_ops.batch_function(
num_batch_threads=1,
max_batch_size=10,
batch_timeout_micros=100000, # 100ms
allowed_batch_sizes=[3, 10],
batching_queue="",
f=computation,
in_tensors=[inp],
captured_tensors=computation.captured_inputs,
Tout=[o.type for o in computation.definition.signature.output_arg])
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [1]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [2]})
worker_thread.join()
self.assertEqual(thread_results[0], [2])
self.assertEqual(main_results[0], [3])
def testBatchFunctionOpWithInputError(self):
"""Tests that batch_function op works with error in the inputs."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
@function.Defun(dtypes.int32, dtypes.int32)
def computation(in0, in1):
return in0 + in1
result = gen_batch_ops.batch_function(
[inp], # computation actually expects 2 inputs.
num_batch_threads=1,
max_batch_size=10,
batch_timeout_micros=100000, # 100ms
batching_queue="",
f=computation,
captured_tensors=computation.captured_inputs,
Tout=[o.type for o in computation.definition.signature.output_arg])
with self.assertRaisesRegex(
InvalidArgumentError,
r"Function takes 2 argument\(s\) but 1 argument\(s\) were passed"):
sess.run([result], feed_dict={inp: [2]})
def testBatchFunctionOpWithLargeBatchSplitted(self):
"""Tests that the batch_function op works with large batch splitted."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
@function.Defun(dtypes.int32)
def computation(in_t):
return in_t + 3
inp = array_ops.placeholder(dtype=dtypes.int32)
result = gen_batch_ops.batch_function(
[inp],
num_batch_threads=2,
# enable_large_batch_splitting is True, so it's valid as long as
# max('allowed_batch_sizes') <= 'max_batch_size'.
allowed_batch_sizes=[1, 2],
max_batch_size=5,
batch_timeout_micros=100000, # 100ms
Tout=[dtypes.int32],
enable_large_batch_splitting=True,
f=computation,
captured_tensors=computation.captured_inputs)
thread1_results = []
thread2_results = []
# Input sizes of worker1 and main thread are larger than
# max(allowed_batch_sizes), while input size of worker2 is smaller.
def worker1():
thread1_results.extend(
sess.run([result], feed_dict={inp: [5, 6, 7, 8, 9]}))
worker_thread1 = threading.Thread(target=worker1)
worker_thread1.start()
def worker2():
thread2_results.extend(sess.run([result], feed_dict={inp: [10]}))
worker_thread2 = threading.Thread(target=worker2)
worker_thread2.start()
main_results = sess.run([result], feed_dict={inp: [2, 3, 4]})
worker_thread1.join()
worker_thread2.join()
self.assertTrue(
np.all(np.equal(thread2_results[0], np.array([13], dtype=np.int32))))
self.assertTrue(
np.all(
np.equal(thread1_results[0],
np.array([8, 9, 10, 11, 12], dtype=np.int32))))
self.assertTrue(
np.all(
np.equal(main_results[0], np.array([5, 6, 7], dtype=np.int32))))
def testBasicUnbatchDecoratedWithReshape(self):
"""Tests that the batch_function decorator works."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
@batch_ops.batch_function(1, 10, 100000)
def computation(in_t):
return array_ops.reshape(in_t, [-1]) + 1
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1, 1])
result = computation(inp)
thread_results = []
def worker():
thread_results.extend(sess.run([result], feed_dict={inp: [[1]]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
main_results = sess.run([result], feed_dict={inp: [[2]]})
worker_thread.join()
self.assertEqual(thread_results[0], [2])
self.assertEqual(main_results[0], [3])
def testUnbatchTimeout(self):
"""Tests that the unbatch timeout works."""
if context.executing_eagerly():
return
with self.cached_session() as sess:
inp = array_ops.placeholder(dtype=dtypes.int32, shape=[1])
batched, index, id_t = batch_ops.batch(
[inp], num_batch_threads=1, max_batch_size=2,
batch_timeout_micros=36000000, grad_timeout_micros=0,
batching_queue="")
computation = batched[0] + 1
timeout_micros = 10
result = batch_ops.unbatch(computation, index, id_t, timeout_micros,
shared_name="shared_unbatch")
# Set up a parallel pipeline that delays the computation, but uses the
# same unbatch resource object as the non-delayed pipeline.
computation_delayed = script_ops.py_func(delayed_plus1,
[batched[0]],
dtypes.int32)
result_delayed = batch_ops.unbatch(computation_delayed,
index,
id_t,
timeout_micros,
shared_name="shared_unbatch")
thread_results = []
def worker():
# A first call using the non-delayed pipeline. The batcher will send an
# empty tensor along the non-delayed pipeline.
thread_results.extend(sess.run([result], feed_dict={inp: [1]}))
worker_thread = threading.Thread(target=worker)
worker_thread.start()
time.sleep(0.1) # Ensure the thread's call starts first.
# A second call using the delayed pipeline. The batcher will send the
# batched tensor along the delayed pipeline, thus delaying the arrival of
# the batched tensor at the unbatch op, relative to the empty tensor.
#
# TODO(olston, apassos): Avoid relying on the order in which the batch op
# emits the empty tensor versus the batched one.
_ = sess.run([result_delayed], feed_dict={inp: [2]})
worker_thread.join()
# The thread's call should hit the timeout, and thus get 0 results.
self.assertEqual(len(thread_results), 0)
def testUnbatchGradInvalidId(self):
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(
gen_batch_ops.unbatch_grad(
original_input=constant_op.constant([1]),
batch_index=constant_op.constant([
[0, 0, 0],
], dtype=dtypes.int64),
grad=constant_op.constant([
1,
]),
id=constant_op.constant([
1,
1,
], dtype=dtypes.int64)))
def testUnbatchGradInvalidBatchId(self):
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(
gen_batch_ops.unbatch_grad(
original_input=constant_op.constant([1]),
batch_index=constant_op.constant([
[0, 0],
], dtype=dtypes.int64),
grad=constant_op.constant([
1,
]),
id=constant_op.constant([
1,
], dtype=dtypes.int64)))
def testUnbatchGradInvalidArgs(self):
original_input = random_ops.random_uniform(
shape=(3, 1), dtype=dtypes.float64, maxval=None)
batch_index = random_ops.random_uniform(
shape=(3, 1), dtype=dtypes.int64, maxval=65536)
grad = random_ops.random_uniform(
shape=(3, 1), dtype=dtypes.float64, maxval=None)
batch_id = random_ops.random_uniform(
shape=(3, 1), dtype=dtypes.int64, maxval=65536)
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(
gen_batch_ops.unbatch_grad(
original_input=original_input,
batch_index=batch_index,
grad=grad,
id=batch_id,
container="",
shared_name="",
name=""))
if __name__ == "__main__":
test.main()
| BatchOpsTest |
python | django-import-export__django-import-export | tests/core/migrations/0004_bookwithchapters.py | {
"start": 519,
"end": 1592
} | class ____(migrations.Migration):
dependencies = [
("core", "0003_withfloatfield"),
]
operations = []
pg_only_operations = [
migrations.CreateModel(
name="BookWithChapters",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=100, verbose_name="Book name")),
("chapters", chapters_field),
("data", data_field),
],
),
]
def apply(self, project_state, schema_editor, collect_sql=False):
if can_use_postgres_fields and schema_editor.connection.vendor.startswith(
"postgres"
):
self.operations = self.operations + self.pg_only_operations
return super().apply(project_state, schema_editor, collect_sql)
| Migration |
python | fastai__fastai | fastai/torch_core.py | {
"start": 5221,
"end": 5337
} | class ____(ArrayImageBase):
"An array representing an image"
pass
# %% ../nbs/00_torch_core.ipynb 30
| ArrayImage |
python | joke2k__faker | faker/providers/color/uk_UA/__init__.py | {
"start": 80,
"end": 8883
} | class ____(ColorProvider):
"""Implement color provider for ``uk_UA`` locale.
Sources:
- https://uk.wikipedia.org/wiki/Список_кольорів
"""
all_colors = OrderedDict(
(
("Абрикосовий", "#FBCEB1"),
("Аквамариновий", "#7FFFD4"),
("Алізариновий червоний", "#E32636"),
("Амарантовий", "#E52B50"),
("Амарантово-рожевий", "#F19CBB"),
("Аметистовий", "#9966CC"),
("Андроїдний зелений", "#A4C639"),
("Арсеновий", "#3B444B"),
("Атомний мандаріновий", "#FF9966"),
("Багряний", "#FF2400"),
("Баклажановий", "#990066"),
("Барвінковий", "#CCCCFF"),
("Бежевий", "#F5F5DC"),
("Берлінська лазур", "#003153"),
("Блаватний", "#6495ED"),
("Блакитний", "#AFEEEE"),
("Блакитний Брандейса", "#0070FF"),
("Блакитно-зелений", "#00DDDD"),
("Блакитно-фіолетовий", "#8A2BE2"),
("Блідий рожево-ліловий", "#996666"),
("Блідо-брунатний", "#987654"),
("Блідо-волошковий", "#ABCDEF"),
("Блідо-карміновий", "#AF4035"),
("Блідо-каштановий", "#DDADAF"),
("Блідо-пурпуровий", "#F984E5"),
("Блідо-пісочний", "#DABDAB"),
("Блідо-рожевий", "#FADADD"),
("Болотний", "#ACB78E"),
("Бронзовий", "#CD7F32"),
("Брунатний", "#964B00"),
("Брунато-малиновий", "#800000"),
("Будяковий", "#D8BFD8"),
("Бузковий", "#C8A2C8"),
("Бургундський", "#900020"),
("Бурий", "#755A57"),
("Бурштиновий", "#FFBF00"),
("Білий", "#FFFFFF"),
("Білий навахо", "#FFDEAD"),
("Бірюзовий", "#30D5C8"),
("Бістр", "#3D2B1F"),
("Вода пляжа Бонді", "#0095B6"),
("Вохра", "#CC7722"),
("Відбірний жовтий", "#FFBA00"),
("Візантійський", "#702963"),
("Гарбуз", "#FF7518"),
("Гарячо-рожевий", "#FC0FC0"),
("Геліотроп", "#DF73FF"),
("Глибокий фіолетовий", "#423189"),
("Глицінія", "#C9A0DC"),
("Грушевий", "#D1E231"),
("Гумігут", "#E49B0F"),
("Гірчичний", "#FFDB58"),
("Дерева", "#79443B"),
("Джинсовий", "#1560BD"),
("Діамантово-рожевий", "#FF55A3"),
("Жовтий", "#FFFF00"),
("Жовто-зелений", "#ADFF2F"),
("Жовто-персиковий", "#FADFAD"),
("Захисний синій", "#1E90FF"),
("Зелена весна", "#00FF7F"),
("Зелена мʼята", "#98FF98"),
("Зелена сосна", "#01796F"),
("Зелене море", "#2E8B57"),
("Зелений", "#00FF00"),
("Зелений армійський", "#4B5320"),
("Зелений мох", "#ADDFAD"),
("Зелений папороть", "#4F7942"),
("Зелений чай", "#D0F0C0"),
("Зелено-сірий чай", "#CADABA"),
("Зеленувато-блакитний", "#008080"),
("Золотаво-березовий", "#DAA520"),
("Золотий", "#FFD700"),
("Золотисто-каштановий", "#6D351A"),
("Індиго", "#4B0082"),
("Іржавий", "#B7410E"),
("Кардинал (колір)", "#C41E3A"),
("Карміновий", "#960018"),
("Каштановий", "#CD5C5C"),
("Кобальтовий", "#0047AB"),
("Колір жовтого шкільного автобуса", "#FFD800"),
("Колір засмаги", "#D2B48C"),
("Колір морської піни", "#FFF5EE"),
("Колір морської хвилі", "#00FFFF"),
("Кораловий", "#FF7F50"),
("Королівський синій", "#4169E1"),
("Кремовий", "#FFFDD0"),
("Кукурудзяний", "#FBEC5D"),
("Кіновар", "#FF4D00"),
("Лавандний", "#E6E6FA"),
("Лазуровий", "#007BA7"),
("Лазурово-синій", "#2A52BE"),
("Лайм", "#CCFF00"),
("Латунний", "#B5A642"),
("Лимонний", "#FDE910"),
("Лимонно-кремовий", "#FFFACD"),
("Лляний", "#EEDC82"),
("Лляний", "#FAF0E6"),
("Лососевий", "#FF8C69"),
("Ліловий", "#DB7093"),
("Малахітовий", "#0BDA51"),
("Малиновий", "#DC143C"),
("Мандариновий", "#FFCC00"),
("Мисливський", "#004225"),
("Морквяний", "#ED9121"),
("Мідний", "#B87333"),
("Міжнародний помаранчевий", "#FF4F00"),
("Нефритовий", "#00A86B"),
("Ніжно-блакитний", "#E0FFFF"),
("Ніжно-оливковий", "#6B8E23"),
("Ніжно-рожевий", "#FB607F"),
("Оливковий", "#808000"),
("Опівнічно-синій", "#003366"),
("Орхідея", "#DA70D6"),
("Палена сіена", "#E97451"),
("Палений оранжевий", "#CC5500"),
("Панг", "#C7FCEC"),
("Паросток папаї", "#FFEFD5"),
("Пастельно-зелений", "#77DD77"),
("Пастельно-рожевий", "#FFD1DC"),
("Персиковий", "#FFE5B4"),
("Перський синій", "#6600FF"),
("Помаранчевий", "#FFA500"),
("Помаранчево-персиковий", "#FFCC99"),
("Помаранчево-рожевий", "#FF9966"),
("Пурпурний", "#FF00FF"),
("Пурпуровий", "#660099"),
("Пшеничний", "#F5DEB3"),
("Пісочний колір", "#F4A460"),
("Рожевий", "#FFC0CB"),
("Рожевий Маунтбеттена", "#997A8D"),
("Рожево-лавандний", "#FFF0F5"),
("Рожево-ліловий", "#993366"),
("Салатовий", "#7FFF00"),
("Сангрія", "#92000A"),
("Сапфіровий", "#082567"),
("Світло-синій", "#007DFF"),
("Сепія", "#704214"),
("Сиваво-зелений", "#ACE1AF"),
("Сигнально-помаранчевий", "#FF9900"),
("Синя пил", "#003399"),
("Синя сталь", "#4682B4"),
("Сині яйця малинівки", "#00CCCC"),
("Синій", "#0000FF"),
("Синій (RYB)", "#0247FE"),
("Синій (пігмент)", "#333399"),
("Синій ВПС", "#5D8AA8"),
("Синій Клейна", "#3A75C4"),
("Сливовий", "#660066"),
("Смарагдовий", "#50C878"),
("Спаржевий", "#7BA05B"),
("Срібний", "#C0C0C0"),
("Старе золото", "#CFB53B"),
("Сіра спаржа", "#465945"),
("Сірий", "#808080"),
("Сірий шифер", "#708090"),
("Темний весняно-зелений", "#177245"),
("Темний жовто-брунатний", "#918151"),
("Темний зелений чай", "#BADBAD"),
("Темний пастельно-зелений", "#03C03C"),
("Темний хакі", "#BDB76B"),
("Темний індиго", "#310062"),
("Темно-аспідний сірий", "#2F4F4F"),
("Темно-брунатний", "#654321"),
("Темно-бірюзовий", "#116062"),
("Темно-зелений", "#013220"),
("Темно-зелений хакі", "#78866B"),
("Темно-золотий", "#B8860B"),
("Темно-карміновий", "#560319"),
("Темно-каштановий", "#986960"),
("Темно-кораловий", "#CD5B45"),
("Темно-лазурний", "#08457E"),
("Темно-лососевий", "#E9967A"),
("Темно-мандариновий", "#FFA812"),
("Темно-оливковий", "#556832"),
("Темно-персиковий", "#FFDAB9"),
("Темно-рожевий", "#E75480"),
("Темно-синій", "#000080"),
("Ультрамариновий", "#120A8F"),
("Умбра", "#734A12"),
("Умбра палена", "#8A3324"),
("Фуксія", "#FF00FF"),
("Фіолетовий", "#8B00FF"),
("Фіолетово-баклажановий", "#991199"),
("Фіолетово-червоний", "#C71585"),
("Хакі", "#C3B091"),
("Цинамоновий", "#7B3F00"),
("Циннвальдит", "#EBC2AF"),
("Ціан (колір)", "#00FFFF"),
("Ціано-блакитний", "#F0F8FF"),
("Червоний", "#FF0000"),
("Червоно-буро-помаранчевий", "#CD5700"),
("Червоновато-брунатний", "#CC8899"),
("Чорний", "#000000"),
("Шафрановий", "#F4C430"),
("Шкіра буйвола", "#F0DC82"),
("Шоколадний", "#D2691E"),
("Яскраво-бурштиновий", "#FF7E00"),
("Яскраво-бірюзовий", "#08E8DE"),
("Яскраво-зелений", "#66FF00"),
("Яскраво-зелений", "#40826D"),
("Яскраво-рожевий", "#FF007F"),
("Яскраво-фіолетовий", "#CD00CD"),
("Ясно-брунатний", "#CD853F"),
("Ясно-вишневий", "#DE3163"),
("Ясно-лазуровий", "#007FFF"),
("Ясно-лазуровий (веб)", "#F0FFFF"),
)
)
| Provider |
python | great-expectations__great_expectations | contrib/great_expectations_semantic_types_expectations/great_expectations_semantic_types_expectations/expectations/expect_column_values_to_be_valid_phonenumber.py | {
"start": 1769,
"end": 4093
} | class ____(ColumnMapExpectation):
"""Expect column values to be valid phone numbers."""
# These examples will be shown in the public gallery.
# They will also be executed as unit tests for your Expectation.
examples = [
{
"data": {
"well_formed_phonenumber": [
"+41 44 668 18 00",
"+41446681800",
"+41-44-668-1800",
"tel:+1-900-253-0000",
],
"malformed_phonenumber": [
"000-000-0000",
"+1 650 253 00000",
"+800 1234 5678 9",
"this is not a phone number",
],
},
"tests": [
{
"title": "basic_positive_test",
"exact_match_out": False,
"include_in_gallery": True,
"in": {"column": "well_formed_phonenumber"},
"out": {"success": True},
},
{
"title": "basic_negative_test",
"exact_match_out": False,
"include_in_gallery": True,
"in": {"column": "malformed_phonenumber"},
"out": {"success": False},
},
],
}
]
# This is the id string of the Metric used by this Expectation.
# For most Expectations, it will be the same as the `condition_metric_name` defined in your Metric class above.
map_metric = "column_values.valid_phonenumber"
# This is a list of parameter names that can affect whether the Expectation evaluates to True or False
success_keys = ("mostly",)
# This dictionary contains default values for any parameters that should have default values
default_kwarg_values = {}
# This object contains metadata for display in the public Gallery
library_metadata = {
"maturity": "experimental",
"tags": ["experimental", "hackathon", "typed-entities"],
"contributors": [
"@voidforall",
],
"requirements": ["phonenumbers"],
}
if __name__ == "__main__":
ExpectColumnValuesToBeValidPhonenumber().print_diagnostic_checklist()
| ExpectColumnValuesToBeValidPhonenumber |
python | Textualize__textual | tests/snapshot_tests/snapshot_apps/vertical_max_height.py | {
"start": 125,
"end": 529
} | class ____(App):
CSS = """
#top {
height: 1fr;
border: white;
}
#bottom {
height:3fr;
border: white;
max-height: 10;
}
"""
def compose(self) -> ComposeResult:
with Vertical():
yield Placeholder(id="top")
yield Placeholder(id="bottom")
if __name__ == "__main__":
VerticalApp().run()
| VerticalApp |
python | spack__spack | lib/spack/spack/test/error_messages.py | {
"start": 1595,
"end": 1691
} | class ____(Package):
version("4.1")
version("4.0")
""",
)
_pkgz1 = (
"z1",
"""\
| Y4 |
python | tensorflow__tensorflow | tensorflow/python/ops/ragged/dynamic_ragged_shape.py | {
"start": 79685,
"end": 80630
} | class ____(_LayerBroadcaster):
"""Implements _LayerBroadcaster with an explicit gather_index.
For example, suppose that the source shape is:
[*],[*,*]
And the target shape is:
[*],[*,*],[*],[*,*]
Then, this can be represented with a map:
[0,1,2,0,1,2]
"""
def __init__(self, gather_index):
gather_index = ops.convert_to_tensor(gather_index)
if (gather_index.dtype != dtypes.int64 and
gather_index.dtype != dtypes.int32):
raise ValueError("gather_index must be int64 or int32")
self._gather_index = gather_index
@property
def gather_index(self):
return self._gather_index
def with_dtype(self, dtype):
return _GatherLayerBroadcaster(math_ops.cast(self._gather_index, dtype))
def with_dependencies(self, checks):
new_gather_index = control_flow_ops.with_dependencies(
checks, self._gather_index)
return _GatherLayerBroadcaster(new_gather_index)
| _GatherLayerBroadcaster |
python | EpistasisLab__tpot | tpot/search_spaces/pipelines/dynamicunion.py | {
"start": 1725,
"end": 6617
} | class ____(SklearnIndividual):
"""
Takes in one search space.
Will produce a FeatureUnion of up to max_estimators number of steps.
The output of the FeatureUnion will the all of the steps concatenated together.
"""
def __init__(self, search_space : SearchSpace, max_estimators=None, allow_repeats=False, rng=None) -> None:
super().__init__()
self.search_space = search_space
if max_estimators is None:
self.max_estimators = np.inf
else:
self.max_estimators = max_estimators
self.allow_repeats = allow_repeats
self.union_dict = {}
if self.max_estimators == np.inf:
init_max = 3
else:
init_max = self.max_estimators
rng = np.random.default_rng(rng)
for _ in range(rng.integers(1, init_max)):
self._mutate_add_step(rng)
def mutate(self, rng=None):
rng = np.random.default_rng(rng)
mutation_funcs = [self._mutate_add_step, self._mutate_remove_step, self._mutate_replace_step, self._mutate_note]
rng.shuffle(mutation_funcs)
for mutation_func in mutation_funcs:
if mutation_func(rng):
return True
def _mutate_add_step(self, rng):
rng = np.random.default_rng(rng)
max_attempts = 10
if len(self.union_dict) < self.max_estimators:
for _ in range(max_attempts):
new_step = self.search_space.generate(rng)
if new_step.unique_id() not in self.union_dict:
self.union_dict[new_step.unique_id()] = new_step
return True
return False
def _mutate_remove_step(self, rng):
rng = np.random.default_rng(rng)
if len(self.union_dict) > 1:
self.union_dict.pop( rng.choice(list(self.union_dict.keys())))
return True
return False
def _mutate_replace_step(self, rng):
rng = np.random.default_rng(rng)
changed = self._mutate_remove_step(rng) or self._mutate_add_step(rng)
return changed
#TODO mutate one step or multiple?
def _mutate_note(self, rng):
rng = np.random.default_rng(rng)
changed = False
values = list(self.union_dict.values())
for step in values:
if rng.random() < 0.5:
changed = step.mutate(rng) or changed
self.union_dict = {step.unique_id(): step for step in values}
return changed
def crossover(self, other, rng=None):
rng = np.random.default_rng(rng)
cx_funcs = [self._crossover_swap_multiple_nodes, self._crossover_node]
rng.shuffle(cx_funcs)
for cx_func in cx_funcs:
if cx_func(other, rng):
return True
return False
def _crossover_swap_multiple_nodes(self, other, rng):
rng = np.random.default_rng(rng)
self_values = list(self.union_dict.values())
other_values = list(other.union_dict.values())
rng.shuffle(self_values)
rng.shuffle(other_values)
self_idx = rng.integers(0,len(self_values))
other_idx = rng.integers(0,len(other_values))
#Note that this is not one-point-crossover since the sequence doesn't matter. this is just a quick way to swap multiple random items
self_values[:self_idx], other_values[:other_idx] = other_values[:other_idx], self_values[:self_idx]
self.union_dict = {step.unique_id(): step for step in self_values}
other.union_dict = {step.unique_id(): step for step in other_values}
return True
def _crossover_node(self, other, rng):
rng = np.random.default_rng(rng)
changed = False
self_values = list(self.union_dict.values())
other_values = list(other.union_dict.values())
rng.shuffle(self_values)
rng.shuffle(other_values)
for self_step, other_step in zip(self_values, other_values):
if rng.random() < 0.5:
changed = self_step.crossover(other_step, rng) or changed
self.union_dict = {step.unique_id(): step for step in self_values}
other.union_dict = {step.unique_id(): step for step in other_values}
return changed
def export_pipeline(self, **kwargs):
values = list(self.union_dict.values())
return sklearn.pipeline.make_union(*[step.export_pipeline(**kwargs) for step in values])
def unique_id(self):
values = list(self.union_dict.values())
l = [step.unique_id() for step in values]
# if all items are strings, then sort them
if all([isinstance(x, str) for x in l]):
l.sort()
l = ["FeatureUnion"] + l
return TupleIndex(frozenset(l))
| DynamicUnionPipelineIndividual |
python | scrapy__scrapy | tests/test_webclient.py | {
"start": 12387,
"end": 12623
} | class ____(TestContextFactoryBase):
@inlineCallbacks
def testPayload(self, server_url):
s = "0123456789" * 10
body = yield getPage(server_url + "payload", body=s)
assert body == to_bytes(s)
| TestWebClientSSL |
python | spack__spack | lib/spack/spack/vendor/ruamel/yaml/dumper.py | {
"start": 5035,
"end": 6650
} | class ____(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
top_level_colon_align=top_level_colon_align,
prefix_colon=prefix_colon,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
RoundTripRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
VersionedResolver.__init__(self, loader=self)
| RoundTripDumper |
python | huggingface__transformers | src/transformers/cache_utils.py | {
"start": 60193,
"end": 60579
} | class ____(StaticSlidingWindowLayer):
def __init__(self, max_cache_len: int, sliding_window: int):
logger.warning_once(
"`SlidingWindowLayer` is deprecated and will be removed in version v4.59 "
"Use `StaticSlidingWindowLayer` instead, which is a better name for it."
)
super().__init__(max_cache_len, sliding_window)
| SlidingWindowLayer |
python | ansible__ansible | lib/ansible/module_utils/facts/collector.py | {
"start": 1801,
"end": 2150
} | class ____(Exception):
"""Indicates there is a cycle in fact collector deps
If collector-B requires collector-A, and collector-A requires
collector-B, that is a cycle. In that case, there is no ordering
that will satisfy B before A and A and before B. That will cause this
error to be raised.
"""
pass
| CycleFoundInFactDeps |
python | getsentry__sentry | src/sentry/relocation/api/endpoints/artifacts/index.py | {
"start": 892,
"end": 2942
} | class ____(Endpoint):
owner = ApiOwner.HYBRID_CLOUD
publish_status = {
# TODO(getsentry/team-ospo#214): Stabilize before GA.
"GET": ApiPublishStatus.EXPERIMENTAL,
}
permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,)
def get(self, request: Request, relocation_uuid: str) -> Response:
"""
Lists all relocation bucket files associated with a relocation
``````````````````````````````````````````````````
:pparam string relocation_uuid: a UUID identifying the relocation.
:auth: required
"""
logger.info("relocations.artifact.index.get.start", extra={"caller": request.user.id})
# TODO(schew2381): Remove the superuser reference below after feature flag is removed.
# Must be superuser/staff AND have a `UserPermission` of `relocation.admin` to see access!
if not has_elevated_mode(request):
if has_staff_option(request.user):
raise StaffRequired
raise SuperuserRequired
if not request.access.has_permission("relocation.admin"):
raise PermissionDenied(ERR_NEED_RELOCATION_ADMIN)
try:
relocation: Relocation = Relocation.objects.get(uuid=relocation_uuid)
except Relocation.DoesNotExist:
raise ResourceDoesNotExist
relocation_storage = get_relocation_storage()
(dirs, files) = relocation_storage.listdir(f"runs/{relocation.uuid}")
# Only check one level deep - no need to recurse.
for dir in dirs:
(_, dir_files) = relocation_storage.listdir(f"runs/{relocation.uuid}/{dir}")
files += [f"{dir}/{file}" for file in dir_files]
# TODO(azaslavsky): We should use a cleverer, asynchronous way to get all these sizes.
file_metadata = [
{"path": f, "bytes": relocation_storage.size(f"runs/{relocation.uuid}/{f}")}
for f in sorted(files)
]
return self.respond({"files": file_metadata})
| RelocationArtifactIndexEndpoint |
python | celery__celery | t/unit/backends/test_database.py | {
"start": 1233,
"end": 2025
} | class ____:
def test_for_mssql_dialect(self):
"""Test that ID columns use BigInteger for MSSQL and Integer for other dialects."""
from sqlalchemy import BigInteger, Integer
from sqlalchemy.dialects import mssql, mysql, oracle, postgresql, sqlite
models = [Task, TaskSet]
id_columns = [m.__table__.columns['id'] for m in models]
for dialect in [mssql, postgresql, mysql, sqlite, oracle]:
for id_column in id_columns:
compiled_type = id_column.type.dialect_impl(dialect.dialect())
if dialect == mssql:
assert isinstance(compiled_type, BigInteger)
else:
assert isinstance(compiled_type, Integer)
@skip.if_pypy
| test_ModelsIdFieldTypeVariations |
python | realpython__materials | python-enum/mood.py | {
"start": 24,
"end": 281
} | class ____(Enum):
FUNKY = 1
MAD = 2
HAPPY = 3
def describe_mood(self):
return self.name, self.value
def __str__(self):
return f"I feel {self.name}"
@classmethod
def favorite_mood(cls):
return cls.HAPPY
| Mood |
python | skorch-dev__skorch | skorch/hf.py | {
"start": 42162,
"end": 48911
} | class ____:
"""Helper class that allows writing data to the Hugging Face Hub.
Use this, for instance, in combination with checkpoint callbacks such as
:class:`skorch.callbacks.training.TrainEndCheckpoint` or
:class:`skorch.callbacks.training.Checkpoint` to upload the trained model
directly to the Hugging Face Hub instead of storing it locally.
To use this, it is necessary to install the `Hugging Face Hub library
<https://huggingface.co/docs/huggingface_hub/index>`__.
.. code:: bash
python -m pip install huggingface_hub
Note that writes to the Hub are synchronous. Therefore, if the time it takes
to upload the data is long compared to training the model, there can be a
signficant slowdown. It is best to use this with
:class:`skorch.callbacks.training.TrainEndCheckpoint`, as that checkpoint
only uploads the data once, at the end of training. Also, using this writer
with :class:`skorch.callbacks.training.LoadInitState` is not supported for
now because the Hub API does not support model loading yet.
Parameters
----------
hf_api : instance of huggingface_hub.HfApi
Pass an instantiated ``huggingface_hub.HfApi`` object here.
path_in_repo : str
The name that the file should have in the repo, e.g. ``my-model.pkl``. If
you want each upload to have a different file name, instead of overwriting
the file, use a templated name, e.g. ``my-model-{}.pkl``. Then your files
will be called ``my-model-1.pkl``, ``my-model-2.pkl``, etc. If there are
already files by this name in the repository, they will be overwritten.
repo_id : str
The repository to which the file will be uploaded, for example:
``"username/reponame"``.
verbose : int (default=0)
Control the level of verbosity.
local_storage : str, pathlib.Path or None (default=None)
Indicate temporary storage of the parameters. By default, they are stored
in-memory. By passing a string or Path to this parameter, you can instead
store the parameters at the indicated location. There is no automatic
cleanup, so if you don't need the file on disk, put it into a temp folder.
sink : callable (default=print)
The target that the verbose information is sent to. By default, the output
is printed to stdout, but the sink could also be a logger or
:func:`~skorch.utils.noop`.
kwargs : dict
The remaining arguments are the same as for ``HfApi.upload_file`` (see
https://huggingface.co/docs/huggingface_hub/package_reference/hf_api#huggingface_hub.HfApi.upload_file).
Attributes
----------
latest_url_ : str
Stores the latest URL that the file has been uploaded to.
Examples
--------
>>> from huggingface_hub import create_repo, HfApi
>>> model_name = 'my-skorch-model.pkl'
>>> params_name = 'my-torch-params.pt'
>>> repo_name = 'my-user/my-repo'
>>> token = 'my-secret-token'
>>> # you can create a new repo like this:
>>> create_repo(repo_name, token=token, exist_ok=True)
>>> hf_api = HfApi()
>>> hub_pickle_writer = HfHubStorage(
... hf_api,
... path_in_repo=model_name,
... repo_id=repo_name,
... token=token,
... verbose=1,
... )
>>> hub_params_writer = HfHubStorage(
... hf_api,
... path_in_repo=params_name,
... repo_id=repo_name,
... token=token,
... verbose=1,
... )
>>> checkpoints = [
... TrainEndCheckpoint(f_pickle=hub_pickle_writer),
... TrainEndCheckpoint(f_params=hub_params_writer),
... ]
>>> net = NeuralNet(..., checkpoints=checkpoints)
>>> net.fit(X, y)
>>> # prints:
>>> # Uploaded model to https://huggingface.co/my-user/my-repo/blob/main/my-skorch-model.pkl
>>> # Uploaded model to https://huggingface.co/my-user/my-repo/blob/main/my-torch-params.pt
...
>>> # later...
>>> import pickle
>>> from huggingface_hub import hf_hub_download
>>> path = hf_hub_download(repo_name, model_name, use_auth_token=token)
>>> with open(path, 'rb') as f:
>>> net_loaded = pickle.load(f)
"""
def __init__(
self,
hf_api,
path_in_repo,
repo_id,
local_storage=None,
verbose=0,
sink=print,
**kwargs
):
self.hf_api = hf_api
self.path_in_repo = path_in_repo
self.repo_id = repo_id
self.local_storage = local_storage
self.verbose = verbose
self.sink = sink
self.kwargs = kwargs
self.latest_url_ = None
self._buffer = None
self._call_count = 0
self._needs_flush = False
def _get_buffer(self):
if self.local_storage is None:
return io.BytesIO()
return open(self.local_storage, 'wb')
def write(self, content):
"""Upload the file to the Hugging Face Hub"""
if self._buffer is None:
self._buffer = self._get_buffer()
self._buffer.write(content)
self._needs_flush = True
def flush(self):
"""Flush buffered file"""
if not self._needs_flush:
# This is to prevent double-flushing. Some PyTorch versions create
# two contexts, resulting in __exit__, and thus flush, being called
# twice
return
if isinstance(self.local_storage, (str, pathlib.Path)):
self._buffer.close()
path_or_fileobj = self._buffer.name
else:
self._buffer.seek(0)
path_or_fileobj = self._buffer
path_in_repo = self.path_in_repo.format(self._call_count)
return_url = self.hf_api.upload_file(
path_or_fileobj=path_or_fileobj,
path_in_repo=path_in_repo,
repo_id=self.repo_id,
**self.kwargs
)
if hasattr(return_url, 'commit_url'):
# starting from huggingface_hub, the return type is now a CommitInfo
# object instead of a string
return_url = return_url.commit_url
self._buffer = None
self._needs_flush = False
self.latest_url_ = return_url
self._call_count += 1
if self.verbose:
self.sink(f"Uploaded file to {return_url}")
# pylint: disable=unused-argument
def close(self, *args):
self.flush()
def seek(self, offset, whence=0):
raise NotImplementedError("Seek is not (yet) implemented")
def tell(self):
raise NotImplementedError("Tell is not (yet) implemented")
def read(self):
raise NotImplementedError("Read is not (yet) implemented")
| HfHubStorage |
python | kamyu104__LeetCode-Solutions | Python/different-ways-to-add-parentheses.py | {
"start": 1394,
"end": 2311
} | class ____(object):
# @param {string} input
# @return {integer[]}
def diffWaysToCompute(self, input):
lookup = [[None for _ in xrange(len(input) + 1)] for _ in xrange(len(input) + 1)]
ops = {'+': operator.add, '-': operator.sub, '*': operator.mul}
def diffWaysToComputeRecu(left, right):
if lookup[left][right]:
return lookup[left][right]
result = []
for i in xrange(left, right):
if input[i] in ops:
for x in diffWaysToComputeRecu(left, i):
for y in diffWaysToComputeRecu(i + 1, right):
result.append(ops[input[i]](x, y))
if not result:
result = [int(input[left:right])]
lookup[left][right] = result
return lookup[left][right]
return diffWaysToComputeRecu(0, len(input))
| Solution2 |
python | google__jax | jax/_src/core.py | {
"start": 59179,
"end": 59230
} | class ____:
val: int
@dataclass(frozen=True)
| InDBIdx |
python | openai__openai-python | src/openai/types/realtime/realtime_conversation_item_system_message_param.py | {
"start": 281,
"end": 466
} | class ____(TypedDict, total=False):
text: str
"""The text content."""
type: Literal["input_text"]
"""The content type. Always `input_text` for system messages."""
| Content |
python | sphinx-doc__sphinx | tests/test_ext_napoleon/test_ext_napoleon_docstring.py | {
"start": 33308,
"end": 74283
} | class ____:
docstrings = [
(
"""Single line summary""",
"""Single line summary""",
),
(
"""
Single line summary
Extended description
""",
"""
Single line summary
Extended description
""",
),
(
"""
Single line summary
Parameters
----------
arg1:str
Extended
description of arg1
""",
"""
Single line summary
:Parameters: **arg1** (:py:class:`str`) -- Extended
description of arg1
""",
),
(
"""
Single line summary
Parameters
----------
arg1:str
Extended
description of arg1
arg2 : int
Extended
description of arg2
Keyword Arguments
-----------------
kwarg1:str
Extended
description of kwarg1
kwarg2 : int
Extended
description of kwarg2
""",
"""
Single line summary
:Parameters: * **arg1** (:py:class:`str`) -- Extended
description of arg1
* **arg2** (:py:class:`int`) -- Extended
description of arg2
:Keyword Arguments: * **kwarg1** (:py:class:`str`) -- Extended
description of kwarg1
* **kwarg2** (:py:class:`int`) -- Extended
description of kwarg2
""",
),
(
"""
Single line summary
Return
------
str
Extended
description of return value
""",
"""
Single line summary
:returns: :py:class:`str` -- Extended
description of return value
""",
),
(
"""
Single line summary
Returns
-------
str
Extended
description of return value
""",
"""
Single line summary
:returns: :py:class:`str` -- Extended
description of return value
""",
),
(
"""
Single line summary
Parameters
----------
arg1:str
Extended description of arg1
*args:
Variable length argument list.
**kwargs:
Arbitrary keyword arguments.
""",
"""
Single line summary
:Parameters: * **arg1** (:py:class:`str`) -- Extended description of arg1
* **\\*args** -- Variable length argument list.
* **\\*\\*kwargs** -- Arbitrary keyword arguments.
""",
),
(
"""
Single line summary
Parameters
----------
arg1:str
Extended description of arg1
*args, **kwargs:
Variable length argument list and arbitrary keyword arguments.
""",
"""
Single line summary
:Parameters: * **arg1** (:py:class:`str`) -- Extended description of arg1
* **\\*args, \\*\\*kwargs** -- Variable length argument list and arbitrary keyword arguments.
""",
),
(
"""
Single line summary
Receive
-------
arg1:str
Extended
description of arg1
arg2 : int
Extended
description of arg2
""",
"""
Single line summary
:Receives: * **arg1** (:py:class:`str`) -- Extended
description of arg1
* **arg2** (:py:class:`int`) -- Extended
description of arg2
""",
),
(
"""
Single line summary
Receives
--------
arg1:str
Extended
description of arg1
arg2 : int
Extended
description of arg2
""",
"""
Single line summary
:Receives: * **arg1** (:py:class:`str`) -- Extended
description of arg1
* **arg2** (:py:class:`int`) -- Extended
description of arg2
""",
),
(
"""
Single line summary
Yield
-----
str
Extended
description of yielded value
""",
"""
Single line summary
:Yields: :py:class:`str` -- Extended
description of yielded value
""",
),
(
"""
Single line summary
Yields
------
str
Extended
description of yielded value
""",
"""
Single line summary
:Yields: :py:class:`str` -- Extended
description of yielded value
""",
),
]
def test_sphinx_admonitions(self):
admonition_map = {
'Attention': 'attention',
'Caution': 'caution',
'Danger': 'danger',
'Error': 'error',
'Hint': 'hint',
'Important': 'important',
'Note': 'note',
'Tip': 'tip',
'Todo': 'todo',
'Warning': 'warning',
'Warnings': 'warning',
}
config = Config()
for section, admonition in admonition_map.items():
# Multiline
underline = '-' * len(section)
actual = NumpyDocstring(
f'{section}\n'
f'{underline}\n'
' this is the first line\n'
'\n'
' and this is the second line\n',
config,
)
expect = (
f'.. {admonition}::\n'
'\n'
' this is the first line\n'
' \n'
' and this is the second line\n'
)
assert str(actual) == expect
# Single line
actual = NumpyDocstring(
f'{section}\n{"-" * len(section)}\n this is a single line\n',
config,
)
expect = f'.. {admonition}:: this is a single line\n'
assert str(actual) == expect
def test_docstrings(self):
config = Config(
napoleon_use_param=False,
napoleon_use_rtype=False,
napoleon_use_keyword=False,
napoleon_preprocess_types=True,
)
for docstring, expected in self.docstrings:
actual = NumpyDocstring(dedent(docstring), config)
expected = dedent(expected)
assert str(actual) == expected
def test_type_preprocessor(self):
docstring = dedent("""
Single line summary
Parameters
----------
arg1:str
Extended
description of arg1
""")
config = Config(napoleon_preprocess_types=False, napoleon_use_param=False)
actual = NumpyDocstring(docstring, config)
expected = dedent("""
Single line summary
:Parameters: **arg1** (*str*) -- Extended
description of arg1
""")
assert str(actual) == expected
def test_parameters_with_class_reference(self):
docstring = """\
Parameters
----------
param1 : :class:`MyClass <name.space.MyClass>` instance
Other Parameters
----------------
param2 : :class:`MyClass <name.space.MyClass>` instance
"""
config = Config(napoleon_use_param=False)
actual = NumpyDocstring(docstring, config)
expected = """\
:Parameters: **param1** (:class:`MyClass <name.space.MyClass>` instance)
:Other Parameters: **param2** (:class:`MyClass <name.space.MyClass>` instance)
"""
assert str(actual) == expected
config = Config(napoleon_use_param=True)
actual = NumpyDocstring(docstring, config)
expected = """\
:param param1:
:type param1: :class:`MyClass <name.space.MyClass>` instance
:param param2:
:type param2: :class:`MyClass <name.space.MyClass>` instance
"""
assert str(actual) == expected
def test_multiple_parameters(self):
docstring = """\
Parameters
----------
x1, x2 : array_like
Input arrays, description of ``x1``, ``x2``.
"""
config = Config(napoleon_use_param=False)
actual = NumpyDocstring(docstring, config)
expected = """\
:Parameters: **x1, x2** (*array_like*) -- Input arrays, description of ``x1``, ``x2``.
"""
assert str(actual) == expected
config = Config(napoleon_use_param=True)
actual = NumpyDocstring(dedent(docstring), config)
expected = """\
:param x1: Input arrays, description of ``x1``, ``x2``.
:type x1: array_like
:param x2: Input arrays, description of ``x1``, ``x2``.
:type x2: array_like
"""
assert str(actual) == expected
def test_parameters_without_class_reference(self):
docstring = """\
Parameters
----------
param1 : MyClass instance
"""
config = Config(napoleon_use_param=False)
actual = NumpyDocstring(docstring, config)
expected = """\
:Parameters: **param1** (*MyClass instance*)
"""
assert str(actual) == expected
config = Config(napoleon_use_param=True)
actual = NumpyDocstring(dedent(docstring), config)
expected = """\
:param param1:
:type param1: MyClass instance
"""
assert str(actual) == expected
def test_see_also_refs(self):
docstring = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
See Also
--------
some, other, funcs
otherfunc : relationship
"""
actual = NumpyDocstring(docstring)
expected = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
.. seealso::
:py:obj:`some`, :py:obj:`other`, :py:obj:`funcs`
\n\
:py:obj:`otherfunc`
relationship
"""
assert str(actual) == expected
docstring = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
See Also
--------
some, other, funcs
otherfunc : relationship
"""
config = Config()
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'method')
expected = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
.. seealso::
:py:obj:`some`, :py:obj:`other`, :py:obj:`funcs`
\n\
:py:obj:`otherfunc`
relationship
"""
assert str(actual) == expected
docstring = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
See Also
--------
some, other, :func:`funcs`
otherfunc : relationship
"""
translations = {
'other': 'MyClass.other',
'otherfunc': ':func:`~my_package.otherfunc`',
}
config = Config(napoleon_type_aliases=translations)
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'method')
expected = """\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
.. seealso::
:py:obj:`some`, :py:obj:`MyClass.other`, :func:`funcs`
\n\
:func:`~my_package.otherfunc`
relationship
"""
assert str(actual) == expected
def test_colon_in_return_type(self):
docstring = """
Summary
Returns
-------
:py:class:`~my_mod.my_class`
an instance of :py:class:`~my_mod.my_class`
"""
expected = """
Summary
:returns: an instance of :py:class:`~my_mod.my_class`
:rtype: :py:class:`~my_mod.my_class`
"""
config = Config()
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'method')
assert str(actual) == expected
def test_underscore_in_attribute(self):
docstring = """
Attributes
----------
arg_ : type
some description
"""
expected = """
:ivar arg_: some description
:vartype arg_: type
"""
config = Config(napoleon_use_ivar=True)
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'class')
assert str(actual) == expected
def test_underscore_in_attribute_strip_signature_backslash(self):
docstring = """
Attributes
----------
arg_ : type
some description
"""
expected = """
:ivar arg\\_: some description
:vartype arg\\_: type
"""
config = Config(napoleon_use_ivar=True)
config.strip_signature_backslash = True
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'class')
assert str(actual) == expected
def test_return_types(self):
docstring = dedent("""
Returns
-------
DataFrame
a dataframe
""")
expected = dedent("""
:returns: a dataframe
:rtype: :py:class:`~pandas.DataFrame`
""")
translations = {
'DataFrame': '~pandas.DataFrame',
}
config = Config(
napoleon_use_param=True,
napoleon_use_rtype=True,
napoleon_preprocess_types=True,
napoleon_type_aliases=translations,
)
actual = NumpyDocstring(docstring, config)
assert str(actual) == expected
def test_yield_types(self):
docstring = dedent("""
Example Function
Yields
------
scalar or array-like
The result of the computation
""")
expected = dedent("""
Example Function
:Yields: :term:`scalar` or :py:class:`array-like <numpy.ndarray>` -- The result of the computation
""")
translations = {
'scalar': ':term:`scalar`',
'array-like': ':py:class:`array-like <numpy.ndarray>`',
}
config = Config(
napoleon_type_aliases=translations, napoleon_preprocess_types=True
)
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'method')
assert str(actual) == expected
def test_raises_types(self):
docstrings = [
(
"""
Example Function
Raises
------
RuntimeError
A setting wasn't specified, or was invalid.
ValueError
Something something value error.
""",
"""
Example Function
:raises RuntimeError: A setting wasn't specified, or was invalid.
:raises ValueError: Something something value error.
""",
),
################################
(
"""
Example Function
Raises
------
InvalidDimensionsError
""",
"""
Example Function
:raises InvalidDimensionsError:
""",
),
################################
(
"""
Example Function
Raises
------
Invalid Dimensions Error
""",
"""
Example Function
:raises Invalid Dimensions Error:
""",
),
################################
(
"""
Example Function
Raises
------
Invalid Dimensions Error
With description
""",
"""
Example Function
:raises Invalid Dimensions Error: With description
""",
),
################################
(
"""
Example Function
Raises
------
InvalidDimensionsError
If the dimensions couldn't be parsed.
""",
"""
Example Function
:raises InvalidDimensionsError: If the dimensions couldn't be parsed.
""",
),
################################
(
"""
Example Function
Raises
------
Invalid Dimensions Error
If the dimensions couldn't be parsed.
""",
"""
Example Function
:raises Invalid Dimensions Error: If the dimensions couldn't be parsed.
""",
),
################################
(
"""
Example Function
Raises
------
If the dimensions couldn't be parsed.
""",
"""
Example Function
:raises If the dimensions couldn't be parsed.:
""",
),
################################
(
"""
Example Function
Raises
------
:class:`exc.InvalidDimensionsError`
""",
"""
Example Function
:raises exc.InvalidDimensionsError:
""",
),
################################
(
"""
Example Function
Raises
------
:class:`exc.InvalidDimensionsError`
If the dimensions couldn't be parsed.
""",
"""
Example Function
:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed.
""",
),
################################
(
"""
Example Function
Raises
------
:class:`exc.InvalidDimensionsError`
If the dimensions couldn't be parsed,
then a :class:`exc.InvalidDimensionsError` will be raised.
""",
"""
Example Function
:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed,
then a :class:`exc.InvalidDimensionsError` will be raised.
""",
),
################################
(
"""
Example Function
Raises
------
:class:`exc.InvalidDimensionsError`
If the dimensions couldn't be parsed.
:class:`exc.InvalidArgumentsError`
If the arguments are invalid.
""",
"""
Example Function
:raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed.
:raises exc.InvalidArgumentsError: If the arguments are invalid.
""",
),
################################
(
"""
Example Function
Raises
------
CustomError
If the dimensions couldn't be parsed.
""",
"""
Example Function
:raises package.CustomError: If the dimensions couldn't be parsed.
""",
),
################################
(
"""
Example Function
Raises
------
AnotherError
If the dimensions couldn't be parsed.
""",
"""
Example Function
:raises ~package.AnotherError: If the dimensions couldn't be parsed.
""",
),
################################
(
"""
Example Function
Raises
------
:class:`exc.InvalidDimensionsError`
:class:`exc.InvalidArgumentsError`
""",
"""
Example Function
:raises exc.InvalidDimensionsError:
:raises exc.InvalidArgumentsError:
""",
),
]
for docstring, expected in docstrings:
translations = {
'CustomError': 'package.CustomError',
'AnotherError': ':py:exc:`~package.AnotherError`',
}
config = Config(
napoleon_type_aliases=translations, napoleon_preprocess_types=True
)
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'method')
assert str(actual) == expected
def test_xrefs_in_return_type(self):
docstring = """
Example Function
Returns
-------
:class:`numpy.ndarray`
A :math:`n \\times 2` array containing
a bunch of math items
"""
expected = """
Example Function
:returns: A :math:`n \\times 2` array containing
a bunch of math items
:rtype: :class:`numpy.ndarray`
"""
config = Config()
app = mock.Mock()
actual = NumpyDocstring(docstring, config, app, 'method')
assert str(actual) == expected
def test_section_header_underline_length(self):
docstrings = [
(
"""
Summary line
Example
-
Multiline example
body
""",
"""
Summary line
Example
-
Multiline example
body
""",
),
################################
(
"""
Summary line
Example
--
Multiline example
body
""",
"""
Summary line
.. rubric:: Example
Multiline example
body
""",
),
################################
(
"""
Summary line
Example
-------
Multiline example
body
""",
"""
Summary line
.. rubric:: Example
Multiline example
body
""",
),
################################
(
"""
Summary line
Example
------------
Multiline example
body
""",
"""
Summary line
.. rubric:: Example
Multiline example
body
""",
),
]
for docstring, expected in docstrings:
actual = NumpyDocstring(docstring)
assert str(actual) == expected
def test_list_in_parameter_description(self):
docstring = """One line summary.
Parameters
----------
no_list : int
one_bullet_empty : int
*
one_bullet_single_line : int
- first line
one_bullet_two_lines : int
+ first line
continued
two_bullets_single_line : int
- first line
- second line
two_bullets_two_lines : int
* first line
continued
* second line
continued
one_enumeration_single_line : int
1. first line
one_enumeration_two_lines : int
1) first line
continued
two_enumerations_one_line : int
(iii) first line
(iv) second line
two_enumerations_two_lines : int
a. first line
continued
b. second line
continued
one_definition_one_line : int
item 1
first line
one_definition_two_lines : int
item 1
first line
continued
two_definitions_one_line : int
item 1
first line
item 2
second line
two_definitions_two_lines : int
item 1
first line
continued
item 2
second line
continued
one_definition_blank_line : int
item 1
first line
extra first line
two_definitions_blank_lines : int
item 1
first line
extra first line
item 2
second line
extra second line
definition_after_normal_text : int
text line
item 1
first line
"""
expected = """One line summary.
:param no_list:
:type no_list: int
:param one_bullet_empty:
*
:type one_bullet_empty: int
:param one_bullet_single_line:
- first line
:type one_bullet_single_line: int
:param one_bullet_two_lines:
+ first line
continued
:type one_bullet_two_lines: int
:param two_bullets_single_line:
- first line
- second line
:type two_bullets_single_line: int
:param two_bullets_two_lines:
* first line
continued
* second line
continued
:type two_bullets_two_lines: int
:param one_enumeration_single_line:
1. first line
:type one_enumeration_single_line: int
:param one_enumeration_two_lines:
1) first line
continued
:type one_enumeration_two_lines: int
:param two_enumerations_one_line:
(iii) first line
(iv) second line
:type two_enumerations_one_line: int
:param two_enumerations_two_lines:
a. first line
continued
b. second line
continued
:type two_enumerations_two_lines: int
:param one_definition_one_line:
item 1
first line
:type one_definition_one_line: int
:param one_definition_two_lines:
item 1
first line
continued
:type one_definition_two_lines: int
:param two_definitions_one_line:
item 1
first line
item 2
second line
:type two_definitions_one_line: int
:param two_definitions_two_lines:
item 1
first line
continued
item 2
second line
continued
:type two_definitions_two_lines: int
:param one_definition_blank_line:
item 1
first line
extra first line
:type one_definition_blank_line: int
:param two_definitions_blank_lines:
item 1
first line
extra first line
item 2
second line
extra second line
:type two_definitions_blank_lines: int
:param definition_after_normal_text: text line
item 1
first line
:type definition_after_normal_text: int
"""
config = Config(napoleon_use_param=True)
actual = NumpyDocstring(docstring, config)
assert str(actual) == expected
expected = """One line summary.
:Parameters: * **no_list** (:py:class:`int`)
* **one_bullet_empty** (:py:class:`int`) --
*
* **one_bullet_single_line** (:py:class:`int`) --
- first line
* **one_bullet_two_lines** (:py:class:`int`) --
+ first line
continued
* **two_bullets_single_line** (:py:class:`int`) --
- first line
- second line
* **two_bullets_two_lines** (:py:class:`int`) --
* first line
continued
* second line
continued
* **one_enumeration_single_line** (:py:class:`int`) --
1. first line
* **one_enumeration_two_lines** (:py:class:`int`) --
1) first line
continued
* **two_enumerations_one_line** (:py:class:`int`) --
(iii) first line
(iv) second line
* **two_enumerations_two_lines** (:py:class:`int`) --
a. first line
continued
b. second line
continued
* **one_definition_one_line** (:py:class:`int`) --
item 1
first line
* **one_definition_two_lines** (:py:class:`int`) --
item 1
first line
continued
* **two_definitions_one_line** (:py:class:`int`) --
item 1
first line
item 2
second line
* **two_definitions_two_lines** (:py:class:`int`) --
item 1
first line
continued
item 2
second line
continued
* **one_definition_blank_line** (:py:class:`int`) --
item 1
first line
extra first line
* **two_definitions_blank_lines** (:py:class:`int`) --
item 1
first line
extra first line
item 2
second line
extra second line
* **definition_after_normal_text** (:py:class:`int`) -- text line
item 1
first line
"""
config = Config(napoleon_use_param=False, napoleon_preprocess_types=True)
actual = NumpyDocstring(docstring, config)
assert str(actual) == expected
def test_token_type(self):
tokens = (
('1', 'literal'),
('-4.6', 'literal'),
('2j', 'literal'),
("'string'", 'literal'),
('"another_string"', 'literal'),
('{1, 2}', 'literal'),
("{'va{ue', 'set'}", 'literal'),
('optional', 'control'),
('default', 'control'),
(', ', 'delimiter'),
(' of ', 'delimiter'),
(' or ', 'delimiter'),
(': ', 'delimiter'),
('True', 'obj'),
('None', 'obj'),
('name', 'obj'),
(':py:class:`Enum`', 'reference'),
)
for token, expected in tokens:
actual = _token_type(token)
assert actual == expected
def test_tokenize_type_spec(self):
specs = (
'str',
'defaultdict',
'int, float, or complex',
'int or float or None, optional',
'list of list of int or float, optional',
'tuple of list of str, float, or int',
'{"F", "C", "N"}',
"{'F', 'C', 'N'}, default: 'F'",
"{'F', 'C', 'N or C'}, default 'F'",
"str, default: 'F or C'",
'int, default: None',
'int, default None',
'int, default :obj:`None`',
'"ma{icious"',
r"'with \'quotes\''",
)
tokens = (
['str'],
['defaultdict'],
['int', ', ', 'float', ', or ', 'complex'],
['int', ' or ', 'float', ' or ', 'None', ', ', 'optional'],
['list', ' of ', 'list', ' of ', 'int', ' or ', 'float', ', ', 'optional'],
['tuple', ' of ', 'list', ' of ', 'str', ', ', 'float', ', or ', 'int'],
['{', '"F"', ', ', '"C"', ', ', '"N"', '}'],
['{', "'F'", ', ', "'C'", ', ', "'N'", '}', ', ', 'default', ': ', "'F'"],
['{', "'F'", ', ', "'C'", ', ', "'N or C'", '}', ', ', 'default', ' ', "'F'"],
['str', ', ', 'default', ': ', "'F or C'"],
['int', ', ', 'default', ': ', 'None'],
['int', ', ', 'default', ' ', 'None'],
['int', ', ', 'default', ' ', ':obj:`None`'],
['"ma{icious"'],
[r"'with \'quotes\''"],
) # fmt: skip
for spec, expected in zip(specs, tokens, strict=True):
actual = _tokenize_type_spec(spec)
assert actual == expected
def test_recombine_set_tokens(self):
tokens = (
['{', '1', ', ', '2', '}'],
['{', '"F"', ', ', '"C"', ', ', '"N"', '}', ', ', 'optional'],
['{', "'F'", ', ', "'C'", ', ', "'N'", '}', ', ', 'default', ': ', 'None'],
['{', "'F'", ', ', "'C'", ', ', "'N'", '}', ', ', 'default', ' ', 'None'],
)
combined_tokens = (
['{1, 2}'],
['{"F", "C", "N"}', ', ', 'optional'],
["{'F', 'C', 'N'}", ', ', 'default', ': ', 'None'],
["{'F', 'C', 'N'}", ', ', 'default', ' ', 'None'],
)
for tokens_, expected in zip(tokens, combined_tokens, strict=True):
actual = _recombine_set_tokens(tokens_)
assert actual == expected
def test_recombine_set_tokens_invalid(self):
tokens = (
['{', '1', ', ', '2'],
['"F"', ', ', '"C"', ', ', '"N"', '}', ', ', 'optional'],
['{', '1', ', ', '2', ', ', 'default', ': ', 'None'],
)
combined_tokens = (
['{1, 2'],
['"F"', ', ', '"C"', ', ', '"N"', '}', ', ', 'optional'],
['{1, 2', ', ', 'default', ': ', 'None'],
)
for tokens_, expected in zip(tokens, combined_tokens, strict=True):
actual = _recombine_set_tokens(tokens_)
assert actual == expected
def test_convert_numpy_type_spec(self):
translations = {
'DataFrame': 'pandas.DataFrame',
}
specs = (
'',
'optional',
'str, optional',
'int or float or None, default: None',
'list of tuple of str, optional',
'int, default None',
'{"F", "C", "N"}',
"{'F', 'C', 'N'}, default: 'N'",
"{'F', 'C', 'N'}, default 'N'",
'DataFrame, optional',
)
converted = (
'',
'*optional*',
':py:class:`str`, *optional*',
':py:class:`int` or :py:class:`float` or :py:obj:`None`, *default*: :py:obj:`None`',
':py:class:`list` of :py:class:`tuple` of :py:class:`str`, *optional*',
':py:class:`int`, *default* :py:obj:`None`',
'``{"F", "C", "N"}``',
"``{'F', 'C', 'N'}``, *default*: ``'N'``",
"``{'F', 'C', 'N'}``, *default* ``'N'``",
':py:class:`pandas.DataFrame`, *optional*',
)
for spec, expected in zip(specs, converted, strict=True):
actual = _convert_type_spec(spec, translations=translations)
assert actual == expected
def test_parameter_types(self):
docstring = dedent("""\
Parameters
----------
param1 : DataFrame
the data to work on
param2 : int or float or None, optional
a parameter with different types
param3 : dict-like, optional
a optional mapping
param4 : int or float or None, optional
a optional parameter with different types
param5 : {"F", "C", "N"}, optional
a optional parameter with fixed values
param6 : int, default None
different default format
param7 : mapping of hashable to str, optional
a optional mapping
param8 : ... or Ellipsis
ellipsis
param9 : tuple of list of int
a parameter with tuple of list of int
""")
expected = dedent("""\
:param param1: the data to work on
:type param1: :py:class:`DataFrame`
:param param2: a parameter with different types
:type param2: :py:class:`int` or :py:class:`float` or :py:obj:`None`, *optional*
:param param3: a optional mapping
:type param3: :term:`dict-like <mapping>`, *optional*
:param param4: a optional parameter with different types
:type param4: :py:class:`int` or :py:class:`float` or :py:obj:`None`, *optional*
:param param5: a optional parameter with fixed values
:type param5: ``{"F", "C", "N"}``, *optional*
:param param6: different default format
:type param6: :py:class:`int`, *default* :py:obj:`None`
:param param7: a optional mapping
:type param7: :term:`mapping` of :term:`hashable` to :py:class:`str`, *optional*
:param param8: ellipsis
:type param8: :py:obj:`... <Ellipsis>` or :py:obj:`Ellipsis`
:param param9: a parameter with tuple of list of int
:type param9: :py:class:`tuple` of :py:class:`list` of :py:class:`int`
""")
translations = {
'dict-like': ':term:`dict-like <mapping>`',
'mapping': ':term:`mapping`',
'hashable': ':term:`hashable`',
}
config = Config(
napoleon_use_param=True,
napoleon_use_rtype=True,
napoleon_preprocess_types=True,
napoleon_type_aliases=translations,
)
actual = NumpyDocstring(docstring, config)
assert str(actual) == expected
@pytest.mark.sphinx('html', testroot='root')
def test_token_type_invalid(self, app):
tokens = (
'{1, 2',
'}',
"'abc",
"def'",
'"ghi',
'jkl"',
)
errors = (
r'.+: invalid value set \(missing closing brace\):',
r'.+: invalid value set \(missing opening brace\):',
r'.+: malformed string literal \(missing closing quote\):',
r'.+: malformed string literal \(missing opening quote\):',
r'.+: malformed string literal \(missing closing quote\):',
r'.+: malformed string literal \(missing opening quote\):',
)
for token, error in zip(tokens, errors, strict=True):
try:
_token_type(token)
finally:
raw_warnings = app.warning.getvalue()
warnings = [w for w in raw_warnings.split('\n') if w.strip()]
assert len(warnings) == 1
assert re.compile(error).match(warnings[0])
app.warning.truncate(0)
@pytest.mark.parametrize(
('name', 'expected'),
[
('x, y, z', 'x, y, z'),
('*args, **kwargs', r'\*args, \*\*kwargs'),
('*x, **y', r'\*x, \*\*y'),
],
)
def test_escape_args_and_kwargs(self, name, expected):
numpy_docstring = NumpyDocstring('')
actual = numpy_docstring._escape_args_and_kwargs(name)
assert actual == expected
def test_pep526_annotations(self):
# test class attributes annotations
config = Config(
napoleon_attr_annotations=True,
)
actual = NumpyDocstring(
cleandoc(PEP526NumpyClass.__doc__),
config,
app=None,
what='class',
obj=PEP526NumpyClass,
)
expected = """\
Sample class with PEP 526 annotations and numpy docstring
.. attribute:: attr1
Attr1 description
:type: int
.. attribute:: attr2
Attr2 description
:type: str
"""
print(actual)
assert str(actual) == expected
@pytest.mark.sphinx(
'text',
testroot='ext-napoleon',
confoverrides={
'autodoc_typehints': 'description',
'autodoc_typehints_description_target': 'all',
},
)
def test_napoleon_and_autodoc_typehints_description_all(app: SphinxTestApp) -> None:
app.build()
content = (app.outdir / 'typehints.txt').read_text(encoding='utf-8')
assert content == (
'typehints\n'
'*********\n'
'\n'
'mypackage.typehints.hello(x, *args, **kwargs)\n'
'\n'
' Parameters:\n'
' * **x** (*int*) -- X\n'
'\n'
' * ***args** (*int*) -- Additional arguments.\n'
'\n'
' * ****kwargs** (*int*) -- Extra arguments.\n'
'\n'
' Return type:\n'
' None\n'
)
@pytest.mark.sphinx(
'text',
testroot='ext-napoleon',
confoverrides={
'autodoc_typehints': 'description',
'autodoc_typehints_description_target': 'documented_params',
},
)
def test_napoleon_and_autodoc_typehints_description_documented_params(
app: SphinxTestApp,
) -> None:
app.build()
content = (app.outdir / 'typehints.txt').read_text(encoding='utf-8')
assert content == (
'typehints\n'
'*********\n'
'\n'
'mypackage.typehints.hello(x, *args, **kwargs)\n'
'\n'
' Parameters:\n'
' * **x** (*int*) -- X\n'
'\n'
' * ***args** (*int*) -- Additional arguments.\n'
'\n'
' * ****kwargs** (*int*) -- Extra arguments.\n'
)
@pytest.mark.sphinx('html', testroot='ext-napoleon-paramtype', freshenv=True)
def test_napoleon_keyword_and_paramtype(app, tmp_path):
inv_file = tmp_path / 'objects.inv'
inv_file.write_bytes(
b"""\
# Sphinx inventory version 2
# Project: Intersphinx Test
# Version: 42
# The remainder of this file is compressed using zlib.
"""
+ zlib.compress(b"""\
None py:data 1 none.html -
list py:class 1 list.html -
int py:class 1 int.html -
""")
)
app.config.intersphinx_mapping = {'python': ('127.0.0.1:5555', str(inv_file))}
validate_intersphinx_mapping(app, app.config)
load_mappings(app)
app.build(force_all=True)
etree = etree_parse(app.outdir / 'index.html')
for name, typename in product(
('keyword', 'kwarg', 'kwparam'),
('paramtype', 'kwtype'),
):
param = f'{name}_{typename}'
li_ = list(etree.findall(f'.//li/p/strong[.="{param}"]/../..'))
assert len(li_) == 1
li = li_[0]
text = li.text or ''.join(li.itertext())
assert text == f'{param} (list[int]) \u2013 some param'
a_ = list(li.findall('.//a[@class="reference external"]'))
assert len(a_) == 2
for a, uri in zip(a_, ('list.html', 'int.html'), strict=True):
assert a.attrib['href'] == f'127.0.0.1:5555/{uri}'
assert a.attrib['title'] == '(in Intersphinx Test v42)'
| TestNumpyDocstring |
python | facebookresearch__faiss | faiss/gpu/test/test_gpu_index_refs.py | {
"start": 342,
"end": 937
} | class ____(Enum):
BEFORE_TRAIN = 1
BEFORE_ADD = 2
BEFORE_SEARCH = 3
def do_test(idx, index_to_delete, db, deletion_site: DeletionSite):
if deletion_site == DeletionSite.BEFORE_TRAIN:
del index_to_delete
idx.train(db)
if deletion_site == DeletionSite.BEFORE_ADD:
del index_to_delete
idx.add(db)
if deletion_site == DeletionSite.BEFORE_SEARCH:
del index_to_delete
idx.search(db, 1)
def do_multi_test(idx, index_to_delete, db):
for site in DeletionSite:
do_test(idx, index_to_delete, db, site)
#
# Test
#
| DeletionSite |
python | pandas-dev__pandas | pandas/tests/test_register_accessor.py | {
"start": 824,
"end": 3237
} | class ____:
def __init__(self, obj) -> None:
self.obj = obj
self.item = "item"
@property
def prop(self):
return self.item
def method(self):
return self.item
@pytest.mark.parametrize(
"obj, registrar",
[
(pd.Series, pd.api.extensions.register_series_accessor),
(pd.DataFrame, pd.api.extensions.register_dataframe_accessor),
(pd.Index, pd.api.extensions.register_index_accessor),
],
)
def test_register(obj, registrar):
with ensure_removed(obj, "mine"):
before = set(dir(obj))
registrar("mine")(MyAccessor)
o = obj([]) if obj is not pd.Series else obj([], dtype=object)
assert o.mine.prop == "item"
after = set(dir(obj))
assert (before ^ after) == {"mine"}
assert "mine" in obj._accessors
def test_accessor_works():
with ensure_removed(pd.Series, "mine"):
pd.api.extensions.register_series_accessor("mine")(MyAccessor)
s = pd.Series([1, 2])
assert s.mine.obj is s
assert s.mine.prop == "item"
assert s.mine.method() == "item"
def test_overwrite_warns():
match = r".*MyAccessor.*fake.*Series.*"
with tm.assert_produces_warning(UserWarning, match=match):
with ensure_removed(pd.Series, "fake"):
setattr(pd.Series, "fake", 123)
pd.api.extensions.register_series_accessor("fake")(MyAccessor)
s = pd.Series([1, 2])
assert s.fake.prop == "item"
def test_raises_attribute_error():
with ensure_removed(pd.Series, "bad"):
@pd.api.extensions.register_series_accessor("bad")
class Bad:
def __init__(self, data) -> None:
raise AttributeError("whoops")
with pytest.raises(AttributeError, match="whoops"):
pd.Series([], dtype=object).bad
@pytest.mark.parametrize(
"klass, registrar",
[
(pd.Series, pd.api.extensions.register_series_accessor),
(pd.DataFrame, pd.api.extensions.register_dataframe_accessor),
(pd.Index, pd.api.extensions.register_index_accessor),
],
)
def test_no_circular_reference(klass, registrar):
# GH 41357
with ensure_removed(klass, "access"):
registrar("access")(MyAccessor)
obj = klass([0])
ref = weakref.ref(obj)
assert obj.access.obj is obj
del obj
assert ref() is None
| MyAccessor |
python | huggingface__transformers | src/transformers/models/megatron_bert/modeling_megatron_bert.py | {
"start": 44598,
"end": 48475
} | class ____(MegatronBertPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.bert = MegatronBertModel(config)
self.cls = MegatronBertOnlyNSPHead(config)
# Initialize weights and apply final processing
self.post_init()
@auto_docstring
def forward(
self,
input_ids: Optional[torch.LongTensor] = None,
attention_mask: Optional[torch.FloatTensor] = None,
token_type_ids: Optional[torch.LongTensor] = None,
position_ids: Optional[torch.LongTensor] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
labels: Optional[torch.LongTensor] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
**kwargs,
) -> Union[tuple, NextSentencePredictorOutput]:
r"""
labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
Labels for computing the next sequence prediction (classification) loss. Input should be a sequence pair
(see `input_ids` docstring). Indices should be in `[0, 1]`:
- 0 indicates sequence B is a continuation of sequence A,
- 1 indicates sequence B is a random sequence.
Example:
```python
>>> from transformers import AutoTokenizer, MegatronBertForNextSentencePrediction
>>> import torch
>>> tokenizer = AutoTokenizer.from_pretrained("nvidia/megatron-bert-cased-345m")
>>> model = MegatronBertForNextSentencePrediction.from_pretrained("nvidia/megatron-bert-cased-345m")
>>> prompt = "In Italy, pizza served in formal settings, such as at a restaurant, is presented unsliced."
>>> next_sentence = "The sky is blue due to the shorter wavelength of blue light."
>>> encoding = tokenizer(prompt, next_sentence, return_tensors="pt")
>>> outputs = model(**encoding, labels=torch.LongTensor([1]))
>>> logits = outputs.logits
>>> assert logits[0, 0] < logits[0, 1] # next sentence was random
```"""
if "next_sentence_label" in kwargs:
warnings.warn(
"The `next_sentence_label` argument is deprecated and will be removed in a future version, use"
" `labels` instead.",
FutureWarning,
)
labels = kwargs.pop("next_sentence_label")
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
pooled_output = outputs[1]
seq_relationship_scores = self.cls(pooled_output)
next_sentence_loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
next_sentence_loss = loss_fct(seq_relationship_scores.view(-1, 2), labels.view(-1))
if not return_dict:
output = (seq_relationship_scores,) + outputs[2:]
return ((next_sentence_loss,) + output) if next_sentence_loss is not None else output
return NextSentencePredictorOutput(
loss=next_sentence_loss,
logits=seq_relationship_scores,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
@auto_docstring(
custom_intro="""
MegatronBert Model transformer with a sequence classification/regression head on top (a linear layer on top of the
pooled output) e.g. for GLUE tasks.
"""
)
| MegatronBertForNextSentencePrediction |
python | joke2k__faker | tests/providers/test_company.py | {
"start": 22366,
"end": 22844
} | class ____:
"""Test vi_VN company provider methods"""
def test_company_suffix(self, faker, num_samples):
for _ in range(num_samples):
suffix = faker.company_suffix()
assert isinstance(suffix, str)
assert suffix in ViVnCompanyProvider.company_suffixes
def test_company(self, faker, num_samples):
for _ in range(num_samples):
company = faker.company()
assert isinstance(company, str)
| TestViVn |
python | run-llama__llama_index | llama-index-core/tests/program/test_llm_program.py | {
"start": 477,
"end": 772
} | class ____(MagicMock):
def complete(self, prompt: str) -> CompletionResponse:
test_object = {"hello": "world"}
text = json.dumps(test_object)
return CompletionResponse(text=text)
@property
def metadata(self) -> LLMMetadata:
return LLMMetadata()
| MockLLM |
python | walkccc__LeetCode | solutions/1891. Cutting Ribbons/1891.py | {
"start": 0,
"end": 396
} | class ____:
def maxLength(self, ribbons: list[int], k: int) -> int:
def isCutPossible(length: int) -> bool:
count = 0
for ribbon in ribbons:
count += ribbon // length
return count >= k
l = 1
r = sum(ribbons) // k + 1
while l < r:
m = (l + r) // 2
if not isCutPossible(m):
r = m
else:
l = m + 1
return l - 1
| Solution |
python | great-expectations__great_expectations | contrib/experimental/great_expectations_experimental/expectations/expect_multicolumn_datetime_difference_to_be_less_than_two_months.py | {
"start": 600,
"end": 1868
} | class ____(MulticolumnMapMetricProvider):
condition_metric_name = (
"multicolumn_values.column_datetime_difference_to_be_less_than_two_months"
)
# These point your metric at the provided keys to facilitate calculation
condition_domain_keys = (
"batch_id",
"table",
"column_list",
"row_condition",
"condition_parser",
"ignore_row_if",
)
condition_value_keys = ("start_datetime", "end_datetime")
@multicolumn_condition_partial(engine=PandasExecutionEngine)
def _pandas(cls, dataframe, start_datetime, end_datetime, **kwargs):
def date_diff_in_months(row):
col_start = pd.to_datetime(row[start_datetime])
col_end = pd.to_datetime(row[end_datetime])
diff_days = abs(col_end.day - col_start.day)
if pd.isnull(col_start) or pd.isnull(col_end):
return True
diff_months = (col_end.year - col_start.year) * 12 + (col_end.month - col_start.month)
return abs(diff_months) < 2 or (abs(diff_months) == 2 and diff_days <= 0)
return dataframe.apply(lambda row: date_diff_in_months(row), axis=1)
# This class defines the Expectation itself
| MulticolumnDatetimeDifferenceToBeLessThanTwoMonths |
python | walkccc__LeetCode | solutions/3490. Count Beautiful Numbers/3490.py | {
"start": 0,
"end": 1092
} | class ____:
def beautifulNumbers(self, l: int, r: int) -> int:
@functools.lru_cache(None)
def dp(
s: str,
i: int,
tight: bool,
isLeadingZero: bool,
hasZero: bool,
sum: int,
prod: int,
) -> int:
if i == len(s):
if isLeadingZero:
return 0
return 1 if hasZero or prod % sum == 0 else 0
if not isLeadingZero and hasZero and not tight:
return 10 ** (len(s) - i)
res = 0
maxDigit = int(s[i]) if tight else 9
for d in range(maxDigit + 1):
nextTight = tight and (d == maxDigit)
nextIsLeadingZero = isLeadingZero and d == 0
nextHasZero = not nextIsLeadingZero and d == 0
nextProd = 1 if nextIsLeadingZero else prod * d
res += dp(s, i + 1, nextTight, nextIsLeadingZero,
nextHasZero, sum + d, nextProd)
return res
return (dp(str(r), 0, tight=True, isLeadingZero=True, hasZero=False, sum=0, prod=1) -
dp(str(l - 1), 0, tight=True, isLeadingZero=True, hasZero=False, sum=0, prod=1))
| Solution |
python | allegroai__clearml | clearml/backend_api/services/v2_9/models.py | {
"start": 91313,
"end": 93172
} | class ____(Response):
"""
Response of models.update endpoint.
:param updated: Number of models updated (0 or 1)
:type updated: int
:param fields: Updated fields names and values
:type fields: dict
"""
_service = "models"
_action = "update"
_version = "2.9"
_schema = {
"definitions": {},
"properties": {
"fields": {
"additionalProperties": True,
"description": "Updated fields names and values",
"type": ["object", "null"],
},
"updated": {
"description": "Number of models updated (0 or 1)",
"enum": [0, 1],
"type": ["integer", "null"],
},
},
"type": "object",
}
def __init__(self, updated: Optional[int] = None, fields: Optional[dict] = None, **kwargs: Any) -> None:
super(UpdateResponse, self).__init__(**kwargs)
self.updated = updated
self.fields = fields
@schema_property("updated")
def updated(self) -> Optional[int]:
return self._property_updated
@updated.setter
def updated(self, value: Optional[int]) -> None:
if value is None:
self._property_updated = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "updated", six.integer_types)
self._property_updated = value
@schema_property("fields")
def fields(self) -> Optional[dict]:
return self._property_fields
@fields.setter
def fields(self, value: Optional[dict]) -> None:
if value is None:
self._property_fields = None
return
self.assert_isinstance(value, "fields", (dict,))
self._property_fields = value
| UpdateResponse |
python | cherrypy__cherrypy | cherrypy/process/servers.py | {
"start": 11850,
"end": 13964
} | class ____(object):
"""Adapter for a flup.server.scgi.WSGIServer."""
def __init__(self, *args, **kwargs):
"""Initialize the SCGI server parameters."""
self.args = args
self.kwargs = kwargs
self.ready = False
def start(self):
"""Start the SCGI server."""
# We have to instantiate the server class here because its __init__
# starts a threadpool. If we do it too early, daemonize won't work.
from flup.server.scgi import WSGIServer
self.scgiserver = WSGIServer(*self.args, **self.kwargs)
# TODO: report this bug upstream to flup.
# If we don't set _oldSIGs on Windows, we get:
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 108, in run
# self._restoreSignalHandlers()
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 156, in _restoreSignalHandlers
# for signum,handler in self._oldSIGs:
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
self.scgiserver._installSignalHandlers = lambda: None
self.scgiserver._oldSIGs = []
self.ready = True
self.scgiserver.run()
def stop(self):
"""Stop the HTTP server."""
self.ready = False
# Forcibly stop the scgi server main event loop.
self.scgiserver._keepGoing = False
# Force all worker threads to die off.
self.scgiserver._threadPool.maxSpare = 0
@contextlib.contextmanager
def _safe_wait(host, port):
"""Warn when bind interface is ambiguous.
On systems where a loopback interface is not available and the
server is bound to all interfaces, it's difficult to determine
whether the server is in fact occupying the port. In this case,
just issue a warning and move on. See issue #1100.
"""
try:
yield
except portend.Timeout:
if host == portend.client_host(host):
raise
msg = 'Unable to verify that the server is bound on %r' % port
warnings.warn(msg)
| FlupSCGIServer |
python | airbytehq__airbyte | airbyte-ci/connectors/ci_credentials/ci_credentials/logger.py | {
"start": 179,
"end": 630
} | class ____(logging.Formatter):
"""Custom formatter for logging"""
converter = dt.datetime.fromtimestamp
def formatTime(self, record, datefmt=None):
"""! @brief redefinition of format of log"""
ct = self.converter(record.created)
if datefmt:
s = ct.strftime(datefmt)
else:
t = ct.strftime("%Y-%m-%d %H:%M:%S")
s = "%s,%03d" % (t, record.msecs)
return s
| MyFormatter |
python | docker__docker-py | docker/api/client.py | {
"start": 1413,
"end": 19429
} | class ____(
requests.Session,
BuildApiMixin,
ConfigApiMixin,
ContainerApiMixin,
DaemonApiMixin,
ExecApiMixin,
ImageApiMixin,
NetworkApiMixin,
PluginApiMixin,
SecretApiMixin,
ServiceApiMixin,
SwarmApiMixin,
VolumeApiMixin):
"""
A low-level client for the Docker Engine API.
Example:
>>> import docker
>>> client = docker.APIClient(base_url='unix://var/run/docker.sock')
>>> client.version()
{u'ApiVersion': u'1.33',
u'Arch': u'amd64',
u'BuildTime': u'2017-11-19T18:46:37.000000000+00:00',
u'GitCommit': u'f4ffd2511c',
u'GoVersion': u'go1.9.2',
u'KernelVersion': u'4.14.3-1-ARCH',
u'MinAPIVersion': u'1.12',
u'Os': u'linux',
u'Version': u'17.10.0-ce'}
Args:
base_url (str): URL to the Docker server. For example,
``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``.
version (str): The version of the API to use. Set to ``auto`` to
automatically detect the server's version. Default: ``1.35``
timeout (int): Default timeout for API calls, in seconds.
tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass
``True`` to enable it with default options, or pass a
:py:class:`~docker.tls.TLSConfig` object to use custom
configuration.
user_agent (str): Set a custom user agent for requests to the server.
credstore_env (dict): Override environment variables when calling the
credential store process.
use_ssh_client (bool): If set to `True`, an ssh connection is made
via shelling out to the ssh client. Ensure the ssh client is
installed and configured on the host.
max_pool_size (int): The maximum number of connections
to save in the pool.
"""
__attrs__ = requests.Session.__attrs__ + ['_auth_configs',
'_general_configs',
'_version',
'base_url',
'timeout']
def __init__(self, base_url=None, version=None,
timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
user_agent=DEFAULT_USER_AGENT, num_pools=None,
credstore_env=None, use_ssh_client=False,
max_pool_size=DEFAULT_MAX_POOL_SIZE):
super().__init__()
if tls and not base_url:
raise TLSParameterError(
'If using TLS, the base_url argument must be provided.'
)
self.base_url = base_url
self.timeout = timeout
self.headers['User-Agent'] = user_agent
self._general_configs = config.load_general_config()
proxy_config = self._general_configs.get('proxies', {})
try:
proxies = proxy_config[base_url]
except KeyError:
proxies = proxy_config.get('default', {})
self._proxy_configs = ProxyConfig.from_dict(proxies)
self._auth_configs = auth.load_config(
config_dict=self._general_configs, credstore_env=credstore_env,
)
self.credstore_env = credstore_env
base_url = utils.parse_host(
base_url, IS_WINDOWS_PLATFORM, tls=bool(tls)
)
# SSH has a different default for num_pools to all other adapters
num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if \
base_url.startswith('ssh://') else DEFAULT_NUM_POOLS
if base_url.startswith('http+unix://'):
self._custom_adapter = UnixHTTPAdapter(
base_url, timeout, pool_connections=num_pools,
max_pool_size=max_pool_size
)
self.mount('http+docker://', self._custom_adapter)
self._unmount('http://', 'https://')
# host part of URL should be unused, but is resolved by requests
# module in proxy_bypass_macosx_sysconf()
self.base_url = 'http+docker://localhost'
elif base_url.startswith('npipe://'):
if not IS_WINDOWS_PLATFORM:
raise DockerException(
'The npipe:// protocol is only supported on Windows'
)
try:
self._custom_adapter = NpipeHTTPAdapter(
base_url, timeout, pool_connections=num_pools,
max_pool_size=max_pool_size
)
except NameError as err:
raise DockerException(
'Install pypiwin32 package to enable npipe:// support'
) from err
self.mount('http+docker://', self._custom_adapter)
self.base_url = 'http+docker://localnpipe'
elif base_url.startswith('ssh://'):
try:
self._custom_adapter = SSHHTTPAdapter(
base_url, timeout, pool_connections=num_pools,
max_pool_size=max_pool_size, shell_out=use_ssh_client
)
except NameError as err:
raise DockerException(
'Install paramiko package to enable ssh:// support'
) from err
self.mount('http+docker://ssh', self._custom_adapter)
self._unmount('http://', 'https://')
self.base_url = 'http+docker://ssh'
else:
# Use SSLAdapter for the ability to specify SSL version
if isinstance(tls, TLSConfig):
tls.configure_client(self)
elif tls:
self._custom_adapter = requests.adapters.HTTPAdapter(
pool_connections=num_pools)
self.mount('https://', self._custom_adapter)
self.base_url = base_url
# version detection needs to be after unix adapter mounting
if version is None or (isinstance(
version,
str
) and version.lower() == 'auto'):
self._version = self._retrieve_server_version()
else:
self._version = version
if not isinstance(self._version, str):
raise DockerException(
'Version parameter must be a string or None. '
f'Found {type(version).__name__}'
)
if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
raise InvalidVersion(
f'API versions below {MINIMUM_DOCKER_API_VERSION} are '
f'no longer supported by this library.'
)
def _retrieve_server_version(self):
try:
return self.version(api_version=False)["ApiVersion"]
except KeyError as ke:
raise DockerException(
'Invalid response from docker daemon: key "ApiVersion"'
' is missing.'
) from ke
except Exception as e:
raise DockerException(
f'Error while fetching server API version: {e}'
) from e
def _set_request_timeout(self, kwargs):
"""Prepare the kwargs for an HTTP request by inserting the timeout
parameter, if not already present."""
kwargs.setdefault('timeout', self.timeout)
return kwargs
@update_headers
def _post(self, url, **kwargs):
return self.post(url, **self._set_request_timeout(kwargs))
@update_headers
def _get(self, url, **kwargs):
return self.get(url, **self._set_request_timeout(kwargs))
@update_headers
def _put(self, url, **kwargs):
return self.put(url, **self._set_request_timeout(kwargs))
@update_headers
def _delete(self, url, **kwargs):
return self.delete(url, **self._set_request_timeout(kwargs))
def _url(self, pathfmt, *args, **kwargs):
for arg in args:
if not isinstance(arg, str):
raise ValueError(
f'Expected a string but found {arg} ({type(arg)}) instead'
)
quote_f = partial(urllib.parse.quote, safe="/:")
args = map(quote_f, args)
formatted_path = pathfmt.format(*args)
if kwargs.get('versioned_api', True):
return f'{self.base_url}/v{self._version}{formatted_path}'
else:
return f'{self.base_url}{formatted_path}'
def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred."""
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
raise create_api_error_from_http_exception(e) from e
def _result(self, response, json=False, binary=False):
assert not (json and binary)
self._raise_for_status(response)
if json:
return response.json()
if binary:
return response.content
return response.text
def _post_json(self, url, data, **kwargs):
# Go <1.1 can't unserialize null to a string
# so we do this disgusting thing here.
data2 = {}
if data is not None and isinstance(data, dict):
for k, v in iter(data.items()):
if v is not None:
data2[k] = v
elif data is not None:
data2 = data
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['Content-Type'] = 'application/json'
return self._post(url, data=json.dumps(data2), **kwargs)
def _attach_params(self, override=None):
return override or {
'stdout': 1,
'stderr': 1,
'stream': 1
}
@check_resource('container')
def _attach_websocket(self, container, params=None):
url = self._url("/containers/{0}/attach/ws", container)
req = requests.Request("POST", url, params=self._attach_params(params))
full_url = req.prepare().url
full_url = full_url.replace("http://", "ws://", 1)
full_url = full_url.replace("https://", "wss://", 1)
return self._create_websocket_connection(full_url)
def _create_websocket_connection(self, url):
try:
import websocket
return websocket.create_connection(url)
except ImportError as ie:
raise DockerException(
'The `websocket-client` library is required '
'for using websocket connections. '
'You can install the `docker` library '
'with the [websocket] extra to install it.'
) from ie
def _get_raw_response_socket(self, response):
self._raise_for_status(response)
if self.base_url == "http+docker://localnpipe":
sock = response.raw._fp.fp.raw.sock
elif self.base_url.startswith('http+docker://ssh'):
sock = response.raw._fp.fp.channel
else:
sock = response.raw._fp.fp.raw
if self.base_url.startswith("https://"):
sock = sock._sock
try:
# Keep a reference to the response to stop it being garbage
# collected. If the response is garbage collected, it will
# close TLS sockets.
sock._response = response
except AttributeError:
# UNIX sockets can't have attributes set on them, but that's
# fine because we won't be doing TLS over them
pass
return sock
def _stream_helper(self, response, decode=False):
"""Generator for data coming from a chunked-encoded HTTP response."""
if response.raw._fp.chunked:
if decode:
yield from json_stream(self._stream_helper(response, False))
else:
reader = response.raw
while not reader.closed:
# this read call will block until we get a chunk
data = reader.read(1)
if not data:
break
if reader._fp.chunk_left:
data += reader.read(reader._fp.chunk_left)
yield data
else:
# Response isn't chunked, meaning we probably
# encountered an error immediately
yield self._result(response, json=decode)
def _multiplexed_buffer_helper(self, response):
"""A generator of multiplexed data blocks read from a buffered
response."""
buf = self._result(response, binary=True)
buf_length = len(buf)
walker = 0
while True:
if buf_length - walker < STREAM_HEADER_SIZE_BYTES:
break
header = buf[walker:walker + STREAM_HEADER_SIZE_BYTES]
_, length = struct.unpack_from('>BxxxL', header)
start = walker + STREAM_HEADER_SIZE_BYTES
end = start + length
walker = end
yield buf[start:end]
def _multiplexed_response_stream_helper(self, response):
"""A generator of multiplexed data blocks coming from a response
stream."""
# Disable timeout on the underlying socket to prevent
# Read timed out(s) for long running processes
socket = self._get_raw_response_socket(response)
self._disable_socket_timeout(socket)
while True:
header = response.raw.read(STREAM_HEADER_SIZE_BYTES)
if not header:
break
_, length = struct.unpack('>BxxxL', header)
if not length:
continue
data = response.raw.read(length)
if not data:
break
yield data
def _stream_raw_result(self, response, chunk_size=1, decode=True):
''' Stream result for TTY-enabled container and raw binary data'''
self._raise_for_status(response)
# Disable timeout on the underlying socket to prevent
# Read timed out(s) for long running processes
socket = self._get_raw_response_socket(response)
self._disable_socket_timeout(socket)
yield from response.iter_content(chunk_size, decode)
def _read_from_socket(self, response, stream, tty=True, demux=False):
"""Consume all data from the socket, close the response and return the
data. If stream=True, then a generator is returned instead and the
caller is responsible for closing the response.
"""
socket = self._get_raw_response_socket(response)
gen = frames_iter(socket, tty)
if demux:
# The generator will output tuples (stdout, stderr)
gen = (demux_adaptor(*frame) for frame in gen)
else:
# The generator will output strings
gen = (data for (_, data) in gen)
if stream:
return gen
else:
try:
# Wait for all frames, concatenate them, and return the result
return consume_socket_output(gen, demux=demux)
finally:
response.close()
def _disable_socket_timeout(self, socket):
""" Depending on the combination of python version and whether we're
connecting over http or https, we might need to access _sock, which
may or may not exist; or we may need to just settimeout on socket
itself, which also may or may not have settimeout on it. To avoid
missing the correct one, we try both.
We also do not want to set the timeout if it is already disabled, as
you run the risk of changing a socket that was non-blocking to
blocking, for example when using gevent.
"""
sockets = [socket, getattr(socket, '_sock', None)]
for s in sockets:
if not hasattr(s, 'settimeout'):
continue
timeout = -1
if hasattr(s, 'gettimeout'):
timeout = s.gettimeout()
# Don't change the timeout if it is already disabled.
if timeout is None or timeout == 0.0:
continue
s.settimeout(None)
@check_resource('container')
def _check_is_tty(self, container):
cont = self.inspect_container(container)
return cont['Config']['Tty']
def _get_result(self, container, stream, res):
return self._get_result_tty(stream, res, self._check_is_tty(container))
def _get_result_tty(self, stream, res, is_tty):
# We should also use raw streaming (without keep-alives)
# if we're dealing with a tty-enabled container.
if is_tty:
return self._stream_raw_result(res) if stream else \
self._result(res, binary=True)
self._raise_for_status(res)
sep = b''
if stream:
return self._multiplexed_response_stream_helper(res)
else:
return sep.join(
list(self._multiplexed_buffer_helper(res))
)
def _unmount(self, *args):
for proto in args:
self.adapters.pop(proto)
def get_adapter(self, url):
try:
return super().get_adapter(url)
except requests.exceptions.InvalidSchema as e:
if self._custom_adapter:
return self._custom_adapter
else:
raise e
@property
def api_version(self):
return self._version
def reload_config(self, dockercfg_path=None):
"""
Force a reload of the auth configuration
Args:
dockercfg_path (str): Use a custom path for the Docker config file
(default ``$HOME/.docker/config.json`` if present,
otherwise ``$HOME/.dockercfg``)
Returns:
None
"""
self._auth_configs = auth.load_config(
dockercfg_path, credstore_env=self.credstore_env
)
| APIClient |
python | ray-project__ray | python/ray/dashboard/modules/job/job_head.py | {
"start": 1837,
"end": 3333
} | class ____(BaseModel, extra=Extra.allow):
"""
Pydantic model used to inform if a particular Ray component can be considered
active, and metadata about observation.
"""
is_active: RayActivityStatus = Field(
...,
description=(
"Whether the corresponding Ray component is considered active or inactive, "
"or if there was an error while collecting this observation."
),
)
reason: Optional[str] = Field(
None, description="Reason if Ray component is considered active or errored."
)
timestamp: float = Field(
...,
description=(
"Timestamp of when this observation about the Ray component was made. "
"This is in the format of seconds since unix epoch."
),
)
last_activity_at: Optional[float] = Field(
None,
description=(
"Timestamp when last actvity of this Ray component finished in format of "
"seconds since unix epoch. This field does not need to be populated "
"for Ray components where it is not meaningful."
),
)
@validator("reason", always=True)
def reason_required(cls, v, values, **kwargs):
if "is_active" in values and values["is_active"] != RayActivityStatus.INACTIVE:
if v is None:
raise ValueError(
'Reason is required if is_active is "active" or "error"'
)
return v
| RayActivityResponse |
python | huggingface__transformers | src/transformers/models/wav2vec2_bert/modeling_wav2vec2_bert.py | {
"start": 1370,
"end": 2992
} | class ____(nn.Module):
"""Rotary positional embedding
Reference : https://blog.eleuther.ai/rotary-embeddings/ Paper: https://huggingface.co/papers/2104.09864
"""
def __init__(self, config):
super().__init__()
dim = config.hidden_size // config.num_attention_heads
base = config.rotary_embedding_base
inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.int64).float() / dim))
# Ignore copy
self.register_buffer("inv_freq", inv_freq, persistent=False)
self.cached_sequence_length = None
self.cached_rotary_positional_embedding = None
def forward(self, hidden_states):
sequence_length = hidden_states.shape[1]
if sequence_length == self.cached_sequence_length and self.cached_rotary_positional_embedding is not None:
return self.cached_rotary_positional_embedding
self.cached_sequence_length = sequence_length
# Embeddings are computed in the dtype of the inv_freq constant
time_stamps = torch.arange(sequence_length).type_as(self.inv_freq)
freqs = torch.einsum("i,j->ij", time_stamps, self.inv_freq)
embeddings = torch.cat((freqs, freqs), dim=-1)
cos_embeddings = embeddings.cos()[:, None, None, :]
sin_embeddings = embeddings.sin()[:, None, None, :]
# Computed embeddings are cast to the dtype of the hidden state inputs
self.cached_rotary_positional_embedding = torch.stack([cos_embeddings, sin_embeddings]).type_as(hidden_states)
return self.cached_rotary_positional_embedding
| Wav2Vec2BertRotaryPositionalEmbedding |
python | HypothesisWorks__hypothesis | hypothesis-python/tests/attrs/test_inference.py | {
"start": 2472,
"end": 2516
} | class ____:
a = attr.ib()
@attr.s
| Required |
python | imageio__imageio | imageio/plugins/_swf.py | {
"start": 9666,
"end": 9840
} | class ____(ControlTag):
def __init__(self):
ControlTag.__init__(self)
self.tagtype = 1
def process_tag(self):
self.bytes = bytes()
| ShowFrameTag |
python | doocs__leetcode | solution/1300-1399/1353.Maximum Number of Events That Can Be Attended/Solution.py | {
"start": 0,
"end": 512
} | class ____:
def maxEvents(self, events: List[List[int]]) -> int:
g = defaultdict(list)
l, r = inf, 0
for s, e in events:
g[s].append(e)
l = min(l, s)
r = max(r, e)
pq = []
ans = 0
for s in range(l, r + 1):
while pq and pq[0] < s:
heappop(pq)
for e in g[s]:
heappush(pq, e)
if pq:
heappop(pq)
ans += 1
return ans
| Solution |
python | ray-project__ray | rllib/models/torch/torch_modelv2.py | {
"start": 331,
"end": 2710
} | class ____(ModelV2):
"""Torch version of ModelV2.
Note that this class by itself is not a valid model unless you
inherit from nn.Module and implement forward() in a subclass."""
def __init__(
self,
obs_space: gym.spaces.Space,
action_space: gym.spaces.Space,
num_outputs: int,
model_config: ModelConfigDict,
name: str,
):
"""Initialize a TorchModelV2.
Here is an example implementation for a subclass
``MyModelClass(TorchModelV2, nn.Module)``::
def __init__(self, *args, **kwargs):
TorchModelV2.__init__(self, *args, **kwargs)
nn.Module.__init__(self)
self._hidden_layers = nn.Sequential(...)
self._logits = ...
self._value_branch = ...
"""
if not isinstance(self, nn.Module):
raise ValueError(
"Subclasses of TorchModelV2 must also inherit from "
"nn.Module, e.g., MyModel(TorchModelV2, nn.Module)"
)
ModelV2.__init__(
self,
obs_space,
action_space,
num_outputs,
model_config,
name,
framework="torch",
)
# Dict to store per multi-gpu tower stats into.
# In PyTorch multi-GPU, we use a single TorchPolicy and copy
# it's Model(s) n times (1 copy for each GPU). When computing the loss
# on each tower, we cannot store the stats (e.g. `entropy`) inside the
# policy object as this would lead to race conditions between the
# different towers all accessing the same property at the same time.
self.tower_stats = {}
@override(ModelV2)
def variables(
self, as_dict: bool = False
) -> Union[List[TensorType], Dict[str, TensorType]]:
p = list(self.parameters())
if as_dict:
return {k: p[i] for i, k in enumerate(self.state_dict().keys())}
return p
@override(ModelV2)
def trainable_variables(
self, as_dict: bool = False
) -> Union[List[TensorType], Dict[str, TensorType]]:
if as_dict:
return {
k: v for k, v in self.variables(as_dict=True).items() if v.requires_grad
}
return [v for v in self.variables() if v.requires_grad]
| TorchModelV2 |
python | django__django | django/http/response.py | {
"start": 24069,
"end": 24137
} | class ____(HttpResponse):
status_code = 400
| HttpResponseBadRequest |
python | tensorflow__tensorflow | tensorflow/compiler/mlir/tfr/python/composite.py | {
"start": 772,
"end": 1944
} | class ____(object):
"""A decorator to register a function as a composition for an TF operator.
The argument to the decorator must be the name of a TF raw operator the
function composites for. Decorated function must take positional arguments
which corresponds to the input and attributes in OpDef of the TF operation.
# TODO(fengliuai): more documents here.
Example:
@composite.Composite('AddN')
def _compose_add_n(inputs, N):
if N == 1:
....
"""
# TODO(fengliuai): support input_binding and output_binding so the arguments
# are not positional.
def __init__(self,
op_name,
inputs=None,
attrs=None,
derived_attrs=None,
outputs=None):
self._op_name = op_name
self._inputs = inputs
self._attrs = attrs
self._derived_attrs = derived_attrs
self._outputs = outputs
def __call__(self, compose_fn):
# TODO(fengliuai): more sanity check of the input function and make sure
# the bounded arguments of the function matches the 'inputs' and 'attrs'.
setattr(compose_fn, '_tfr_op_name', self._op_name)
return compose_fn
| Composite |
python | mlflow__mlflow | mlflow/store/artifact/optimized_s3_artifact_repo.py | {
"start": 1172,
"end": 16563
class ____(CloudArtifactRepository):
    """
    An optimized version of the S3 Artifact Repository.

    This class is used for uploading and downloading S3 artifacts for UC models. While it largely
    copies the behavior of the S3ArtifactRepository, the `log_artifact`, `log_artifacts`, and
    `_download_file` methods are optimized by replacing boto3 client operations with the use of
    presigned URLs for both uploads and downloads.
    """

    def __init__(
        self,
        artifact_uri,
        access_key_id=None,
        secret_access_key=None,
        session_token=None,
        credential_refresh_def=None,
        addressing_style=None,
        s3_endpoint_url=None,
        s3_upload_extra_args=None,
        tracking_uri=None,
        registry_uri: str | None = None,
    ):
        super().__init__(artifact_uri, tracking_uri=tracking_uri, registry_uri=registry_uri)
        self._access_key_id = access_key_id
        self._secret_access_key = secret_access_key
        self._session_token = session_token
        # Optional callable that returns fresh credentials; consumed by
        # _refresh_credentials when a request fails with stale credentials.
        self._credential_refresh_def = credential_refresh_def
        self._addressing_style = addressing_style
        self._s3_endpoint_url = s3_endpoint_url
        self.bucket, self.bucket_path = self.parse_s3_compliant_uri(self.artifact_uri)
        # Extra request params enforcing the expected bucket owner, when the
        # corresponding environment variable is configured.
        self._bucket_owner_params = (
            {"ExpectedBucketOwner": owner}
            if (owner := MLFLOW_S3_EXPECTED_BUCKET_OWNER.get())
            else {}
        )
        self._region_name = self._get_region_name()
        self._s3_upload_extra_args = s3_upload_extra_args or {}

    def _refresh_credentials(self):
        """Re-fetch credentials via ``credential_refresh_def`` (when set),
        update the cached fields, and return a new S3 client."""
        if not self._credential_refresh_def:
            return self._get_s3_client()
        new_creds = self._credential_refresh_def()
        self._access_key_id = new_creds["access_key_id"]
        self._secret_access_key = new_creds["secret_access_key"]
        self._session_token = new_creds["session_token"]
        self._s3_upload_extra_args = new_creds["s3_upload_extra_args"]
        return self._get_s3_client()

    def _get_region_name(self):
        """Resolve the bucket's region via HeadBucket, falling back to the
        ``x-amz-bucket-region`` HTTP header on permission errors."""
        from botocore.exceptions import ClientError

        # A region-less client is used just for this probe.
        temp_client = _get_s3_client(
            addressing_style=self._addressing_style,
            access_key_id=self._access_key_id,
            secret_access_key=self._secret_access_key,
            session_token=self._session_token,
            s3_endpoint_url=self._s3_endpoint_url,
        )
        try:
            head_bucket_resp = temp_client.head_bucket(
                Bucket=self.bucket, **self._bucket_owner_params
            )
            # A normal response will have the region in the Bucket_Region field of the response
            if _BUCKET_REGION in head_bucket_resp:
                return head_bucket_resp[_BUCKET_REGION]
            # If the bucket exists but the caller does not have permissions, the http headers
            # are passed back as part of the metadata of a normal, non-throwing response. In
            # this case we use the x-amz-bucket-region field of the HTTP headers which should
            # always be populated with the region.
            if (
                _RESPONSE_METADATA in head_bucket_resp
                and _HTTP_HEADERS in head_bucket_resp[_RESPONSE_METADATA]
                and _HTTP_HEADER_BUCKET_REGION
                in head_bucket_resp[_RESPONSE_METADATA][_HTTP_HEADERS]
            ):
                return head_bucket_resp[_RESPONSE_METADATA][_HTTP_HEADERS][
                    _HTTP_HEADER_BUCKET_REGION
                ]
            # Directory buckets do not have a Bucket_Region and instead have a
            # Bucket_Location_Name. This name cannot be used as the region name
            # however, so we warn that this has happened and allow the exception
            # at the end to be raised.
            if _BUCKET_LOCATION_NAME in head_bucket_resp:
                _logger.warning(
                    f"Directory bucket {self.bucket} found with BucketLocationName "
                    f"{head_bucket_resp[_BUCKET_LOCATION_NAME]}."
                )
            raise Exception(f"Unable to get the region name for bucket {self.bucket}.")
        except ClientError as error:
            # If a client error occurs, we check to see if the x-amz-bucket-region field is set
            # in the response and return that. If it is not present, this will raise due to the
            # key not being present.
            return error.response[_RESPONSE_METADATA][_HTTP_HEADERS][_HTTP_HEADER_BUCKET_REGION]

    def _get_s3_client(self):
        """Build an S3 client from the currently cached credentials/region."""
        return _get_s3_client(
            addressing_style=self._addressing_style,
            access_key_id=self._access_key_id,
            secret_access_key=self._secret_access_key,
            session_token=self._session_token,
            region_name=self._region_name,
            s3_endpoint_url=self._s3_endpoint_url,
        )

    def parse_s3_compliant_uri(self, uri):
        """Parse an S3 URI, returning (bucket, path)."""
        parsed = urllib.parse.urlparse(uri)
        if parsed.scheme != "s3":
            raise Exception(f"Not an S3 URI: {uri}")
        path = parsed.path
        # Drop the leading "/" so the key path is bucket-relative.
        path = path.removeprefix("/")
        return parsed.netloc, path

    @staticmethod
    def get_s3_file_upload_extra_args():
        """Return extra upload args from the environment (JSON), or None."""
        if s3_file_upload_extra_args := MLFLOW_S3_UPLOAD_EXTRA_ARGS.get():
            return json.loads(s3_file_upload_extra_args)
        else:
            return None

    def _upload_file(self, s3_client, local_file, bucket, key):
        """Upload a single file, retrying once with refreshed credentials."""
        extra_args = {}
        extra_args.update(self._s3_upload_extra_args)
        # Propagate guessed content type/encoding so downloads behave well.
        guessed_type, guessed_encoding = guess_type(local_file)
        if guessed_type is not None:
            extra_args["ContentType"] = guessed_type
        if guessed_encoding is not None:
            extra_args["ContentEncoding"] = guessed_encoding
        extra_args.update(self._bucket_owner_params)
        # Environment-provided extra args take precedence over the above.
        environ_extra_args = self.get_s3_file_upload_extra_args()
        if environ_extra_args is not None:
            extra_args.update(environ_extra_args)

        def try_func(creds):
            creds.upload_file(Filename=local_file, Bucket=bucket, Key=key, ExtraArgs=extra_args)

        _retry_with_new_creds(
            try_func=try_func, creds_func=self._refresh_credentials, orig_creds=s3_client
        )

    def log_artifact(self, local_file, artifact_path=None):
        """Log a local file as an artifact, optionally under artifact_path."""
        artifact_file_path = os.path.basename(local_file)
        if artifact_path:
            artifact_file_path = posixpath.join(artifact_path, artifact_file_path)
        self._upload_to_cloud(
            cloud_credential_info=self._get_s3_client(),
            src_file_path=local_file,
            artifact_file_path=artifact_file_path,
        )

    def _get_write_credential_infos(self, remote_file_paths):
        """
        Instead of returning ArtifactCredentialInfo objects, we instead return a list of initialized
        S3 client. We do so because S3 clients cannot be instantiated within each thread.
        """
        return [self._get_s3_client() for _ in remote_file_paths]

    def _upload_to_cloud(self, cloud_credential_info, src_file_path, artifact_file_path):
        """Upload one file, choosing multipart upload for large files."""
        dest_path = posixpath.join(self.bucket_path, artifact_file_path)
        key = posixpath.normpath(dest_path)
        if (
            MLFLOW_ENABLE_MULTIPART_UPLOAD.get()
            and os.path.getsize(src_file_path) > MLFLOW_MULTIPART_UPLOAD_CHUNK_SIZE.get()
        ):
            self._multipart_upload(cloud_credential_info, src_file_path, self.bucket, key)
        else:
            self._upload_file(cloud_credential_info, src_file_path, self.bucket, key)

    def _multipart_upload(self, cloud_credential_info, local_file, bucket, key):
        """Upload ``local_file`` via S3 multipart upload using presigned
        per-part URLs; aborts the upload on any failure."""
        # Create multipart upload
        s3_client = cloud_credential_info
        response = s3_client.create_multipart_upload(
            Bucket=bucket, Key=key, **self._bucket_owner_params
        )
        upload_id = response["UploadId"]
        num_parts = _compute_num_chunks(local_file, MLFLOW_MULTIPART_UPLOAD_CHUNK_SIZE.get())
        _validate_chunk_size_aws(MLFLOW_MULTIPART_UPLOAD_CHUNK_SIZE.get())

        # define helper functions for uploading data
        def _upload_part(part_number, local_file, start_byte, size):
            data = read_chunk(local_file, size, start_byte)

            def try_func(creds):
                # Create presigned URL for each part
                presigned_url = creds.generate_presigned_url(
                    "upload_part",
                    Params={
                        "Bucket": bucket,
                        "Key": key,
                        "UploadId": upload_id,
                        "PartNumber": part_number,
                        **self._bucket_owner_params,
                    },
                )
                with cloud_storage_http_request("put", presigned_url, data=data) as response:
                    augmented_raise_for_status(response)
                    # The ETag of each part is required to complete the upload.
                    return response.headers["ETag"]

            return _retry_with_new_creds(
                try_func=try_func, creds_func=self._refresh_credentials, orig_creds=s3_client
            )

        try:
            # Upload each part with retries
            futures = {}
            for index in range(num_parts):
                part_number = index + 1
                start_byte = index * MLFLOW_MULTIPART_UPLOAD_CHUNK_SIZE.get()
                future = self.chunk_thread_pool.submit(
                    _upload_part,
                    part_number=part_number,
                    local_file=local_file,
                    start_byte=start_byte,
                    size=MLFLOW_MULTIPART_UPLOAD_CHUNK_SIZE.get(),
                )
                futures[future] = part_number

            results, errors = _complete_futures(futures, local_file)
            if errors:
                raise MlflowException(
                    f"Failed to upload at least one part of {local_file}. Errors: {errors}"
                )
            # Parts must be supplied in ascending PartNumber order.
            parts = [
                {"PartNumber": part_number, "ETag": results[part_number]}
                for part_number in sorted(results)
            ]

            # Complete multipart upload
            s3_client.complete_multipart_upload(
                Bucket=bucket,
                Key=key,
                UploadId=upload_id,
                MultipartUpload={"Parts": parts},
                **self._bucket_owner_params,
            )
        except Exception as e:
            _logger.warning(
                "Encountered an unexpected error during multipart upload: %s, aborting", e
            )
            # Abort so S3 does not keep billing for orphaned parts.
            s3_client.abort_multipart_upload(
                Bucket=bucket,
                Key=key,
                UploadId=upload_id,
                **self._bucket_owner_params,
            )
            raise e

    def list_artifacts(self, path=None):
        """List files/directories under ``path`` as sorted FileInfo objects."""
        artifact_path = self.bucket_path
        dest_path = self.bucket_path
        if path:
            dest_path = posixpath.join(dest_path, path)
        infos = []
        dest_path = dest_path.rstrip("/") if dest_path else ""
        prefix = dest_path + "/" if dest_path else ""
        s3_client = self._get_s3_client()
        paginator = s3_client.get_paginator("list_objects_v2")
        results = paginator.paginate(
            Bucket=self.bucket,
            Prefix=prefix,
            Delimiter="/",
            **self._bucket_owner_params,
        )
        for result in results:
            # Subdirectories will be listed as "common prefixes" due to the way we made the request
            for obj in result.get("CommonPrefixes", []):
                subdir_path = obj.get("Prefix")
                self._verify_listed_object_contains_artifact_path_prefix(
                    listed_object_path=subdir_path, artifact_path=artifact_path
                )
                subdir_rel_path = posixpath.relpath(path=subdir_path, start=artifact_path)
                subdir_rel_path = subdir_rel_path.removesuffix("/")
                infos.append(FileInfo(subdir_rel_path, True, None))
            # Objects listed directly will be files
            for obj in result.get("Contents", []):
                file_path = obj.get("Key")
                self._verify_listed_object_contains_artifact_path_prefix(
                    listed_object_path=file_path, artifact_path=artifact_path
                )
                file_rel_path = posixpath.relpath(path=file_path, start=artifact_path)
                file_size = int(obj.get("Size"))
                infos.append(FileInfo(file_rel_path, False, file_size))
        return sorted(infos, key=lambda f: f.path)

    @staticmethod
    def _verify_listed_object_contains_artifact_path_prefix(listed_object_path, artifact_path):
        """Guard against S3 listings escaping the repository's root prefix."""
        if not listed_object_path.startswith(artifact_path):
            raise MlflowException(
                "The path of the listed S3 object does not begin with the specified"
                f" artifact path. Artifact path: {artifact_path}. Object path:"
                f" {listed_object_path}."
            )

    def _get_presigned_uri(self, remote_file_path):
        """Create a presigned GET URL for a file in the repository."""
        s3_client = self._get_s3_client()
        s3_full_path = posixpath.join(self.bucket_path, remote_file_path)
        return s3_client.generate_presigned_url(
            "get_object",
            Params={
                "Bucket": self.bucket,
                "Key": s3_full_path,
                **self._bucket_owner_params,
            },
        )

    def _get_read_credential_infos(self, remote_file_paths):
        """Return presigned-URL credential infos for each remote path."""
        return [
            ArtifactCredentialInfo(signed_uri=self._get_presigned_uri(path))
            for path in remote_file_paths
        ]

    def _download_from_cloud(self, remote_file_path, local_path):
        """Download a file, retrying once with refreshed credentials."""
        s3_client = self._get_s3_client()
        s3_full_path = posixpath.join(self.bucket_path, remote_file_path)

        def try_func(creds):
            download_kwargs = (
                {"ExtraArgs": self._bucket_owner_params} if self._bucket_owner_params else {}
            )
            creds.download_file(self.bucket, s3_full_path, local_path, **download_kwargs)

        _retry_with_new_creds(
            try_func=try_func, creds_func=self._refresh_credentials, orig_creds=s3_client
        )

    def delete_artifacts(self, artifact_path=None):
        """Delete all objects under ``artifact_path`` (batched per listing page)."""
        dest_path = self.bucket_path
        if artifact_path:
            dest_path = posixpath.join(dest_path, artifact_path)
        dest_path = dest_path.rstrip("/") if dest_path else ""
        s3_client = self._get_s3_client()
        paginator = s3_client.get_paginator("list_objects_v2")
        results = paginator.paginate(
            Bucket=self.bucket,
            Prefix=dest_path,
            **self._bucket_owner_params,
        )
        for result in results:
            keys = []
            for to_delete_obj in result.get("Contents", []):
                file_path = to_delete_obj.get("Key")
                self._verify_listed_object_contains_artifact_path_prefix(
                    listed_object_path=file_path, artifact_path=dest_path
                )
                keys.append({"Key": file_path})
            if keys:
                s3_client.delete_objects(
                    Bucket=self.bucket,
                    Delete={"Objects": keys},
                    **self._bucket_owner_params,
                )
| OptimizedS3ArtifactRepository |
python | charliermarsh__ruff | scripts/check_docs_formatted.py | {
"start": 4092,
"end": 4163
class ____(Exception):
    """A code block parse error (raised while checking docs formatting)."""
| CodeBlockError |
python | dask__distributed | distributed/dashboard/components/scheduler.py | {
"start": 56204,
"end": 59845
class ____(DashboardComponent):
    """Tasks and CPU usage on each worker.

    Renders two horizontal bar charts side by side: tasks currently
    processing per worker, and CPU utilization per worker.
    """

    @log_errors
    def __init__(self, scheduler, width=600, **kwargs):
        # Timestamp of the last update; used to throttle refreshes.
        self.last = 0
        self.scheduler = scheduler
        self.source = ColumnDataSource(
            {
                "nprocessing": [],
                "nprocessing-half": [],  # bar centers (rects are centered on x)
                "nprocessing-color": [],
                "cpu": [],
                "cpu-half": [],
                "y": [],
                "worker": [],
                "escaped_worker": [],
            }
        )

        processing = figure(
            title="Tasks Processing",
            tools="",
            name="processing",
            width=int(width / 2),
            min_border_bottom=50,
            **kwargs,
        )
        rect = processing.rect(
            source=self.source,
            x="nprocessing-half",
            y="y",
            width="nprocessing",
            height=0.9,
            color="nprocessing-color",
        )
        processing.x_range.start = 0
        rect.nonselection_glyph = None

        cpu = figure(
            title="CPU Utilization",
            tools="",
            width=int(width / 2),
            name="cpu_hist",
            x_range=(0, 100),
            min_border_bottom=50,
            **kwargs,
        )
        rect = cpu.rect(
            source=self.source,
            x="cpu-half",
            y="y",
            width="cpu",
            height=0.9,
            color="blue",
        )
        rect.nonselection_glyph = None

        # Shared styling: hide axes, add click-through to the worker page.
        for fig in (processing, cpu):
            fig.xaxis.minor_tick_line_alpha = 0
            fig.yaxis.visible = False
            fig.ygrid.visible = False

            tap = TapTool(callback=OpenURL(url="./info/worker/@escaped_worker.html"))
            fig.add_tools(tap)

            fig.toolbar_location = None
            fig.yaxis.visible = False

        hover = HoverTool()
        hover.tooltips = "@worker : @nprocessing tasks"
        hover.point_policy = "follow_mouse"
        processing.add_tools(hover)

        hover = HoverTool()
        hover.tooltips = "@worker : @cpu %"
        hover.point_policy = "follow_mouse"
        cpu.add_tools(hover)

        self.processing_figure = processing
        self.cpu_figure = cpu

    @without_property_validation
    @log_errors
    def update(self):
        """Refresh both charts from current scheduler state (throttled to at
        most once per second while all workers are idle)."""
        workers = self.scheduler.workers.values()
        now = time()
        if not any(ws.processing for ws in workers) and now < self.last + 1:
            return
        self.last = now

        cpu = [int(ws.metrics["cpu"]) for ws in workers]
        nprocessing = [len(ws.processing) for ws in workers]

        # Color encodes worker load state: red=idle, green=saturated, blue=normal.
        nprocessing_color = []
        for ws in workers:
            if ws in self.scheduler.idle:
                nprocessing_color.append("red")
            elif ws in self.scheduler.saturated:
                nprocessing_color.append("green")
            else:
                nprocessing_color.append("blue")

        result = {
            "cpu": cpu,
            "cpu-half": [c / 2 for c in cpu],
            "nprocessing": nprocessing,
            "nprocessing-half": [np / 2 for np in nprocessing],
            "nprocessing-color": nprocessing_color,
            "worker": [ws.address for ws in workers],
            "escaped_worker": [url_escape(ws.address) for ws in workers],
            "y": list(range(len(workers))),
        }

        # Scale the CPU axis so 100% per thread fits the widest worker.
        if self.scheduler.workers:
            xrange = max(ws.nthreads or 1 for ws in workers)
        else:
            xrange = 1
        self.cpu_figure.x_range.end = xrange * 100

        update(self.source, result)
| CurrentLoad |
python | python__mypy | test-data/unit/plugins/dyn_class_from_method.py | {
"start": 347,
"end": 2574
class ____(Plugin):
    """Test plugin that registers dynamic-class hooks for calls whose
    fully qualified name mentions ``from_queryset`` or ``as_manager``."""

    def get_dynamic_class_hook(
        self, fullname: str
    ) -> Callable[[DynamicClassDefContext], None] | None:
        # Substring matching is intentional: it catches any attribute path
        # ending in these call names.
        if "from_queryset" in fullname:
            return add_info_hook
        if "as_manager" in fullname:
            return as_manager_hook
        return None
def add_info_hook(ctx: DynamicClassDefContext) -> None:
    """Synthesize a new class whose base is the queryset type passed as the
    first argument of the ``from_queryset(...)`` call."""
    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    # First call argument names the queryset class to inherit from.
    assert isinstance(ctx.call.args[0], RefExpr)
    queryset_type_fullname = ctx.call.args[0].fullname
    queryset_node = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname)
    assert queryset_node is not None
    queryset_info = queryset_node.node
    assert isinstance(queryset_info, TypeInfo)
    obj = ctx.api.named_type("builtins.object")
    # MRO: new class -> queryset class -> object.
    info.mro = [info, queryset_info, obj.type]
    info.bases = [Instance(queryset_info, [])]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def as_manager_hook(ctx: DynamicClassDefContext) -> None:
    """Synthesize a new class from a ``QuerySetLike[T].as_manager()`` call,
    inheriting from the subscripted queryset type with its type argument."""
    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    # The callee must be of the shape <Name>[<T>].as_manager.
    assert isinstance(ctx.call.callee, MemberExpr)
    assert isinstance(ctx.call.callee.expr, IndexExpr)
    assert isinstance(ctx.call.callee.expr.analyzed, TypeApplication)
    assert isinstance(ctx.call.callee.expr.analyzed.expr, NameExpr)
    queryset_type_fullname = ctx.call.callee.expr.analyzed.expr.fullname
    queryset_node = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname)
    assert queryset_node is not None
    queryset_info = queryset_node.node
    assert isinstance(queryset_info, TypeInfo)
    # Carry the single type argument through to the synthesized base.
    parameter_type = ctx.call.callee.expr.analyzed.types[0]
    obj = ctx.api.named_type("builtins.object")
    info.mro = [info, queryset_info, obj.type]
    info.bases = [Instance(queryset_info, [parameter_type])]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def plugin(version: str) -> type[DynPlugin]:
    """Plugin entry point required by mypy; the version argument is unused."""
    return DynPlugin
| DynPlugin |
python | sympy__sympy | sympy/integrals/manualintegrate.py | {
"start": 19804,
"end": 20015
class ____(AtomicRule):
    """Integration rule whose antiderivative is expressed via the upper
    incomplete gamma function.

    NOTE(review): from the formula this appears to target integrands of the
    form x**e * exp(a*x) — confirm against the dispatching code.
    """

    a: Expr  # coefficient inside the exponential
    e: Expr  # power of the integration variable

    def eval(self) -> Expr:
        a, e, x = self.a, self.e, self.variable
        return x**e * (-a*x)**(-e) * uppergamma(e + 1, -a*x)/a
@dataclass
| UpperGammaRule |
python | getsentry__sentry | tests/sentry/seer/test_seer_setup.py | {
"start": 5512,
"end": 6982
class ____(TestCase):
    """Test the standard get_seer_org_acknowledgement function for comparison."""

    def setUp(self):
        super().setUp()
        self.organization = self.create_organization(name="test-org")
        self.user = self.create_user()
        # Prompt feature key used by the acknowledgement flow.
        self.feature_name = "seer_autofix_setup_acknowledged"

    def test_returns_true_when_gen_ai_consent_removal_enabled(self):
        """Test returns True when gen-ai-consent-flow-removal feature is enabled."""
        with self.feature("organizations:gen-ai-consent-flow-removal"):
            result = get_seer_org_acknowledgement(self.organization)
        assert result is True

    def test_returns_true_when_org_has_acknowledged(self):
        """Test returns True when organization has acknowledged via PromptsActivity."""
        PromptsActivity.objects.create(
            user_id=self.user.id,
            feature=self.feature_name,
            organization_id=self.organization.id,
            project_id=0,  # org-level prompt, not tied to a project
            data=orjson.dumps({"dismissed_ts": 123456789}).decode("utf-8"),
        )
        result = get_seer_org_acknowledgement(self.organization)
        assert result is True

    def test_returns_false_when_no_acknowledgement_and_feature_not_enabled(self):
        """Test returns False when no acknowledgement exists and feature flag is disabled."""
        result = get_seer_org_acknowledgement(self.organization)
        assert result is False
| TestGetSeerOrgAcknowledgement |
python | ray-project__ray | python/ray/exceptions.py | {
"start": 11271,
"end": 11593
class ____(RayError):
    """Indicates that the worker died unexpectedly while executing a task."""

    def __str__(self):
        # Static message; the referenced log files carry the crash details.
        return (
            "The worker died unexpectedly while executing this task. "
            "Check python-core-worker-*.log files for more information."
        )
@PublicAPI
| WorkerCrashedError |
python | django__django | tests/migrations/test_migrations_squashed_complex_multi_apps/app1/4_auto.py | {
"start": 35,
"end": 182
class ____(migrations.Migration):
    """No-op test migration: depends on 3_auto and runs a noop RunPython."""

    dependencies = [("app1", "3_auto")]

    operations = [migrations.RunPython(migrations.RunPython.noop)]
| Migration |
python | doocs__leetcode | solution/3500-3599/3552.Grid Teleportation Traversal/Solution.py | {
"start": 0,
"end": 1160
} | class ____:
def minMoves(self, matrix: List[str]) -> int:
m, n = len(matrix), len(matrix[0])
g = defaultdict(list)
for i, row in enumerate(matrix):
for j, c in enumerate(row):
if c.isalpha():
g[c].append((i, j))
dirs = (-1, 0, 1, 0, -1)
dist = [[inf] * n for _ in range(m)]
dist[0][0] = 0
q = deque([(0, 0)])
while q:
i, j = q.popleft()
d = dist[i][j]
if i == m - 1 and j == n - 1:
return d
c = matrix[i][j]
if c in g:
for x, y in g[c]:
if d < dist[x][y]:
dist[x][y] = d
q.appendleft((x, y))
del g[c]
for a, b in pairwise(dirs):
x, y = i + a, j + b
if (
0 <= x < m
and 0 <= y < n
and matrix[x][y] != "#"
and d + 1 < dist[x][y]
):
dist[x][y] = d + 1
q.append((x, y))
return -1
| Solution |
python | xlwings__xlwings | xlwings/_xlwindows.py | {
"start": 8170,
"end": 14559
class ____(ctypes.Structure):
    # ctypes mirror of the Windows GUID structure:
    # https://docs.microsoft.com/en-us/openspecs/windows_protocols/
    # ms-dtyp/49e490b8-f972-45d6-a3a4-99f924998d97
    _fields_ = [
        ("Data1", ctypes.c_ulong),
        ("Data2", ctypes.c_ushort),
        ("Data3", ctypes.c_ushort),
        ("Data4", ctypes.c_byte * 8),
    ]
# GUID of the COM IDispatch interface, parsed once at import time.
_IDISPATCH_GUID = _GUID()
oledll.ole32.CLSIDFromString(
    "{00020400-0000-0000-C000-000000000046}", byref(_IDISPATCH_GUID)
)
def accessible_object_from_window(hwnd):
    """Return a raw IDispatch pointer for the Excel object behind *hwnd*."""
    # ptr is a pointer to an IDispatch:
    # https://docs.microsoft.com/en-us/windows/win32/api/oaidl/nn-oaidl-idispatch
    # We don't bother using ctypes.POINTER(comtypes.automation.IDispatch)()
    # because we won't dereference the pointer except through pywin32's
    # pythoncom.PyCom_PyObjectFromIUnknown below in get_xl_app_from_hwnd().
    ptr = ctypes.c_void_p()
    res = oledll.oleacc.AccessibleObjectFromWindow(  # noqa: F841
        hwnd, OBJID_NATIVEOM, byref(_IDISPATCH_GUID), byref(ptr)
    )
    return ptr
def is_hwnd_xl_app(hwnd):
    """Return True if *hwnd* belongs to an accessible Excel application
    window (probed via its XLDESK/EXCEL7 child windows)."""
    try:
        child_hwnd = win32gui.FindWindowEx(hwnd, 0, "XLDESK", None)
        child_hwnd = win32gui.FindWindowEx(child_hwnd, 0, "EXCEL7", None)
        ptr = accessible_object_from_window(child_hwnd)  # noqa: F841
        return True
    except WindowsError:
        return False
    except pywintypes.error:
        return False
# Bind pywin32's C-level helper that wraps a raw IUnknown/IDispatch pointer
# into a Python COM object; restype must be py_object for the cast to work.
_PyCom_PyObjectFromIUnknown = PyDLL(pythoncom.__file__).PyCom_PyObjectFromIUnknown
_PyCom_PyObjectFromIUnknown.restype = py_object
def get_xl_app_from_hwnd(hwnd):
    """Return the Excel Application COM object for the window *hwnd*,
    wrapped in a retry-aware COM wrapper."""
    pythoncom.CoInitialize()
    child_hwnd = win32gui.FindWindowEx(hwnd, 0, "XLDESK", None)
    child_hwnd = win32gui.FindWindowEx(child_hwnd, 0, "EXCEL7", None)

    ptr = accessible_object_from_window(child_hwnd)
    p = _PyCom_PyObjectFromIUnknown(ptr, byref(_IDISPATCH_GUID), True)
    disp = COMRetryObjectWrapper(Dispatch(p))
    return disp.Application
def get_excel_hwnds():
    """Yield one top-level window handle per running Excel process, walking
    the desktop's window list in Z-order."""
    pythoncom.CoInitialize()
    hwnd = windll.user32.GetTopWindow(None)
    pids = set()  # process ids already yielded, to avoid duplicates
    while hwnd:
        try:
            # Apparently, this fails on some systems when Excel is closed
            child_hwnd = win32gui.FindWindowEx(hwnd, 0, "XLDESK", None)
            if child_hwnd:
                child_hwnd = win32gui.FindWindowEx(child_hwnd, 0, "EXCEL7", None)
                if child_hwnd:
                    pid = win32process.GetWindowThreadProcessId(hwnd)[1]
                    if pid not in pids:
                        pids.add(pid)
                        yield hwnd
        except pywintypes.error:
            pass

        hwnd = windll.user32.GetWindow(hwnd, 2)  # 2 = next window according to Z-order
def get_xl_apps():
    """Yield an Excel Application COM object for each reachable instance,
    silently skipping busy or workbook-less ones."""
    for hwnd in get_excel_hwnds():
        try:
            yield get_xl_app_from_hwnd(hwnd)
        except ExcelBusyError:
            pass
        except WindowsError:
            # This happens if the bare Excel Application is open without Workbook, i.e.,
            # there's no 'EXCEL7' child hwnd that would be necessary for a connection
            pass
def is_range_instance(xl_range):
    """Return True if *xl_range* is an Excel Range COM object, identified by
    the IID of its type info."""
    pyid = getattr(xl_range, "_oleobj_", None)
    if pyid is None:
        return False
    return xl_range._oleobj_.GetTypeInfo().GetTypeAttr().iid == pywintypes.IID(
        "{00020846-0000-0000-C000-000000000046}"
    )
    # return pyid.GetTypeInfo().GetDocumentation(-1)[0] == 'Range'
def _com_time_to_datetime(com_time, datetime_builder):
return datetime_builder(
month=com_time.month,
day=com_time.day,
year=com_time.year,
hour=com_time.hour,
minute=com_time.minute,
second=com_time.second,
microsecond=com_time.microsecond,
tzinfo=None,
)
def _datetime_to_com_time(dt_time):
    """
    Convert a Python date/datetime (or pandas/numpy equivalent) into a value
    acceptable to pywin32 as a COM time.

    This function is a modified version from Pyvot (https://pypi.python.org/pypi/Pyvot)
    and subject to the following copyright:

    Copyright (c) Microsoft Corporation.

    This source code is subject to terms and conditions of the Apache License,
    Version 2.0. A copy of the license can be found in the LICENSE.txt file at the root
    of this distribution. If you cannot locate the Apache License, Version 2.0, please
    send an email to vspython@microsoft.com. By using this source code in any fashion,
    you are agreeing to be bound by the terms of the Apache License, Version 2.0.

    You must not remove this notice, or any other, from this software.
    """
    # Convert date to datetime
    if pd and isinstance(dt_time, type(pd.NaT)):
        # Missing timestamps map to an empty cell value.
        return ""
    if np:
        if type(dt_time) is np.datetime64:
            dt_time = np_datetime_to_datetime(dt_time)
    if type(dt_time) is dt.date:
        dt_time = dt.datetime(
            dt_time.year,
            dt_time.month,
            dt_time.day,
            tzinfo=win32timezone.TimeZoneInfo.utc(),
        )

    # pywintypes has its time type inherit from datetime.
    # For some reason, though it accepts plain datetimes, they must have a timezone set.
    # See http://docs.activestate.com/activepython/2.7/pywin32/html/win32/help/py3k.html
    # We replace no timezone -> UTC to allow round-trips in the naive case
    if pd and isinstance(dt_time, pd.Timestamp):
        # Otherwise pandas prints ignored exceptions on Python 3
        dt_time = dt_time.to_pydatetime()

    # We don't use pytz.utc to get rid of additional dependency
    # Don't do any timezone transformation: simply cutoff the tz info
    # If we don't reset it first, it gets transformed into UTC before sending to Excel
    dt_time = dt_time.replace(tzinfo=None)
    dt_time = dt_time.replace(tzinfo=win32timezone.TimeZoneInfo.utc())

    return dt_time
# Mapping of COM integer error codes to Excel cell-error display strings.
cell_errors = {
    -2146826281: "#DIV/0!",
    -2146826246: "#N/A",
    -2146826259: "#NAME?",
    -2146826288: "#NULL!",
    -2146826252: "#NUM!",
    -2146826265: "#REF!",
    -2146826273: "#VALUE!",
}
def _clean_value_data_element(
    value, datetime_builder, empty_as, number_builder, err_to_str
):
    """Normalize a single raw COM cell value: map empty cells to *empty_as*,
    convert COM times via *datetime_builder*, apply *number_builder* to
    floats, and translate Excel error codes (to their string form when
    *err_to_str*, otherwise to None)."""
    if value in ("", None):
        return empty_as
    elif isinstance(value, time_types):
        return _com_time_to_datetime(value, datetime_builder)
    elif number_builder is not None and isinstance(value, float):
        value = number_builder(value)
    elif isinstance(value, int) and value in cell_errors:
        if err_to_str:
            return cell_errors[value]
        else:
            return None
    return value
| _GUID |
python | falconry__falcon | falcon/app.py | {
"start": 2803,
"end": 56387
} | class ____(Generic[_ReqT, _RespT]):
'''The main entry point into a Falcon-based WSGI app.
Each App instance provides a callable
`WSGI <https://www.python.org/dev/peps/pep-3333/>`_ interface
and a routing engine (for ASGI applications, see
:class:`falcon.asgi.App`).
Note:
The ``API`` class was renamed to ``App`` in Falcon 3.0. The
old class name remains available as an alias for
backwards-compatibility, but will be removed in a future
release.
Keyword Arguments:
media_type (str): Default media type to use when initializing
:class:`~.RequestOptions` and
:class:`~.ResponseOptions`. The ``falcon``
module provides a number of constants for common media types,
such as ``falcon.MEDIA_MSGPACK``, ``falcon.MEDIA_YAML``,
``falcon.MEDIA_XML``, etc.
middleware: Either a single middleware component object or an iterable
of objects (instantiated classes) that implement the following
middleware component interface. Note that it is only necessary
to implement the methods for the events you would like to
handle; Falcon simply skips over any missing middleware methods::
class ExampleMiddleware:
def process_request(self, req: Request, resp: Response) -> None:
"""Process the request before routing it.
Note:
Because Falcon routes each request based on
req.path, a request can be effectively re-routed
by setting that attribute to a new value from
within process_request().
Args:
req: Request object that will eventually be
routed to an on_* responder method.
resp: Response object that will be routed to
the on_* responder.
"""
def process_resource(
self,
req: Request,
resp: Response,
resource: object,
params: dict[str, Any],
) -> None:
"""Process the request and resource *after* routing.
Note:
This method is only called when the request matches
a route to a resource.
Args:
req: Request object that will be passed to the
routed responder.
resp: Response object that will be passed to the
responder.
resource: Resource object to which the request was
routed. May be None if no route was found for
the request.
params: A dict-like object representing any
additional params derived from the route's URI
template fields, that will be passed to the
resource's responder method as keyword
arguments.
"""
def process_response(
self,
req: Request,
resp: Response,
resource: object,
req_succeeded: bool
) -> None:
"""Post-processing of the response (after routing).
Args:
req: Request object.
resp: Response object.
resource: Resource object to which the request was
routed. May be None if no route was found
for the request.
req_succeeded: True if no exceptions were raised
while the framework processed and routed the
request; otherwise False.
"""
(See also: :ref:`Middleware <middleware>`)
request_type: ``Request``-like class to use instead
of Falcon's default class. Among other things, this feature
affords inheriting from :class:`falcon.Request` in order
to override the ``context_type`` class variable
(default: :class:`falcon.Request`)
response_type: ``Response``-like class to use
instead of Falcon's default class (default:
:class:`falcon.Response`)
router (object): An instance of a custom router
to use in lieu of the default engine.
(See also: :ref:`Custom Routers <routing_custom>`)
independent_middleware (bool): Set to ``False`` if response
middleware should not be executed independently of whether or
not request middleware raises an exception (default
``True``). When this option is set to ``False``, a middleware
component's ``process_response()`` method will NOT be called
when that same component's ``process_request()`` (or that of
a component higher up in the stack) raises an exception.
cors_enable (bool): Set this flag to ``True`` to enable a simple
CORS policy for all responses, including support for preflighted
requests. An instance of :class:`~.CORSMiddleware` can instead be
passed to the middleware argument to customize its behaviour.
(default ``False``).
(See also: :ref:`CORS <cors>`)
sink_before_static_route (bool): Indicates if the sinks should be processed
before (when ``True``) or after (when ``False``) the static routes.
This has an effect only if no route was matched. (default ``True``)
'''
_META_METHODS: ClassVar[frozenset[str]] = frozenset(constants._META_METHODS)
_STREAM_BLOCK_SIZE: ClassVar[int] = 8 * 1024 # 8 KiB
_STATIC_ROUTE_TYPE: ClassVar[type[routing.StaticRoute]] = routing.StaticRoute
# NOTE(kgriffs): This makes it easier to tell what we are dealing with
# without having to import falcon.asgi.
_ASGI: ClassVar[bool] = False
# NOTE(kgriffs): We do it like this rather than just implementing the
# methods directly on the class, so that we keep all the default
# responders colocated in the same module. This will make it more
# likely that the implementations of the async and non-async versions
# of the methods are kept in sync (pun intended).
_default_responder_bad_request: ClassVar[ResponderCallable] = responders.bad_request
_default_responder_path_not_found: ClassVar[ResponderCallable] = (
responders.path_not_found
)
__slots__ = (
'_cors_enable',
'_error_handlers',
'_independent_middleware',
'_middleware',
'_request_type',
'_response_type',
'_router_search',
'_router',
'_serialize_error',
'_sink_and_static_routes',
'_sink_before_static_route',
'_sinks',
'_static_routes',
'_unprepared_middleware',
'req_options',
'resp_options',
)
_cors_enable: bool
_error_handlers: dict[type[Exception], ErrorHandler[_ReqT, _RespT]]
_independent_middleware: bool
_middleware: helpers.PreparedMiddlewareResult
_request_type: type[_ReqT]
_response_type: type[_RespT]
_router_search: FindMethod
# NOTE(caselit): this should actually be a protocol of the methods required
# by a router, hardcoded to CompiledRouter for convenience for now.
_router: routing.CompiledRouter
_serialize_error: ErrorSerializer
_sink_and_static_routes: tuple[
tuple[
Pattern[str] | routing.StaticRoute,
SinkCallable | AsgiSinkCallable | routing.StaticRoute,
bool,
],
...,
]
_sink_before_static_route: bool
_sinks: list[tuple[Pattern[str], SinkCallable | AsgiSinkCallable, Literal[True]]]
_static_routes: list[
tuple[routing.StaticRoute, routing.StaticRoute, Literal[False]]
]
_unprepared_middleware: list[SyncMiddleware[_ReqT, _RespT]]
# Attributes
req_options: RequestOptions
"""A set of behavioral options related to incoming requests.
See also: :class:`~.RequestOptions`
"""
resp_options: ResponseOptions
"""A set of behavioral options related to outgoing responses.
See also: :class:`~.ResponseOptions`
"""
@overload
def __init__(
self: App[Request, Response],
media_type: str = ...,
request_type: None = None,
response_type: None = None,
middleware: SyncMiddleware[_ReqT, _RespT]
| Iterable[SyncMiddleware[_ReqT, _RespT]]
| None = ...,
router: routing.CompiledRouter | None = ...,
independent_middleware: bool = ...,
cors_enable: bool = ...,
sink_before_static_route: bool = ...,
) -> None: ...
@overload
def __init__(
self: App[_ReqT, Response],
media_type: str = ...,
request_type: type[_ReqT] | None = None,
response_type: None = None,
middleware: SyncMiddleware[_ReqT, _RespT]
| Iterable[SyncMiddleware[_ReqT, _RespT]]
| None = ...,
router: routing.CompiledRouter | None = ...,
independent_middleware: bool = ...,
cors_enable: bool = ...,
sink_before_static_route: bool = ...,
) -> None: ...
@overload
def __init__(
self: App[Request, _RespT],
media_type: str = ...,
request_type: None = None,
response_type: type[_RespT] | None = None,
middleware: SyncMiddleware[_ReqT, _RespT]
| Iterable[SyncMiddleware[_ReqT, _RespT]]
| None = ...,
router: routing.CompiledRouter | None = ...,
independent_middleware: bool = ...,
cors_enable: bool = ...,
sink_before_static_route: bool = ...,
) -> None: ...
@overload
def __init__(
self,
media_type: str = ...,
request_type: type[_ReqT] | None = None,
response_type: type[_RespT] | None = None,
middleware: SyncMiddleware[_ReqT, _RespT]
| Iterable[SyncMiddleware[_ReqT, _RespT]]
| None = ...,
router: routing.CompiledRouter | None = ...,
independent_middleware: bool = ...,
cors_enable: bool = ...,
sink_before_static_route: bool = ...,
) -> None: ...
    def __init__(
        self,
        media_type: str = constants.DEFAULT_MEDIA_TYPE,
        request_type: type[_ReqT] | None = None,
        response_type: type[_RespT] | None = None,
        middleware: SyncMiddleware[_ReqT, _RespT]
        | Iterable[SyncMiddleware[_ReqT, _RespT]]
        | None = None,
        router: routing.CompiledRouter | None = None,
        independent_middleware: bool = True,
        cors_enable: bool = False,
        sink_before_static_route: bool = True,
    ) -> None:
        """Initialize the application instance.

        Sets up routing, middleware, request/response types, error handlers,
        and the per-app request/response option objects.
        """
        self._cors_enable = cors_enable
        self._sink_before_static_route = sink_before_static_route
        self._sinks = []
        self._static_routes = []
        self._sink_and_static_routes = ()

        # NOTE: add_middleware() both stores the raw components and prepares
        # the middleware method stacks, so it must run before any request can
        # be dispatched.
        self._unprepared_middleware = []
        self._independent_middleware = independent_middleware
        self.add_middleware(middleware or [])
        if cors_enable:
            # NOTE(review): the CORS component is appended *after* any
            # user-supplied middleware; add_middleware() enforces that only
            # one CORSMiddleware instance exists when cors_enable is set.
            self.add_middleware([CORSMiddleware()])

        self._router = router or routing.DefaultRouter()
        # PERF: cache the bound find() method to avoid an attribute lookup
        # per request in __call__.
        self._router_search = self._router.find

        # Fall back to the framework's default Request/Response classes when
        # no custom types were supplied.
        self._request_type = request_type or Request  # type: ignore[assignment]
        self._response_type = response_type or Response  # type: ignore[assignment]

        self._error_handlers = {}
        self._serialize_error = helpers.default_serialize_error

        self.req_options = RequestOptions()
        self.resp_options = ResponseOptions()
        self.req_options.default_media_type = media_type
        self.resp_options.default_media_type = media_type

        # NOTE(kgriffs): Add default error handlers
        self.add_error_handler(Exception, self._python_error_handler)
        self.add_error_handler(HTTPError, self._http_error_handler)
        self.add_error_handler(HTTPStatus, self._http_status_handler)
    def __call__(  # noqa: C901
        self, env: WSGIEnvironment, start_response: StartResponse
    ) -> Iterable[bytes]:
        """WSGI `app` method.

        Makes instances of App callable from a WSGI server. May be used to
        host an App or called directly in order to simulate requests when
        testing the App.

        (See also: PEP 3333)

        Args:
            env (dict): A WSGI environment dictionary
            start_response (callable): A WSGI helper function for setting
                status and headers on a response.
        """
        req = self._request_type(env, options=self.req_options)
        resp = self._response_type(options=self.resp_options)
        resource: object | None = None
        params: dict[str, Any] = {}
        dependent_mw_resp_stack: list[ProcessResponseMethod] = []
        mw_req_stack, mw_rsrc_stack, mw_resp_stack = self._middleware

        req_succeeded = False

        # --- Phase 1: request middleware and routing ---
        try:
            if req.method in self._META_METHODS:
                raise HTTPBadRequest()

            # NOTE(ealogar): The execution of request middleware
            # should be before routing. This will allow request mw
            # to modify the path.
            # NOTE: if flag set to use independent middleware, execute
            # request middleware independently. Otherwise, only queue
            # response middleware after request middleware succeeds.
            if self._independent_middleware:
                for process_request in mw_req_stack:
                    process_request(req, resp)  # type: ignore[operator]
                    if resp.complete:
                        break
            else:
                for process_request, process_response in mw_req_stack:  # type: ignore[assignment,misc]
                    if process_request and not resp.complete:
                        process_request(req, resp)  # type: ignore[operator]
                    if process_response:
                        # Queue in reverse registration order so responses
                        # unwind middleware like a stack.
                        dependent_mw_resp_stack.insert(0, process_response)  # type: ignore[arg-type]

            if not resp.complete:
                # NOTE(warsaw): Moved this to inside the try except
                # because it is possible when using object-based
                # traversal for _get_responder() to fail. An example is
                # a case where an object does not have the requested
                # next-hop child resource. In that case, the object
                # being asked to dispatch to its child will raise an
                # HTTP exception signalling the problem, e.g. a 404.
                responder: ResponderCallable
                responder, params, resource, req.uri_template = self._get_responder(req)  # type: ignore[assignment]
        except Exception as ex:
            if not self._handle_exception(req, resp, ex, params):
                raise
        else:
            # --- Phase 2: resource middleware and the responder itself ---
            try:
                # NOTE(kgriffs): If the request did not match any
                # route, a default responder is returned and the
                # resource is None. In that case, we skip the
                # resource middleware methods. Resource will also be
                # None when a middleware method already set
                # resp.complete to True.
                if resource:
                    # Call process_resource middleware methods.
                    for process_resource in mw_rsrc_stack:
                        process_resource(req, resp, resource, params)
                        if resp.complete:
                            break

                if not resp.complete:
                    responder(req, resp, **params)

                req_succeeded = True
            except Exception as ex:
                if not self._handle_exception(req, resp, ex, params):
                    raise

        # --- Phase 3: response middleware (always runs) ---
        # Call process_response middleware methods.
        for process_response in mw_resp_stack or dependent_mw_resp_stack:
            try:
                process_response(req, resp, resource, req_succeeded)
            except Exception as ex:
                if not self._handle_exception(req, resp, ex, params):
                    raise

                req_succeeded = False

        # --- Phase 4: render the body and emit the WSGI response ---
        body: Iterable[bytes] = []
        length: int | None = 0

        try:
            body, length = self._get_body(resp, env.get('wsgi.file_wrapper'))
        except Exception as ex:
            if not self._handle_exception(req, resp, ex, params):
                raise

            req_succeeded = False

        resp_status: str = code_to_http_status(resp.status)
        default_media_type: str | None = self.resp_options.default_media_type

        if req.method == 'HEAD' or resp_status in _BODILESS_STATUS_CODES:
            # HEAD responses and bodiless statuses never carry a payload.
            body = []

            # PERF(vytas): move check for the less common and much faster path
            # of resp_status being in {204, 304} here; NB: this builds on the
            # assumption _TYPELESS_STATUS_CODES <= _BODILESS_STATUS_CODES.

            # NOTE(kgriffs): Based on wsgiref.validate's interpretation of
            # RFC 2616, as commented in that module's source code. The
            # presence of the Content-Length header is not similarly
            # enforced.
            if resp_status in _TYPELESS_STATUS_CODES:
                default_media_type = None
            elif (
                length is not None
                and req.method == 'HEAD'
                and resp_status not in _BODILESS_STATUS_CODES
                and 'content-length' not in resp._headers
            ):
                # NOTE(kgriffs): We really should be returning a Content-Length
                # in this case according to my reading of the RFCs. By
                # optionally using len(data) we let a resource simulate HEAD
                # by turning around and calling it's own on_get().
                resp._headers['content-length'] = str(length)

        else:
            # PERF(kgriffs): Böse mußt sein. Operate directly on resp._headers
            # to reduce overhead since this is a hot/critical code path.
            # NOTE(kgriffs): We always set content-length to match the
            # body bytes length, even if content-length is already set. The
            # reason being that web servers and LBs behave unpredictably
            # when the header doesn't match the body (sometimes choosing to
            # drop the HTTP connection prematurely, for example).
            if length is not None:
                resp._headers['content-length'] = str(length)

        headers: list[tuple[str, str]] = resp._wsgi_headers(default_media_type)

        # Return the response per the WSGI spec.
        start_response(resp_status, headers)
        return body
    # NOTE(caselit): the return type depends on the router, hardcoded to
    # CompiledRouterOptions for convenience.
    @property
    def router_options(self) -> routing.CompiledRouterOptions:
        """Configuration options for the router.

        If a custom router is in use, and it does not expose any
        configurable options, referencing this attribute will raise
        an instance of ``AttributeError``.

        See also: :ref:`CompiledRouterOptions <compiled_router_options>`.
        """
        # Simply delegates to the underlying router instance.
        return self._router.options
def add_middleware(
self,
middleware: SyncMiddleware[_ReqT, _RespT]
| Iterable[SyncMiddleware[_ReqT, _RespT]],
) -> None:
"""Add one or more additional middleware components.
Arguments:
middleware: Either a single middleware component or an iterable
of components to add. The component(s) will be invoked, in
order, as if they had been appended to the original middleware
list passed to the class initializer.
"""
# NOTE(kgriffs): Since this is called by the initializer, there is
# the chance that middleware may be empty.
if middleware:
try:
# NOTE(kgriffs): Check to see if middleware is an iterable.
middleware = list(
cast(Iterable[SyncMiddleware[_ReqT, _RespT]], middleware)
)
except TypeError:
# NOTE(kgriffs): Middleware is not iterable; assume it is just
# one bare component.
middleware = [cast(SyncMiddleware[_ReqT, _RespT], middleware)]
if (
self._cors_enable
and len(
[
mc
for mc in self._unprepared_middleware + middleware
if isinstance(mc, CORSMiddleware)
]
)
> 1
):
raise ValueError(
'CORSMiddleware is not allowed in conjunction with '
'cors_enable (which already constructs one instance)'
)
self._unprepared_middleware += middleware
# NOTE(kgriffs): Even if middleware is None or an empty list, we still
# need to make sure self._middleware is initialized if this is the
# first call to add_middleware().
self._middleware = self._prepare_middleware(
self._unprepared_middleware,
independent_middleware=self._independent_middleware,
)
def add_route(self, uri_template: str, resource: object, **kwargs: Any) -> None:
"""Associate a templatized URI path with a resource.
Falcon routes incoming requests to resources based on a set of
URI templates. If the path requested by the client matches the
template for a given route, the request is then passed on to the
associated resource for processing.
Note:
If no route matches the request, control then passes to a default
responder that simply raises an instance of
:class:`~.HTTPRouteNotFound`. By default, this error will be
rendered as a 404 response, but this behavior can be modified by
adding a custom error handler (see also
:ref:`this FAQ topic <faq_override_404_500_handlers>`).
On the other hand, if a route is matched but the resource does not
implement a responder for the requested HTTP method, the framework
invokes a default responder that raises an instance of
:class:`~.HTTPMethodNotAllowed`.
This method delegates to the configured router's ``add_route()``
method. To override the default behavior, pass a custom router
object to the :class:`~.App` initializer.
(See also: :ref:`Routing <routing>`)
Args:
uri_template (str): A templatized URI. Care must be
taken to ensure the template does not mask any sink
patterns, if any are registered.
(See also: :meth:`~.App.add_sink`)
Warning:
If :attr:`~falcon.RequestOptions.strip_url_path_trailing_slash`
is enabled, `uri_template` should be provided without a
trailing slash.
(See also: :ref:`trailing_slash_in_path`)
resource (instance): Object which represents a REST
resource. Falcon will pass GET requests to ``on_get()``,
PUT requests to ``on_put()``, etc. If any HTTP methods are not
supported by your resource, simply don't define the
corresponding request handlers, and Falcon will do the right
thing.
Note:
When using an async version of the ``App``, all request
handlers must be awaitable coroutine functions.
Keyword Args:
suffix (str): Optional responder name suffix for this route. If
a suffix is provided, Falcon will map GET requests to
``on_get_{suffix}()``, POST requests to ``on_post_{suffix}()``,
etc. In this way, multiple closely-related routes can be
mapped to the same resource. For example, a single resource
class can use suffixed responders to distinguish requests
for a single item vs. a collection of those same items.
Another class might use a suffixed responder to handle
a shortlink route in addition to the regular route for the
resource. For example::
class Baz(object):
def on_get_foo(self, req, resp):
pass
def on_get_bar(self, req, resp):
pass
baz = Baz()
app = falcon.App()
app.add_route('/foo', baz, suffix='foo')
app.add_route('/bar', baz, suffix='bar')
compile (bool): Optional flag that can be provided when using the default
:class:`.CompiledRouter` to compile the routing logic on this call,
since it will otherwise delay compilation until the first request
is routed. See :meth:`.CompiledRouter.add_route` for further details.
Note:
Any additional keyword arguments not defined above are passed
through to the underlying router's ``add_route()`` method. The
default router ignores any additional keyword arguments, but
custom routers may take advantage of this feature to receive
additional options when setting up routes. Custom routers MUST
accept such arguments using the variadic pattern (``**kwargs``), and
ignore any keyword arguments that they don't support.
"""
# NOTE(richardolsson): Doing the validation here means it doesn't have
# to be duplicated in every future router implementation.
if not isinstance(uri_template, str):
raise TypeError('uri_template is not a string')
if not uri_template.startswith('/'):
raise ValueError("uri_template must start with '/'")
if '//' in uri_template:
raise ValueError("uri_template may not contain '//'")
self._router.add_route(uri_template, resource, **kwargs)
def add_static_route(
self,
prefix: str,
directory: str | pathlib.Path,
downloadable: bool = False,
fallback_filename: str | None = None,
) -> None:
"""Add a route to a directory of static files.
Static routes provide a way to serve files directly. This
feature provides an alternative to serving files at the web server
level when you don't have that option, when authorization is
required, or for testing purposes.
Warning:
Serving files directly from the web server,
rather than through the Python app, will always be more efficient,
and therefore should be preferred in production deployments.
For security reasons, the directory and the fallback_filename (if provided)
should be read only for the account running the application.
Warning:
If you need to serve large files and/or progressive downloads (such
as in the case of video streaming) through the Falcon app, check
that your application server's timeout settings can accommodate the
expected request duration (for instance, the popular Gunicorn kills
``sync`` workers after 30 seconds unless configured otherwise).
Note:
For ASGI apps, file reads are made non-blocking by scheduling
them on the default executor.
Static routes are matched in LIFO order. Therefore, if the same
prefix is used for two routes, the second one will override the
first. This also means that more specific routes should be added
*after* less specific ones. For example, the following sequence
would result in ``'/foo/bar/thing.js'`` being mapped to the
``'/foo/bar'`` route, and ``'/foo/xyz/thing.js'`` being mapped to the
``'/foo'`` route::
app.add_static_route('/foo', foo_path)
app.add_static_route('/foo/bar', foobar_path)
Args:
prefix (str): The path prefix to match for this route. If the
path in the requested URI starts with this string, the remainder
of the path will be appended to the source directory to
determine the file to serve. This is done in a secure manner
to prevent an attacker from requesting a file outside the
specified directory.
Note that static routes are matched in LIFO order, and are only
attempted after checking dynamic routes and sinks.
directory (Union[str, pathlib.Path]): The source directory from
which to serve files.
downloadable (bool): Set to ``True`` to include a
Content-Disposition header in the response. The "filename"
directive is simply set to the name of the requested file.
fallback_filename (str): Fallback filename used when the requested file
is not found. Can be a relative path inside the prefix folder or
any valid absolute path.
"""
sr = self._STATIC_ROUTE_TYPE(
prefix,
directory,
downloadable=downloadable,
fallback_filename=fallback_filename,
)
self._static_routes.insert(0, (sr, sr, False))
self._update_sink_and_static_routes()
def add_sink(
self, sink: SinkCallable[_ReqT, _RespT], prefix: SinkPrefix = r'/'
) -> None:
"""Register a sink method for the App.
If no route matches a request, but the path in the requested URI
matches a sink prefix, Falcon will pass control to the
associated sink, regardless of the HTTP method requested.
Using sinks, you can drain and dynamically handle a large number
of routes, when creating static resources and responders would be
impractical. For example, you might use a sink to create a smart
proxy that forwards requests to one or more backend services.
Note:
To support CORS preflight requests when using the default CORS middleware,
either by setting ``App.cors_enable=True`` or by adding the
:class:`~.CORSMiddleware` to the ``App.middleware``, the sink should
set the ``Allow`` header in the request to the allowed
method values when serving an ``OPTIONS`` request. If the ``Allow`` header
is missing from the response, the default CORS middleware will deny the
preflight request.
Args:
sink (callable): A callable taking the form ``func(req, resp, **kwargs)``.
Note:
When using an async version of the ``App``, this must be a
coroutine function taking the form
``func(req, resp, ws=None, **kwargs)``.
Similar to
:meth:`error handlers <falcon.asgi.App.add_error_handler>`,
in the case of a WebSocket connection, the
:class:`resp <falcon.asgi.Response>` argument will be
``None``, whereas the `ws` keyword argument will receive
the :class:`~falcon.asgi.WebSocket` connection object.
For backwards-compatibility, when `ws` is absent from the
sink's signature, or a regex match (see **prefix** below)
contains a group named 'ws', the
:class:`~falcon.asgi.WebSocket` object is passed in place
of the incompatible `resp`.
This behavior will change in Falcon 5.0: when draining a
WebSocket connection, `resp` will always be set to ``None``
regardless of the sink's signature.
.. versionadded:: 4.1
If an asynchronous sink callable explicitly defines a `ws`
argument, it is used to pass the
:class:`~falcon.asgi.WebSocket` connection object.
prefix (str): A regex string, typically starting with '/', which
will trigger the sink if it matches the path portion of the
request's URI. Both strings and precompiled regex objects
may be specified. Characters are matched starting at the
beginning of the URI path.
Note:
Named groups are converted to kwargs and passed to
the sink as such.
Warning:
If the prefix overlaps a registered route template,
the route will take precedence and mask the sink.
(See also: :meth:`~.add_route`)
"""
if not self._ASGI and iscoroutinefunction(sink):
raise CompatibilityError(
'The sink method must be a regular synchronous function '
'in order to be used with a WSGI app.'
)
if not hasattr(prefix, 'match'):
# Assume it is a string
prefix = re.compile(prefix)
else:
prefix = cast(Pattern[str], prefix)
# NOTE(kgriffs): Insert at the head of the list such that
# in the case of a duplicate prefix, the last one added
# is preferred.
self._sinks.insert(0, (prefix, sink, True))
self._update_sink_and_static_routes()
@overload
def add_error_handler(
self,
exception: type[_ExcT],
handler: Callable[[_ReqT, _RespT, _ExcT, dict[str, Any]], None],
) -> None: ...
@overload
def add_error_handler(
self,
exception: type[Exception] | Iterable[type[Exception]],
handler: ErrorHandler[_ReqT, _RespT] | None = None,
) -> None: ...
    def add_error_handler(  # type: ignore[misc]
        self,
        exception: type[Exception] | Iterable[type[Exception]],
        handler: ErrorHandler[_ReqT, _RespT] | None = None,
    ) -> None:
        """Register a handler for one or more exception types.

        Error handlers may be registered for any exception type, including
        :class:`~.HTTPError` or :class:`~.HTTPStatus`. This feature
        provides a central location for logging and otherwise handling
        exceptions raised by responders, hooks, and middleware components.

        A handler can raise an instance of :class:`~.HTTPError` or
        :class:`~.HTTPStatus` to communicate information about the issue to
        the client.  Alternatively, a handler may modify `resp`
        directly.

        An error handler "matches" a raised exception if the exception is an
        instance of the corresponding exception type. If more than one error
        handler matches the raised exception, the framework will choose the
        most specific one, as determined by the method resolution order of the
        raised exception type. If multiple error handlers are registered for the
        *same* exception class, then the most recently-registered handler is
        used.

        For example, suppose we register error handlers as follows::

            app = App()
            app.add_error_handler(falcon.HTTPNotFound, custom_handle_not_found)
            app.add_error_handler(falcon.HTTPError, custom_handle_http_error)
            app.add_error_handler(Exception, custom_handle_uncaught_exception)
            app.add_error_handler(falcon.HTTPNotFound, custom_handle_404)

        If an instance of ``falcon.HTTPForbidden`` is raised, it will be
        handled by ``custom_handle_http_error()``. ``falcon.HTTPError`` is a
        superclass of ``falcon.HTTPForbidden`` and a subclass of ``Exception``,
        so it is the most specific exception type with a registered handler.

        If an instance of ``falcon.HTTPNotFound`` is raised, it will be handled
        by ``custom_handle_404()``, not by ``custom_handle_not_found()``, because
        ``custom_handle_404()`` was registered more recently.

        .. Note::

            By default, the framework installs three handlers, one for
            :class:`~.HTTPError`, one for :class:`~.HTTPStatus`, and one for
            the standard ``Exception`` type, which prevents passing uncaught
            exceptions to the WSGI server. These can be overridden by adding a
            custom error handler method for the exception type in question.

        Args:
            exception (type or iterable of types): When handling a request,
                whenever an error occurs that is an instance of the specified
                type(s), the associated handler will be called. Either a single
                type or an iterable of types may be specified.
            handler (callable): A function or callable object taking the form
                ``func(req, resp, ex, params)``.

                If not specified explicitly, the handler will default to
                ``exception.handle``, where ``exception`` is the error
                type specified above, and ``handle`` is a static method
                (i.e., decorated with ``@staticmethod``) that accepts
                the same params just described. For example::

                    class CustomException(CustomBaseException):
                        @staticmethod
                        def handle(req, resp, ex, params):
                            # TODO: Log the error
                            # Convert to an instance of falcon.HTTPError
                            raise falcon.HTTPError(falcon.HTTP_792)

                If an iterable of exception types is specified instead of
                a single type, the handler must be explicitly specified.

        .. versionchanged:: 3.0
            The error handler is now selected by the most-specific matching
            error class, rather than the most-recently registered matching error
            class.
        """

        # Adapter for legacy ``func(ex, req, resp, params)`` handlers,
        # reordering arguments to the modern signature.
        def wrap_old_handler(
            old_handler: Callable[..., Any],
        ) -> ErrorHandler[_ReqT, _RespT]:
            @wraps(old_handler)
            def handler(
                req: _ReqT, resp: _RespT, ex: Exception, params: dict[str, Any]
            ) -> None:
                old_handler(ex, req, resp, params)

            return handler  # type: ignore[return-value]

        if handler is None:
            # Fall back to a static ``handle`` method on the exception class.
            handler = getattr(exception, 'handle', None)

            if handler is None:
                raise AttributeError(
                    'handler must either be specified explicitly or defined as a '
                    'static method named "handle" that is a member of the given '
                    'exception class.'
                )

        # TODO(vytas): Remove this shimming in a future Falcon version.
        # Heuristic detection of the deprecated argument order: either the
        # first parameter is named like an exception, or params 2-3 are named
        # like (req, resp) shifted one position to the right.
        arg_names = tuple(misc.get_argnames(handler))
        if arg_names[0:1] in (
            ('e',),
            ('err',),
            ('error',),
            ('ex',),
            ('exception',),
        ) or arg_names[1:3] in (('req', 'resp'), ('request', 'response')):
            warnings.warn(
                f'handler is using a deprecated signature; please order its '
                f'arguments as {handler.__qualname__}(req, resp, ex, params). '  # type: ignore
                f'This compatibility shim will be removed in Falcon 5.0.',
                deprecation.DeprecatedWarning,
            )
            handler = wrap_old_handler(handler)

        # Normalize to a tuple of exception classes; a bare class is not
        # iterable and lands in the except branch.
        exception_tuple: tuple[type[Exception], ...]
        try:
            exception_tuple = tuple(exception)  # type: ignore[arg-type]
        except TypeError:
            exception_tuple = (exception,)  # type: ignore[assignment]

        for exc in exception_tuple:
            if not issubclass(exc, Exception):
                raise TypeError('"exception" must be an exception type.')

            self._error_handlers[exc] = handler
    def set_error_serializer(self, serializer: ErrorSerializer[_ReqT, _RespT]) -> None:
        """Override the default serializer for instances of :class:`~.HTTPError`.

        When a responder raises an instance of :class:`~.HTTPError`,
        Falcon converts it to an HTTP response automatically. The
        default serializer supports JSON and XML, but may be overridden
        by this method to use a custom serializer in order to support
        other media types.

        Note:
            If a custom media type is used and the type includes a
            "+json" or "+xml" suffix, the default serializer will
            convert the error to JSON or XML, respectively.

        Note:
            A custom serializer set with this method may not be called if the
            default error handler for :class:`~.HTTPError` has been overridden.
            See :meth:`~.add_error_handler` for more details.

        The :class:`~.HTTPError` class contains helper methods,
        such as `to_json()` and `to_dict()`, that can be used from
        within custom serializers. For example::

            def my_serializer(
                req: Request, resp: Response, exception: HTTPError
            ) -> None:
                representation = None

                preferred = req.client_prefers((falcon.MEDIA_YAML, falcon.MEDIA_JSON))

                if preferred is not None:
                    if preferred == falcon.MEDIA_JSON:
                        resp.data = exception.to_json()
                    else:
                        resp.text = yaml.dump(exception.to_dict(), encoding=None)
                    resp.content_type = preferred

                resp.append_header('Vary', 'Accept')

        Args:
            serializer (callable): A function taking the form
                ``func(req, resp, exception)``, where `req` is the request
                object that was passed to the responder method, `resp` is
                the response object, and `exception` is an instance of
                ``falcon.HTTPError``.
        """
        # The serializer is invoked from _compose_error_response().
        self._serialize_error = serializer
    # ------------------------------------------------------------------------
    # Helpers that require self
    # ------------------------------------------------------------------------

    def _prepare_middleware(
        self,
        middleware: list[SyncMiddleware[_ReqT, _RespT]],
        independent_middleware: bool = False,
    ) -> helpers.PreparedMiddlewareResult:
        # Indirection point so that subclasses (e.g., an async App) can swap
        # in a different middleware preparation strategy.
        return helpers.prepare_middleware(
            middleware=middleware, independent_middleware=independent_middleware
        )
    def _get_responder(
        self, req: _ReqT
    ) -> tuple[
        ResponderCallable | AsgiResponderCallable | AsgiResponderWsCallable,
        dict[str, Any],
        object,
        str | None,
    ]:
        """Search routes for a matching responder.

        Args:
            req (Request): The request object.

        Returns:
            tuple: A 4-member tuple consisting of a responder callable,
            a ``dict`` containing parsed path fields (if any were specified in
            the matching route's URI template), a reference to the responder's
            resource instance, and the matching URI template.

        Note:
            If a responder was matched to the given URI, but the HTTP
            method was not found in the method_map for the responder,
            the responder callable element of the returned tuple will be
            `falcon.responder.bad_request`.

            Likewise, if no responder was matched for the given URI, then
            the responder callable element of the returned tuple will be
            `falcon.responder.path_not_found`
        """
        path = req.path
        # WebSocket handshakes are dispatched under a pseudo-method.
        method = 'WEBSOCKET' if req.is_websocket else req.method
        uri_template = None

        route = self._router_search(path, req=req)

        if route is not None:
            try:
                resource, method_map, params, uri_template = route
            except ValueError:
                # NOTE(kgriffs): Older routers may not return the
                # template. But for performance reasons they should at
                # least return None if they don't support it.
                resource, method_map, params = route  # type: ignore[misc]
        else:
            # NOTE(kgriffs): Older routers may indicate that no route
            # was found by returning (None, None, None). Therefore, we
            # normalize resource as the flag to indicate whether or not
            # a route was found, for the sake of backwards-compat.
            resource = None

        if resource is not None:
            try:
                responder = method_map[method]
            except KeyError:
                # NOTE(kgriffs): Dirty hack! We use __class__ here to avoid
                # binding self to the default responder method. We could
                # decorate the function itself with @staticmethod, but it
                # would perhaps be less obvious to the reader why this is
                # needed when just looking at the code in the reponder
                # module, so we just grab it directly here.
                responder = self.__class__._default_responder_bad_request
        else:
            params = {}

            # No route matched; fall back to sinks/static routes in their
            # configured precedence order.
            for matcher, obj, is_sink in self._sink_and_static_routes:
                m = matcher.match(path)
                if m:
                    if is_sink:
                        # Named regex groups become responder kwargs.
                        params = m.groupdict()  # type: ignore[union-attr]
                    responder = obj

                    break
            else:
                responder = self.__class__._default_responder_path_not_found

        return (responder, params, resource, uri_template)
def _compose_status_response(
self, req: _ReqT, resp: _RespT, http_status: HTTPStatus
) -> None:
"""Compose a response for the given HTTPStatus instance."""
# PERF(kgriffs): The code to set the status and headers is identical
# to that used in _compose_error_response(), but refactoring in the
# name of DRY isn't worth the extra CPU cycles.
resp.status = http_status.status
if http_status.headers is not None:
resp.set_headers(http_status.headers)
# NOTE(kgriffs): If http_status.text is None, that's OK because
# it's acceptable to set resp.text to None (to indicate no body).
resp.text = http_status.text
def _compose_error_response(
self, req: _ReqT, resp: _RespT, error: HTTPError
) -> None:
"""Compose a response for the given HTTPError instance."""
resp.status = error.status
if error.headers is not None:
resp.set_headers(error.headers)
self._serialize_error(req, resp, error)
    def _http_status_handler(
        self, req: _ReqT, resp: _RespT, status: HTTPStatus, params: dict[str, Any]
    ) -> None:
        # Default error handler for raised HTTPStatus instances; registered
        # in __init__.
        self._compose_status_response(req, resp, status)
    def _http_error_handler(
        self, req: _ReqT, resp: _RespT, error: HTTPError, params: dict[str, Any]
    ) -> None:
        # Default error handler for raised HTTPError instances; registered
        # in __init__.
        self._compose_error_response(req, resp, error)
def _python_error_handler(
self, req: _ReqT, resp: _RespT, error: Exception, params: dict[str, Any]
) -> None:
req.log_error(traceback.format_exc())
self._compose_error_response(req, resp, HTTPInternalServerError())
def _find_error_handler(self, ex: Exception) -> ErrorHandler | None:
# NOTE(csojinb): The `__mro__` class attribute returns the method
# resolution order tuple, i.e. the complete linear inheritance chain
# ``(type(ex), ..., object)``. For a valid exception class, the last
# two entries in the tuple will always be ``BaseException``and
# ``object``, so here we iterate over the lineage of exception types,
# from most to least specific.
# PERF(csojinb): The expression ``type(ex).__mro__[:-1]`` here is not
# super readable, but we inline it to avoid function call overhead.
for exc in type(ex).__mro__[:-1]:
handler = self._error_handlers.get(exc)
if handler is not None:
return handler
return None
def _handle_exception(
self, req: _ReqT, resp: _RespT, ex: Exception, params: dict[str, Any]
) -> bool:
"""Handle an exception raised from mw or a responder.
Args:
ex: Exception to handle
req: Current request object to pass to the handler
registered for the given exception type
resp: Current response object to pass to the handler
registered for the given exception type
params: Responder params to pass to the handler
registered for the given exception type
Returns:
bool: ``True`` if a handler was found and called for the
exception, ``False`` otherwise.
"""
err_handler = self._find_error_handler(ex)
# NOTE(caselit): Reset body, data and media before calling the handler
resp.text = resp.data = resp.media = None
if err_handler is not None:
try:
err_handler(req, resp, ex, params)
except HTTPStatus as status:
self._compose_status_response(req, resp, status)
except HTTPError as error:
self._compose_error_response(req, resp, error)
return True
# NOTE(kgriffs): No error handlers are defined for ex
# and it is not one of (HTTPStatus, HTTPError), since it
# would have matched one of the corresponding default
# handlers.
return False
# PERF(kgriffs): Moved from api_helpers since it is slightly faster
# to call using self, and this function is called for most
# requests.
def _get_body(
self,
resp: Response,
wsgi_file_wrapper: Callable[[ReadableIO, int], Iterable[bytes]] | None = None,
) -> tuple[Iterable[bytes], int | None]:
"""Convert resp content into an iterable as required by PEP 333.
Args:
resp: Instance of falcon.Response
wsgi_file_wrapper: Reference to wsgi.file_wrapper from the
WSGI environ dict, if provided by the WSGI server. Used
when resp.stream is a file-like object (default None).
Returns:
tuple: A two-member tuple of the form (iterable, content_length).
The length is returned as ``None`` when unknown. The
iterable is determined as follows:
* If the result of render_body() is not ``None``, returns
([data], len(data))
* If resp.stream is not ``None``, returns resp.stream
iterable using wsgi.file_wrapper, if necessary:
(closeable_iterator, None)
* Otherwise, returns ([], 0)
"""
data: bytes | None = resp.render_body()
if data is not None:
return [data], len(data)
stream = resp.stream
if stream is not None:
# NOTE(kgriffs): Heuristic to quickly check if stream is
# file-like. Not perfect, but should be good enough until
# proven otherwise.
if hasattr(stream, 'read'):
if wsgi_file_wrapper is not None:
# TODO(kgriffs): Make block size configurable at the
# global level, pending experimentation to see how
# useful that would be. See also the discussion on
# this GitHub PR:
# https://github.com/falconry/falcon/pull/249#discussion_r11269730
iterable = wsgi_file_wrapper(stream, self._STREAM_BLOCK_SIZE) # type: ignore[arg-type]
else:
iterable = helpers.CloseableStreamIterator(
stream, # type: ignore[arg-type]
self._STREAM_BLOCK_SIZE,
)
else:
iterable = stream
return iterable, None
return [], 0
def _update_sink_and_static_routes(self) -> None:
if self._sink_before_static_route:
self._sink_and_static_routes = tuple(self._sinks + self._static_routes) # type: ignore[operator]
else:
self._sink_and_static_routes = tuple(self._static_routes + self._sinks) # type: ignore[operator]
# TODO(myusko): This class is a compatibility alias, and should be removed
# in Falcon 5.0.
| App |
python | pandas-dev__pandas | pandas/tests/frame/methods/test_rank.py | {
"start": 248,
"end": 17401
} | class ____:
s = Series([1, 3, 4, 2, np.nan, 2, 1, 5, np.nan, 3])
df = DataFrame({"A": s, "B": s})
results = {
"average": np.array([1.5, 5.5, 7.0, 3.5, np.nan, 3.5, 1.5, 8.0, np.nan, 5.5]),
"min": np.array([1, 5, 7, 3, np.nan, 3, 1, 8, np.nan, 5]),
"max": np.array([2, 6, 7, 4, np.nan, 4, 2, 8, np.nan, 6]),
"first": np.array([1, 5, 7, 3, np.nan, 4, 2, 8, np.nan, 6]),
"dense": np.array([1, 3, 4, 2, np.nan, 2, 1, 5, np.nan, 3]),
}
def test_rank(self, float_frame):
sp_stats = pytest.importorskip("scipy.stats")
float_frame.loc[::2, "A"] = np.nan
float_frame.loc[::3, "B"] = np.nan
float_frame.loc[::4, "C"] = np.nan
float_frame.loc[::5, "D"] = np.nan
ranks0 = float_frame.rank()
ranks1 = float_frame.rank(1)
mask = np.isnan(float_frame.values)
fvals = float_frame.fillna(np.inf).values
exp0 = np.apply_along_axis(sp_stats.rankdata, 0, fvals)
exp0[mask] = np.nan
exp1 = np.apply_along_axis(sp_stats.rankdata, 1, fvals)
exp1[mask] = np.nan
tm.assert_almost_equal(ranks0.values, exp0)
tm.assert_almost_equal(ranks1.values, exp1)
# integers
df = DataFrame(
np.random.default_rng(2).integers(0, 5, size=40).reshape((10, 4))
)
result = df.rank()
exp = df.astype(float).rank()
tm.assert_frame_equal(result, exp)
result = df.rank(1)
exp = df.astype(float).rank(1)
tm.assert_frame_equal(result, exp)
def test_rank2(self):
df = DataFrame([[1, 3, 2], [1, 2, 3]])
expected = DataFrame([[1.0, 3.0, 2.0], [1, 2, 3]]) / 3.0
result = df.rank(1, pct=True)
tm.assert_frame_equal(result, expected)
df = DataFrame([[1, 3, 2], [1, 2, 3]])
expected = df.rank(0) / 2.0
result = df.rank(0, pct=True)
tm.assert_frame_equal(result, expected)
df = DataFrame([["b", "c", "a"], ["a", "c", "b"]])
expected = DataFrame([[2.0, 3.0, 1.0], [1, 3, 2]])
result = df.rank(1, numeric_only=False)
tm.assert_frame_equal(result, expected)
expected = DataFrame([[2.0, 1.5, 1.0], [1, 1.5, 2]])
result = df.rank(0, numeric_only=False)
tm.assert_frame_equal(result, expected)
df = DataFrame([["b", np.nan, "a"], ["a", "c", "b"]])
expected = DataFrame([[2.0, np.nan, 1.0], [1.0, 3.0, 2.0]])
result = df.rank(1, numeric_only=False)
tm.assert_frame_equal(result, expected)
expected = DataFrame([[2.0, np.nan, 1.0], [1.0, 1.0, 2.0]])
result = df.rank(0, numeric_only=False)
tm.assert_frame_equal(result, expected)
# f7u12, this does not work without extensive workaround
data = [
[datetime(2001, 1, 5), np.nan, datetime(2001, 1, 2)],
[datetime(2000, 1, 2), datetime(2000, 1, 3), datetime(2000, 1, 1)],
]
df = DataFrame(data)
# check the rank
expected = DataFrame([[2.0, np.nan, 1.0], [2.0, 3.0, 1.0]])
result = df.rank(1, numeric_only=False, ascending=True)
tm.assert_frame_equal(result, expected)
expected = DataFrame([[1.0, np.nan, 2.0], [2.0, 1.0, 3.0]])
result = df.rank(1, numeric_only=False, ascending=False)
tm.assert_frame_equal(result, expected)
df = DataFrame({"a": [1e-20, -5, 1e-20 + 1e-40, 10, 1e60, 1e80, 1e-30]})
exp = DataFrame({"a": [3.5, 1.0, 3.5, 5.0, 6.0, 7.0, 2.0]})
tm.assert_frame_equal(df.rank(), exp)
def test_rank_does_not_mutate(self):
# GH#18521
# Check rank does not mutate DataFrame
df = DataFrame(
np.random.default_rng(2).standard_normal((10, 3)), dtype="float64"
)
expected = df.copy()
df.rank()
result = df
tm.assert_frame_equal(result, expected)
def test_rank_mixed_frame(self, float_string_frame):
float_string_frame["datetime"] = datetime.now()
float_string_frame["timedelta"] = timedelta(days=1, seconds=1)
float_string_frame.rank(numeric_only=False)
with pytest.raises(TypeError, match="not supported between instances of"):
float_string_frame.rank(axis=1)
def test_rank_na_option(self, float_frame):
sp_stats = pytest.importorskip("scipy.stats")
float_frame.loc[::2, "A"] = np.nan
float_frame.loc[::3, "B"] = np.nan
float_frame.loc[::4, "C"] = np.nan
float_frame.loc[::5, "D"] = np.nan
# bottom
ranks0 = float_frame.rank(na_option="bottom")
ranks1 = float_frame.rank(1, na_option="bottom")
fvals = float_frame.fillna(np.inf).values
exp0 = np.apply_along_axis(sp_stats.rankdata, 0, fvals)
exp1 = np.apply_along_axis(sp_stats.rankdata, 1, fvals)
tm.assert_almost_equal(ranks0.values, exp0)
tm.assert_almost_equal(ranks1.values, exp1)
# top
ranks0 = float_frame.rank(na_option="top")
ranks1 = float_frame.rank(1, na_option="top")
fval0 = float_frame.fillna((float_frame.min() - 1).to_dict()).values
fval1 = float_frame.T
fval1 = fval1.fillna((fval1.min() - 1).to_dict()).T
fval1 = fval1.fillna(np.inf).values
exp0 = np.apply_along_axis(sp_stats.rankdata, 0, fval0)
exp1 = np.apply_along_axis(sp_stats.rankdata, 1, fval1)
tm.assert_almost_equal(ranks0.values, exp0)
tm.assert_almost_equal(ranks1.values, exp1)
# descending
# bottom
ranks0 = float_frame.rank(na_option="top", ascending=False)
ranks1 = float_frame.rank(1, na_option="top", ascending=False)
fvals = float_frame.fillna(np.inf).values
exp0 = np.apply_along_axis(sp_stats.rankdata, 0, -fvals)
exp1 = np.apply_along_axis(sp_stats.rankdata, 1, -fvals)
tm.assert_almost_equal(ranks0.values, exp0)
tm.assert_almost_equal(ranks1.values, exp1)
# descending
# top
ranks0 = float_frame.rank(na_option="bottom", ascending=False)
ranks1 = float_frame.rank(1, na_option="bottom", ascending=False)
fval0 = float_frame.fillna((float_frame.min() - 1).to_dict()).values
fval1 = float_frame.T
fval1 = fval1.fillna((fval1.min() - 1).to_dict()).T
fval1 = fval1.fillna(np.inf).values
exp0 = np.apply_along_axis(sp_stats.rankdata, 0, -fval0)
exp1 = np.apply_along_axis(sp_stats.rankdata, 1, -fval1)
tm.assert_numpy_array_equal(ranks0.values, exp0)
tm.assert_numpy_array_equal(ranks1.values, exp1)
# bad values throw error
msg = "na_option must be one of 'keep', 'top', or 'bottom'"
with pytest.raises(ValueError, match=msg):
float_frame.rank(na_option="bad", ascending=False)
# invalid type
with pytest.raises(ValueError, match=msg):
float_frame.rank(na_option=True, ascending=False)
def test_rank_axis(self):
# check if using axes' names gives the same result
df = DataFrame([[2, 1], [4, 3]])
tm.assert_frame_equal(df.rank(axis=0), df.rank(axis="index"))
tm.assert_frame_equal(df.rank(axis=1), df.rank(axis="columns"))
@pytest.mark.parametrize("ax", [0, 1])
def test_rank_methods_frame(self, ax, rank_method):
sp_stats = pytest.importorskip("scipy.stats")
xs = np.random.default_rng(2).integers(0, 21, (100, 26))
xs = (xs - 10.0) / 10.0
cols = [chr(ord("z") - i) for i in range(xs.shape[1])]
for vals in [xs, xs + 1e6, xs * 1e-6]:
df = DataFrame(vals, columns=cols)
result = df.rank(axis=ax, method=rank_method)
sprank = np.apply_along_axis(
sp_stats.rankdata,
ax,
vals,
rank_method if rank_method != "first" else "ordinal",
)
sprank = sprank.astype(np.float64)
expected = DataFrame(sprank, columns=cols).astype("float64")
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("dtype", ["O", "f8", "i8"])
def test_rank_descending(self, rank_method, dtype):
if "i" in dtype:
df = self.df.dropna().astype(dtype)
else:
df = self.df.astype(dtype)
res = df.rank(ascending=False)
expected = (df.max() - df).rank()
tm.assert_frame_equal(res, expected)
expected = (df.max() - df).rank(method=rank_method)
if dtype != "O":
res2 = df.rank(method=rank_method, ascending=False, numeric_only=True)
tm.assert_frame_equal(res2, expected)
res3 = df.rank(method=rank_method, ascending=False, numeric_only=False)
tm.assert_frame_equal(res3, expected)
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("dtype", [None, object])
def test_rank_2d_tie_methods(self, rank_method, axis, dtype):
df = self.df
def _check2d(df, expected, method="average", axis=0):
exp_df = DataFrame({"A": expected, "B": expected})
if axis == 1:
df = df.T
exp_df = exp_df.T
result = df.rank(method=rank_method, axis=axis)
tm.assert_frame_equal(result, exp_df)
frame = df if dtype is None else df.astype(dtype)
_check2d(frame, self.results[rank_method], method=rank_method, axis=axis)
@pytest.mark.parametrize(
"rank_method,exp",
[
("dense", [[1.0, 1.0, 1.0], [1.0, 0.5, 2.0 / 3], [1.0, 0.5, 1.0 / 3]]),
(
"min",
[
[1.0 / 3, 1.0, 1.0],
[1.0 / 3, 1.0 / 3, 2.0 / 3],
[1.0 / 3, 1.0 / 3, 1.0 / 3],
],
),
(
"max",
[[1.0, 1.0, 1.0], [1.0, 2.0 / 3, 2.0 / 3], [1.0, 2.0 / 3, 1.0 / 3]],
),
(
"average",
[[2.0 / 3, 1.0, 1.0], [2.0 / 3, 0.5, 2.0 / 3], [2.0 / 3, 0.5, 1.0 / 3]],
),
(
"first",
[
[1.0 / 3, 1.0, 1.0],
[2.0 / 3, 1.0 / 3, 2.0 / 3],
[3.0 / 3, 2.0 / 3, 1.0 / 3],
],
),
],
)
def test_rank_pct_true(self, rank_method, exp):
# see gh-15630.
df = DataFrame([[2012, 66, 3], [2012, 65, 2], [2012, 65, 1]])
result = df.rank(method=rank_method, pct=True)
expected = DataFrame(exp)
tm.assert_frame_equal(result, expected)
@pytest.mark.single_cpu
def test_pct_max_many_rows(self):
# GH 18271
df = DataFrame({"A": np.arange(2**24 + 1), "B": np.arange(2**24 + 1, 0, -1)})
result = df.rank(pct=True).max()
assert (result == 1).all()
@pytest.mark.parametrize(
"contents,dtype",
[
(
[
-np.inf,
-50,
-1,
-1e-20,
-1e-25,
-1e-50,
0,
1e-40,
1e-20,
1e-10,
2,
40,
np.inf,
],
"float64",
),
(
[
-np.inf,
-50,
-1,
-1e-20,
-1e-25,
-1e-45,
0,
1e-40,
1e-20,
1e-10,
2,
40,
np.inf,
],
"float32",
),
([np.iinfo(np.uint8).min, 1, 2, 100, np.iinfo(np.uint8).max], "uint8"),
(
[
np.iinfo(np.int64).min,
-100,
0,
1,
9999,
100000,
1e10,
np.iinfo(np.int64).max,
],
"int64",
),
([NegInfinity(), "1", "A", "BA", "Ba", "C", Infinity()], "object"),
(
[datetime(2001, 1, 1), datetime(2001, 1, 2), datetime(2001, 1, 5)],
"datetime64",
),
],
)
def test_rank_inf_and_nan(self, contents, dtype, frame_or_series):
dtype_na_map = {
"float64": np.nan,
"float32": np.nan,
"object": None,
"datetime64": np.datetime64("nat"),
}
# Insert nans at random positions if underlying dtype has missing
# value. Then adjust the expected order by adding nans accordingly
# This is for testing whether rank calculation is affected
# when values are intertwined with nan values.
values = np.array(contents, dtype=dtype)
exp_order = np.array(range(len(values)), dtype="float64") + 1.0
if dtype in dtype_na_map:
na_value = dtype_na_map[dtype]
nan_indices = np.random.default_rng(2).choice(range(len(values)), 5)
values = np.insert(values, nan_indices, na_value)
exp_order = np.insert(exp_order, nan_indices, np.nan)
# Shuffle the testing array and expected results in the same way
random_order = np.random.default_rng(2).permutation(len(values))
obj = frame_or_series(values[random_order])
expected = frame_or_series(exp_order[random_order], dtype="float64")
result = obj.rank()
tm.assert_equal(result, expected)
def test_df_series_inf_nan_consistency(self):
# GH#32593
index = [5, 4, 3, 2, 1, 6, 7, 8, 9, 10]
col1 = [5, 4, 3, 5, 8, 5, 2, 1, 6, 6]
col2 = [5, 4, np.nan, 5, 8, 5, np.inf, np.nan, 6, -np.inf]
df = DataFrame(
data={
"col1": col1,
"col2": col2,
},
index=index,
dtype="f8",
)
df_result = df.rank()
series_result = df.copy()
series_result["col1"] = df["col1"].rank()
series_result["col2"] = df["col2"].rank()
tm.assert_frame_equal(df_result, series_result)
def test_rank_both_inf(self):
# GH#32593
df = DataFrame({"a": [-np.inf, 0, np.inf]})
expected = DataFrame({"a": [1.0, 2.0, 3.0]})
result = df.rank()
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"na_option,ascending,expected",
[
("top", True, [3.0, 1.0, 2.0]),
("top", False, [2.0, 1.0, 3.0]),
("bottom", True, [2.0, 3.0, 1.0]),
("bottom", False, [1.0, 3.0, 2.0]),
],
)
def test_rank_inf_nans_na_option(
self, frame_or_series, rank_method, na_option, ascending, expected
):
obj = frame_or_series([np.inf, np.nan, -np.inf])
result = obj.rank(method=rank_method, na_option=na_option, ascending=ascending)
expected = frame_or_series(expected)
tm.assert_equal(result, expected)
@pytest.mark.parametrize(
"na_option,ascending,expected",
[
("bottom", True, [1.0, 2.0, 4.0, 3.0]),
("bottom", False, [1.0, 2.0, 4.0, 3.0]),
("top", True, [2.0, 3.0, 1.0, 4.0]),
("top", False, [2.0, 3.0, 1.0, 4.0]),
],
)
def test_rank_object_first(self, frame_or_series, na_option, ascending, expected):
obj = frame_or_series(["foo", "foo", None, "foo"])
result = obj.rank(method="first", na_option=na_option, ascending=ascending)
expected = frame_or_series(expected)
tm.assert_equal(result, expected)
@pytest.mark.parametrize(
"data,expected",
[
(
{"a": [1, 2, "a"], "b": [4, 5, 6]},
DataFrame({"b": [1.0, 2.0, 3.0]}, columns=Index(["b"], dtype=object)),
),
({"a": [1, 2, "a"]}, DataFrame(index=range(3), columns=[])),
],
)
def test_rank_mixed_axis_zero(self, data, expected):
df = DataFrame(data, columns=Index(list(data.keys()), dtype=object))
with pytest.raises(TypeError, match="'<' not supported between instances of"):
df.rank()
result = df.rank(numeric_only=True)
tm.assert_frame_equal(result, expected)
def test_rank_string_dtype(self, string_dtype_no_object):
# GH#55362
obj = Series(["foo", "foo", None, "foo"], dtype=string_dtype_no_object)
result = obj.rank(method="first")
exp_dtype = (
"Float64" if string_dtype_no_object == "string[pyarrow]" else "float64"
)
if string_dtype_no_object.storage == "python":
# TODO nullable string[python] should also return nullable Int64
exp_dtype = "float64"
expected = Series([1, 2, None, 3], dtype=exp_dtype)
tm.assert_series_equal(result, expected)
| TestRank |
python | kamyu104__LeetCode-Solutions | Python/remove-digit-from-number-to-maximize-result.py | {
"start": 38,
"end": 460
} | class ____(object):
def removeDigit(self, number, digit):
"""
:type number: str
:type digit: str
:rtype: str
"""
i = next((i for i in xrange(len(number)-1) if digit == number[i] < number[i+1]), len(number)-1)
if i+1 == len(number):
i = next((i for i in reversed(xrange(len(number))) if digit == number[i]))
return number[:i]+number[i+1:]
| Solution |
python | apache__airflow | providers/google/tests/unit/google/cloud/operators/test_spanner.py | {
"start": 2844,
"end": 31010
} | class ____:
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_create(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = SpannerDeployInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.create_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
)
mock_hook.return_value.update_instance.assert_not_called()
assert result is None
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_create_missing_project_id(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = SpannerDeployInstanceOperator(
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.create_instance.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
)
mock_hook.return_value.update_instance.assert_not_called()
assert result is None
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_update(self, mock_hook):
mock_hook.return_value.get_instance.return_value = {"name": INSTANCE_ID}
op = SpannerDeployInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.update_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
)
mock_hook.return_value.create_instance.assert_not_called()
assert result is None
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_update_missing_project_id(self, mock_hook):
mock_hook.return_value.get_instance.return_value = {"name": INSTANCE_ID}
op = SpannerDeployInstanceOperator(
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.update_instance.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
)
mock_hook.return_value.create_instance.assert_not_called()
assert result is None
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_create_aborts_and_succeeds_if_instance_exists(self, mock_hook):
mock_hook.return_value.get_instance.return_value = {"name": INSTANCE_ID}
op = SpannerDeployInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.create_instance.assert_not_called()
assert result is None
@pytest.mark.parametrize(
("project_id", "instance_id", "exp_msg"),
[
("", INSTANCE_ID, "project_id"),
(PROJECT_ID, "", "instance_id"),
],
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_create_ex_if_param_missing(self, mock_hook, project_id, instance_id, exp_msg):
with pytest.raises(AirflowException) as ctx:
SpannerDeployInstanceOperator(
project_id=project_id,
instance_id=instance_id,
configuration_name=CONFIG_NAME,
node_count=int(NODE_COUNT),
display_name=DISPLAY_NAME,
task_id="id",
)
err = ctx.value
assert f"The required parameter '{exp_msg}' is empty" in str(err)
mock_hook.assert_not_called()
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_delete(self, mock_hook):
mock_hook.return_value.get_instance.return_value = {"name": INSTANCE_ID}
op = SpannerDeleteInstanceOperator(project_id=PROJECT_ID, instance_id=INSTANCE_ID, task_id="id")
result = op.execute(None)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID, instance_id=INSTANCE_ID
)
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_delete_missing_project_id(self, mock_hook):
mock_hook.return_value.get_instance.return_value = {"name": INSTANCE_ID}
op = SpannerDeleteInstanceOperator(instance_id=INSTANCE_ID, task_id="id")
result = op.execute(None)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=None, instance_id=INSTANCE_ID
)
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_delete_aborts_and_succeeds_if_instance_does_not_exist(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = SpannerDeleteInstanceOperator(project_id=PROJECT_ID, instance_id=INSTANCE_ID, task_id="id")
result = op.execute(None)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.delete_instance.assert_not_called()
assert result
@pytest.mark.parametrize(
("project_id", "instance_id", "exp_msg"),
[
("", INSTANCE_ID, "project_id"),
(PROJECT_ID, "", "instance_id"),
],
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_delete_ex_if_param_missing(self, mock_hook, project_id, instance_id, exp_msg):
with pytest.raises(AirflowException) as ctx:
SpannerDeleteInstanceOperator(project_id=project_id, instance_id=instance_id, task_id="id")
err = ctx.value
assert f"The required parameter '{exp_msg}' is empty" in str(err)
mock_hook.assert_not_called()
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_query(self, mock_hook):
mock_hook.return_value.execute_dml.return_value = [3]
op = SpannerQueryDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
query=INSERT_QUERY,
task_id="id",
)
result = op.execute(context=mock.MagicMock())
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.execute_dml.assert_called_once_with(
project_id=PROJECT_ID, instance_id=INSTANCE_ID, database_id=DB_ID, queries=[INSERT_QUERY]
)
assert result == [3]
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_query_missing_project_id(self, mock_hook):
mock_hook.return_value.execute_dml.return_value = [3]
op = SpannerQueryDatabaseInstanceOperator(
instance_id=INSTANCE_ID, database_id=DB_ID, query=INSERT_QUERY, task_id="id"
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.execute_dml.assert_called_once_with(
project_id=None, instance_id=INSTANCE_ID, database_id=DB_ID, queries=[INSERT_QUERY]
)
assert result == [3]
@pytest.mark.parametrize(
("project_id", "instance_id", "database_id", "query", "exp_msg"),
[
("", INSTANCE_ID, DB_ID, INSERT_QUERY, "project_id"),
(PROJECT_ID, "", DB_ID, INSERT_QUERY, "instance_id"),
(PROJECT_ID, INSTANCE_ID, "", INSERT_QUERY, "database_id"),
(PROJECT_ID, INSTANCE_ID, DB_ID, "", "query"),
],
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_query_ex_if_param_missing(
self, mock_hook, project_id, instance_id, database_id, query, exp_msg
):
with pytest.raises(AirflowException) as ctx:
SpannerQueryDatabaseInstanceOperator(
project_id=project_id,
instance_id=instance_id,
database_id=database_id,
query=query,
task_id="id",
)
err = ctx.value
assert f"The required parameter '{exp_msg}' is empty" in str(err)
mock_hook.assert_not_called()
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_query_dml(self, mock_hook):
mock_hook.return_value.execute_dml.return_value = None
op = SpannerQueryDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
query=INSERT_QUERY,
task_id="id",
)
context = mock.MagicMock()
op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.execute_dml.assert_called_once_with(
project_id=PROJECT_ID, instance_id=INSTANCE_ID, database_id=DB_ID, queries=[INSERT_QUERY]
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_instance_query_dml_list(self, mock_hook):
mock_hook.return_value.execute_dml.return_value = None
op = SpannerQueryDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
query=[INSERT_QUERY, INSERT_QUERY_2],
task_id="id",
)
context = mock.MagicMock()
op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.execute_dml.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
queries=[INSERT_QUERY, INSERT_QUERY_2],
)
@pytest.mark.parametrize(
("sql", "expected_inputs", "expected_outputs", "expected_lineage"),
[
("SELECT id, amount FROM public.orders", ["db1.public.orders"], [], {}),
(
"INSERT INTO public.orders (id, amount) SELECT id, amount FROM public.staging",
["db1.public.staging", "db1.public.orders"],
[],
{},
),
("DELETE FROM public.logs WHERE id=1", [], ["db1.public.logs"], {}),
(
"SELECT o.id, c.name FROM public.orders o JOIN public.customers c ON o.customer_id = c.id",
["db1.public.orders", "db1.public.customers"],
[],
{},
),
(
"UPDATE public.customers SET name='x' WHERE id IN (SELECT id FROM public.staging)",
["db1.public.customers", "db1.public.staging"],
[],
{},
),
(
["INSERT INTO public.t1 SELECT * FROM public.t2;", "DELETE FROM public.t3 WHERE id=1;"],
["db1.public.t1", "db1.public.t2", "db1.public.t3"],
[],
{},
),
("SELECT id, amount FROM myschema.orders", ["db1.myschema.orders"], [], {}),
],
)
def test_spannerquerydatabaseinstanceoperator_get_openlineage_facets(
self, sql, expected_inputs, expected_outputs, expected_lineage
):
# Arrange
class SpannerHookForTests(DbApiHook):
conn_name_attr = "gcp_conn_id"
get_conn = MagicMock(name="conn")
get_connection = MagicMock()
database = DB_ID
def get_openlineage_database_info(self, connection):
return DatabaseInfo(
scheme="spanner",
authority=f"{PROJECT_ID}/{INSTANCE_ID}",
database=DB_ID,
information_schema_columns=[
"table_schema",
"table_name",
"column_name",
"ordinal_position",
"spanner_type",
],
information_schema_table_name="information_schema.columns",
use_flat_cross_db_query=False,
is_information_schema_cross_db=False,
is_uppercase_names=False,
)
dbapi_hook = SpannerHookForTests()
class SpannerOperatorForTest(SpannerQueryDatabaseInstanceOperator):
@property
def hook(self):
return dbapi_hook
op = SpannerOperatorForTest(
task_id=TASK_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
gcp_conn_id="spanner_conn",
query=sql,
)
dbapi_hook.get_connection.return_value = Connection(
conn_id="spanner_conn", conn_type="spanner", host="spanner-host"
)
combined_rows = []
for ds in expected_inputs + expected_outputs:
tbl = ds.split(".", 1)[1]
combined_rows.extend(SCHEMA_ROWS.get(tbl, []))
dbapi_hook.get_conn.return_value.cursor.return_value.fetchall.side_effect = [combined_rows, []]
# Act
lineage = op.get_openlineage_facets_on_complete(task_instance=None)
assert lineage is not None
# Assert inputs
input_names = {ds.name for ds in lineage.inputs}
assert input_names == set(expected_inputs)
for ds in lineage.inputs:
assert ds.namespace == f"spanner://{PROJECT_ID}/{INSTANCE_ID}"
# Assert outputs
output_names = {ds.name for ds in lineage.outputs}
assert output_names == set(expected_outputs)
for ds in lineage.outputs:
assert ds.namespace == f"spanner://{PROJECT_ID}/{INSTANCE_ID}"
# Assert SQLJobFacet
sql_job = lineage.job_facets["sql"]
if isinstance(sql, list):
for q in sql:
assert q.replace(";", "").strip() in sql_job.query.replace(";", "")
else:
assert sql_job.query == sql
# Assert column lineage
found_lineage = {
getattr(field, "field", None) or getattr(field, "name", None): [
f"{inp.dataset.name}.{getattr(inp, 'field', getattr(inp, 'name', None))}"
for inp in getattr(field, "inputFields", [])
]
for ds in lineage.outputs + lineage.inputs
for cl_facet in [ds.facets.get("columnLineage")]
if cl_facet
for field in cl_facet.fields
}
for col, sources in expected_lineage.items():
assert col in found_lineage
for src in sources:
assert any(src in s for s in found_lineage[col])
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_create(self, mock_hook):
mock_hook.return_value.get_database.return_value = None
op = SpannerDeployDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
ddl_statements=DDL_STATEMENTS,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.create_database.assert_called_once_with(
project_id=PROJECT_ID, instance_id=INSTANCE_ID, database_id=DB_ID, ddl_statements=DDL_STATEMENTS
)
mock_hook.return_value.update_database.assert_not_called()
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_create_missing_project_id(self, mock_hook):
mock_hook.return_value.get_database.return_value = None
op = SpannerDeployDatabaseInstanceOperator(
instance_id=INSTANCE_ID, database_id=DB_ID, ddl_statements=DDL_STATEMENTS, task_id="id"
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.create_database.assert_called_once_with(
project_id=None, instance_id=INSTANCE_ID, database_id=DB_ID, ddl_statements=DDL_STATEMENTS
)
mock_hook.return_value.update_database.assert_not_called()
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_create_with_pre_existing_db(self, mock_hook):
mock_hook.return_value.get_database.return_value = {"name": DB_ID}
op = SpannerDeployDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
ddl_statements=DDL_STATEMENTS,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.create_database.assert_not_called()
mock_hook.return_value.update_database.assert_not_called()
assert result
@pytest.mark.parametrize(
("project_id", "instance_id", "database_id", "ddl_statements", "exp_msg"),
[
("", INSTANCE_ID, DB_ID, DDL_STATEMENTS, "project_id"),
(PROJECT_ID, "", DB_ID, DDL_STATEMENTS, "instance_id"),
(PROJECT_ID, INSTANCE_ID, "", DDL_STATEMENTS, "database_id"),
],
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_create_ex_if_param_missing(
self, mock_hook, project_id, instance_id, database_id, ddl_statements, exp_msg
):
with pytest.raises(AirflowException) as ctx:
SpannerDeployDatabaseInstanceOperator(
project_id=project_id,
instance_id=instance_id,
database_id=database_id,
ddl_statements=ddl_statements,
task_id="id",
)
err = ctx.value
assert f"The required parameter '{exp_msg}' is empty" in str(err)
mock_hook.assert_not_called()
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_update(self, mock_hook):
mock_hook.return_value.get_database.return_value = {"name": DB_ID}
op = SpannerUpdateDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
ddl_statements=DDL_STATEMENTS,
task_id="id",
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.update_database.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
ddl_statements=DDL_STATEMENTS,
operation_id=None,
)
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_update_missing_project_id(self, mock_hook):
mock_hook.return_value.get_database.return_value = {"name": DB_ID}
op = SpannerUpdateDatabaseInstanceOperator(
instance_id=INSTANCE_ID, database_id=DB_ID, ddl_statements=DDL_STATEMENTS, task_id="id"
)
context = mock.MagicMock()
result = op.execute(context=context)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.update_database.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID,
database_id=DB_ID,
ddl_statements=DDL_STATEMENTS,
operation_id=None,
)
assert result
@pytest.mark.parametrize(
("project_id", "instance_id", "database_id", "ddl_statements", "exp_msg"),
[
("", INSTANCE_ID, DB_ID, DDL_STATEMENTS, "project_id"),
(PROJECT_ID, "", DB_ID, DDL_STATEMENTS, "instance_id"),
(PROJECT_ID, INSTANCE_ID, "", DDL_STATEMENTS, "database_id"),
],
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_update_ex_if_param_missing(
self, mock_hook, project_id, instance_id, database_id, ddl_statements, exp_msg
):
with pytest.raises(AirflowException) as ctx:
SpannerUpdateDatabaseInstanceOperator(
project_id=project_id,
instance_id=instance_id,
database_id=database_id,
ddl_statements=ddl_statements,
task_id="id",
)
err = ctx.value
assert f"The required parameter '{exp_msg}' is empty" in str(err)
mock_hook.assert_not_called()
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_update_ex_if_database_not_exist(self, mock_hook):
mock_hook.return_value.get_database.return_value = None
op = SpannerUpdateDatabaseInstanceOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
database_id=DB_ID,
ddl_statements=DDL_STATEMENTS,
task_id="id",
)
with pytest.raises(AirflowException) as ctx:
op.execute(None)
err = ctx.value
assert (
"The Cloud Spanner database 'db1' in project 'project-id' and "
"instance 'instance-id' is missing" in str(err)
)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_delete(self, mock_hook):
mock_hook.return_value.get_database.return_value = {"name": DB_ID}
op = SpannerDeleteDatabaseInstanceOperator(
project_id=PROJECT_ID, instance_id=INSTANCE_ID, database_id=DB_ID, task_id="id"
)
result = op.execute(None)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.delete_database.assert_called_once_with(
project_id=PROJECT_ID, instance_id=INSTANCE_ID, database_id=DB_ID
)
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_delete_missing_project_id(self, mock_hook):
mock_hook.return_value.get_database.return_value = {"name": DB_ID}
op = SpannerDeleteDatabaseInstanceOperator(instance_id=INSTANCE_ID, database_id=DB_ID, task_id="id")
result = op.execute(None)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.delete_database.assert_called_once_with(
project_id=None, instance_id=INSTANCE_ID, database_id=DB_ID
)
assert result
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_delete_exits_and_succeeds_if_database_does_not_exist(self, mock_hook):
mock_hook.return_value.get_database.return_value = None
op = SpannerDeleteDatabaseInstanceOperator(
project_id=PROJECT_ID, instance_id=INSTANCE_ID, database_id=DB_ID, task_id="id"
)
result = op.execute(None)
mock_hook.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=None,
)
mock_hook.return_value.delete_database.assert_not_called()
assert result
@pytest.mark.parametrize(
("project_id", "instance_id", "database_id", "ddl_statements", "exp_msg"),
[
("", INSTANCE_ID, DB_ID, DDL_STATEMENTS, "project_id"),
(PROJECT_ID, "", DB_ID, DDL_STATEMENTS, "instance_id"),
(PROJECT_ID, INSTANCE_ID, "", DDL_STATEMENTS, "database_id"),
],
)
@mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook")
def test_database_delete_ex_if_param_missing(
self, mock_hook, project_id, instance_id, database_id, ddl_statements, exp_msg
):
with pytest.raises(AirflowException) as ctx:
SpannerDeleteDatabaseInstanceOperator(
project_id=project_id,
instance_id=instance_id,
database_id=database_id,
ddl_statements=ddl_statements,
task_id="id",
)
err = ctx.value
assert f"The required parameter '{exp_msg}' is empty" in str(err)
mock_hook.assert_not_called()
| TestCloudSpanner |
python | huggingface__transformers | src/transformers/models/gemma3n/modeling_gemma3n.py | {
"start": 38924,
"end": 41200
} | class ____(nn.Module):
def __init__(self, config: Gemma3nAudioConfig):
super().__init__()
self.config = config
self.pre_layer_norm = Gemma3nRMSNorm(self.config.hidden_size, eps=self.config.rms_norm_eps)
self.linear_start = nn.Linear(self.config.hidden_size, self.config.hidden_size * 2, bias=False)
self.depthwise_conv1d = nn.Conv1d(
in_channels=self.config.hidden_size,
out_channels=self.config.hidden_size,
kernel_size=self.config.conf_conv_kernel_size,
stride=1,
padding=0, # Manual causal padding
groups=self.config.hidden_size, # Depthwise
bias=False,
)
self.register_buffer("gradient_clipping", torch.tensor(self.config.gradient_clipping), persistent=False)
self.conv_norm = Gemma3nRMSNorm(self.config.hidden_size, eps=self.config.rms_norm_eps)
self.linear_end = nn.Linear(self.config.hidden_size, self.config.hidden_size, bias=False)
self.causal_padding = self.config.conf_conv_kernel_size - 1
def forward(self, audio_encodings: torch.Tensor) -> torch.Tensor:
audio_encodings_residual = audio_encodings # Save for residual connection
audio_encodings = self.pre_layer_norm(audio_encodings)
audio_encodings = self.linear_start(audio_encodings)
audio_encodings = torch.nn.functional.glu(audio_encodings, dim=-1)
# Permute for Conv1d: [B, T, D] -> [B, D, T]
audio_encodings_permuted = audio_encodings.permute(0, 2, 1)
# Apply manual causal padding
audio_encodings_permuted_padded = F.pad(audio_encodings_permuted, (self.causal_padding, 0))
audio_encodings = self.depthwise_conv1d(audio_encodings_permuted_padded)
# Permute back: [B, D, T_out] -> [B, T_out, D]
audio_encodings = audio_encodings.permute(0, 2, 1)
audio_encodings = torch.clamp(audio_encodings, -self.gradient_clipping, self.gradient_clipping)
audio_encodings = self.conv_norm(audio_encodings)
audio_encodings = nn.functional.silu(audio_encodings)
audio_encodings = self.linear_end(audio_encodings)
output = audio_encodings + audio_encodings_residual
return output
| Gemma3nAudioConformerLightConv1d |
python | fastai__fastai | fastai/layers.py | {
"start": 16789,
"end": 17535
} | class ____(Module):
"Like `nn.Sequential`, but with ModuleList semantics, and can access module input"
def __init__(self, *layers): self.layers = nn.ModuleList(layers)
def forward(self, x):
res = x
for l in self.layers:
res.orig = x
nres = l(res)
# We have to remove res.orig to avoid hanging refs and therefore memory leaks
res.orig, nres.orig = None, None
res = nres
return res
def __getitem__(self,i): return self.layers[i]
def append(self,l): return self.layers.append(l)
def extend(self,l): return self.layers.extend(l)
def insert(self,i,l): return self.layers.insert(i,l)
# %% ../nbs/01_layers.ipynb 113
| SequentialEx |
python | spack__spack | lib/spack/spack/ci/common.py | {
"start": 10081,
"end": 10255
} | class ____(Enum):
COPY_ONLY = 1
spack_copy_only = 1
PROTECTED_BRANCH = 2
spack_protected_branch = 2
PULL_REQUEST = 3
spack_pull_request = 3
| PipelineType |
python | kubernetes-client__python | kubernetes/client/models/v1_resource_claim_template.py | {
"start": 383,
"end": 6941
} | class ____(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1ResourceClaimTemplateSpec'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, local_vars_configuration=None): # noqa: E501
"""V1ResourceClaimTemplate - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
self.spec = spec
@property
def api_version(self):
"""Gets the api_version of this V1ResourceClaimTemplate. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1ResourceClaimTemplate. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1ResourceClaimTemplate.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1ResourceClaimTemplate. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1ResourceClaimTemplate. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1ResourceClaimTemplate. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1ResourceClaimTemplate.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1ResourceClaimTemplate. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1ResourceClaimTemplate. # noqa: E501
:return: The metadata of this V1ResourceClaimTemplate. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1ResourceClaimTemplate.
:param metadata: The metadata of this V1ResourceClaimTemplate. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1ResourceClaimTemplate. # noqa: E501
:return: The spec of this V1ResourceClaimTemplate. # noqa: E501
:rtype: V1ResourceClaimTemplateSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1ResourceClaimTemplate.
:param spec: The spec of this V1ResourceClaimTemplate. # noqa: E501
:type: V1ResourceClaimTemplateSpec
"""
if self.local_vars_configuration.client_side_validation and spec is None: # noqa: E501
raise ValueError("Invalid value for `spec`, must not be `None`") # noqa: E501
self._spec = spec
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1ResourceClaimTemplate):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1ResourceClaimTemplate):
return True
return self.to_dict() != other.to_dict()
| V1ResourceClaimTemplate |
python | pytorch__pytorch | benchmarks/operator_benchmark/pt/qpool_test.py | {
"start": 3709,
"end": 4379
} | class ____(_QPool2dBenchmarkBase):
def init(self, N, C, input_size, output_size, contig, dtype):
self.pool_op = torch.nn.AdaptiveAvgPool2d(output_size=output_size)
super().setup(N, C, *input_size, dtype=dtype, contig=contig)
op_bench.generate_pt_test(
qadaptive_avgpool2d_short_configs + qadaptive_avgpool2d_long_configs,
QAdaptiveAvgPool2dBenchmark,
)
op_bench.generate_pt_test(
qpool2d_short_configs + qpool2d_long_configs, QAvgPool2dBenchmark
)
op_bench.generate_pt_test(
qpool2d_short_configs + qpool2d_long_configs, QMaxPool2dBenchmark
)
if __name__ == "__main__":
op_bench.benchmark_runner.main()
| QAdaptiveAvgPool2dBenchmark |
python | xlwings__xlwings | xlwings/main.py | {
"start": 99104,
"end": 100169
} | class ____:
def __init__(self, impl):
"""
Represents a cell Note.
Before the introduction of threaded comments, a Note was called a Comment.
.. versionadded:: 0.24.2
"""
self.impl = impl
@property
def api(self):
"""
Returns the native object (``pywin32`` or ``appscript`` obj)
of the engine being used.
.. versionadded:: 0.24.2
"""
return self.impl.api
@property
def text(self):
"""
Gets or sets the text of a note. Keep in mind that the note must already exist!
Examples
--------
>>> sheet = xw.Book(...).sheets[0]
>>> sheet['A1'].note.text = 'mynote'
>>> sheet['A1'].note.text
>>> 'mynote'
.. versionadded:: 0.24.2
"""
return self.impl.text
@text.setter
def text(self, value):
self.impl.text = value
def delete(self):
"""
Delete the note.
.. versionadded:: 0.24.2
"""
self.impl.delete()
| Note |
python | huggingface__transformers | src/transformers/data/data_collator.py | {
"start": 51974,
"end": 65611
} | class ____(DataCollatorMixin):
"""
Data collator used for permutation language modeling.
- collates batches of tensors, honoring their tokenizer's pad_token
- preprocesses batches for permutation language modeling with procedures specific to XLNet
"""
tokenizer: PreTrainedTokenizerBase
plm_probability: float = 1 / 6
max_span_length: int = 5 # maximum length of a span of masked tokens
return_tensors: str = "pt"
def torch_call(self, examples: list[list[int] | Any | dict[str, Any]]) -> dict[str, Any]:
if isinstance(examples[0], Mapping):
examples = [e["input_ids"] for e in examples]
batch = _torch_collate_batch(examples, self.tokenizer)
inputs, perm_mask, target_mapping, labels = self.torch_mask_tokens(batch)
return {"input_ids": inputs, "perm_mask": perm_mask, "target_mapping": target_mapping, "labels": labels}
def numpy_call(self, examples: list[list[int] | Any | dict[str, Any]]) -> dict[str, Any]:
if isinstance(examples[0], Mapping):
examples = [e["input_ids"] for e in examples]
batch = _numpy_collate_batch(examples, self.tokenizer)
inputs, perm_mask, target_mapping, labels = self.numpy_mask_tokens(batch)
return {"input_ids": inputs, "perm_mask": perm_mask, "target_mapping": target_mapping, "labels": labels}
def torch_mask_tokens(self, inputs: Any) -> tuple[Any, Any, Any, Any]:
"""
The masked tokens to be predicted for a particular sequence are determined by the following algorithm:
0. Start from the beginning of the sequence by setting `cur_len = 0` (number of tokens processed so far).
1. Sample a `span_length` from the interval `[1, max_span_length]` (length of span of tokens to be masked)
2. Reserve a context of length `context_length = span_length / plm_probability` to surround span to be
masked
3. Sample a starting point `start_index` from the interval `[cur_len, cur_len + context_length -
span_length]` and mask tokens `start_index:start_index + span_length`
4. Set `cur_len = cur_len + context_length`. If `cur_len < max_len` (i.e. there are tokens remaining in the
sequence to be processed), repeat from Step 1.
"""
import torch
if self.tokenizer.mask_token is None:
raise ValueError(
"This tokenizer does not have a mask token which is necessary for permutation language modeling."
" Please add a mask token if you want to use this tokenizer."
)
if inputs.size(1) % 2 != 0:
raise ValueError(
"This collator requires that sequence lengths be even to create a leakage-free perm_mask. Please see"
" relevant comments in source code for details."
)
labels = inputs.clone()
# Creating the mask and target_mapping tensors
masked_indices = torch.full(labels.shape, 0, dtype=torch.bool)
target_mapping = torch.zeros((labels.size(0), labels.size(1), labels.size(1)), dtype=torch.float32)
for i in range(labels.size(0)):
# Start from the beginning of the sequence by setting `cur_len = 0` (number of tokens processed so far).
cur_len = 0
max_len = labels.size(1)
while cur_len < max_len:
# Sample a `span_length` from the interval `[1, max_span_length]` (length of span of tokens to be masked)
span_length = torch.randint(1, self.max_span_length + 1, (1,)).item()
# Reserve a context of length `context_length = span_length / plm_probability` to surround the span to be masked
context_length = int(span_length / self.plm_probability)
# Sample a starting point `start_index` from the interval `[cur_len, cur_len + context_length - span_length]` and mask tokens `start_index:start_index + span_length`
start_index = cur_len + torch.randint(context_length - span_length + 1, (1,)).item()
masked_indices[i, start_index : start_index + span_length] = 1
# Set `cur_len = cur_len + context_length`
cur_len += context_length
# Since we're replacing non-masked tokens with -100 in the labels tensor instead of skipping them altogether,
# the i-th predict corresponds to the i-th token.
target_mapping[i] = torch.eye(labels.size(1))
special_tokens_mask = torch.tensor(
[self.tokenizer.get_special_tokens_mask(val, already_has_special_tokens=True) for val in labels.tolist()],
dtype=torch.bool,
)
masked_indices.masked_fill_(special_tokens_mask, value=0.0)
if self.tokenizer.pad_token is not None:
padding_mask = labels.eq(self.tokenizer.pad_token_id)
masked_indices.masked_fill_(padding_mask, value=0.0)
# Mask indicating non-functional tokens, where functional tokens are [SEP], [CLS], padding, etc.
non_func_mask = ~(padding_mask | special_tokens_mask)
inputs[masked_indices] = self.tokenizer.mask_token_id
labels[~masked_indices] = -100 # We only compute loss on masked tokens
perm_mask = torch.zeros((labels.size(0), labels.size(1), labels.size(1)), dtype=torch.float32)
for i in range(labels.size(0)):
# Generate permutation indices i.e. sample a random factorisation order for the sequence. This will
# determine which tokens a given token can attend to (encoded in `perm_mask`).
# Note: Length of token sequence being permuted has to be less than or equal to reused sequence length
# (see documentation for `mems`), otherwise information may leak through due to reuse. In this implementation,
# we assume that reused length is half of sequence length and permutation length is equal to reused length.
# This requires that the sequence length be even.
# Create a linear factorisation order
perm_index = torch.arange(labels.size(1))
# Split this into two halves, assuming that half the sequence is reused each time
perm_index = perm_index.reshape((-1, labels.size(1) // 2)).transpose(0, 1)
# Permute the two halves such that they do not cross over
perm_index = perm_index[torch.randperm(labels.size(1) // 2)]
# Flatten this out into the desired permuted factorisation order
perm_index = torch.flatten(perm_index.transpose(0, 1))
# Set the permutation indices of non-masked (non-functional) tokens to the
# smallest index (-1) so that:
# (1) They can be seen by all other positions
# (2) They cannot see masked positions, so there won't be information leak
perm_index.masked_fill_(~masked_indices[i] & non_func_mask[i], -1)
# The logic for whether the i-th token can attend on the j-th token based on the factorisation order:
# 0 (can attend): If perm_index[i] > perm_index[j] or j is neither masked nor a functional token
# 1 (cannot attend): If perm_index[i] <= perm_index[j] and j is either masked or a functional token
perm_mask[i] = (
perm_index.reshape((labels.size(1), 1)) <= perm_index.reshape((1, labels.size(1)))
) & masked_indices[i]
return inputs.long(), perm_mask, target_mapping, labels.long()
def numpy_mask_tokens(self, inputs: Any) -> tuple[Any, Any, Any, Any]:
"""
The masked tokens to be predicted for a particular sequence are determined by the following algorithm:
0. Start from the beginning of the sequence by setting `cur_len = 0` (number of tokens processed so far).
1. Sample a `span_length` from the interval `[1, max_span_length]` (length of span of tokens to be masked)
2. Reserve a context of length `context_length = span_length / plm_probability` to surround span to be
masked
3. Sample a starting point `start_index` from the interval `[cur_len, cur_len + context_length -
span_length]` and mask tokens `start_index:start_index + span_length`
4. Set `cur_len = cur_len + context_length`. If `cur_len < max_len` (i.e. there are tokens remaining in the
sequence to be processed), repeat from Step 1.
"""
if self.tokenizer.mask_token is None:
raise ValueError(
"This tokenizer does not have a mask token which is necessary for permutation language modeling."
" Please add a mask token if you want to use this tokenizer."
)
if inputs.shape[1] % 2 != 0:
raise ValueError(
"This collator requires that sequence lengths be even to create a leakage-free perm_mask. Please see"
" relevant comments in source code for details."
)
labels = np.copy(inputs)
# Creating the mask and target_mapping tensors
masked_indices = np.full(labels.shape, 0, dtype=bool)
target_mapping = np.zeros((labels.shape[0], labels.shape[1], labels.shape[1]), dtype=np.float32)
for i in range(labels.shape[0]):
# Start from the beginning of the sequence by setting `cur_len = 0` (number of tokens processed so far).
cur_len = 0
max_len = labels.shape[1]
while cur_len < max_len:
# Sample a `span_length` from the interval `[1, max_span_length]` (length of span of tokens to be masked)
span_length = randint(1, self.max_span_length + 1)
# Reserve a context of length `context_length = span_length / plm_probability` to surround the span to be masked
context_length = int(span_length / self.plm_probability)
# Sample a starting point `start_index` from the interval `[cur_len, cur_len + context_length - span_length]` and mask tokens `start_index:start_index + span_length`
start_index = cur_len + randint(0, context_length - span_length + 1)
masked_indices[i, start_index : start_index + span_length] = 1
# Set `cur_len = cur_len + context_length`
cur_len += context_length
# Since we're replacing non-masked tokens with -100 in the labels tensor instead of skipping them altogether,
# the i-th predict corresponds to the i-th token.
target_mapping[i] = np.eye(labels.shape[1])
special_tokens_mask = np.array(
[self.tokenizer.get_special_tokens_mask(val, already_has_special_tokens=True) for val in labels.tolist()],
dtype=bool,
)
masked_indices[special_tokens_mask] = 0
if self.tokenizer.pad_token is not None:
padding_mask = labels == self.tokenizer.pad_token_id
masked_indices[padding_mask] = 0.0
# Mask indicating non-functional tokens, where functional tokens are [SEP], [CLS], padding, etc.
non_func_mask = ~(padding_mask | special_tokens_mask)
inputs[masked_indices] = self.tokenizer.mask_token_id
labels[~masked_indices] = -100 # We only compute loss on masked tokens
perm_mask = np.zeros((labels.shape[0], labels.shape[1], labels.shape[1]), dtype=np.float32)
for i in range(labels.shape[0]):
# Generate permutation indices i.e. sample a random factorisation order for the sequence. This will
# determine which tokens a given token can attend to (encoded in `perm_mask`).
# Note: Length of token sequence being permuted has to be less than or equal to reused sequence length
# (see documentation for `mems`), otherwise information may leak through due to reuse. In this implementation,
# we assume that reused length is half of sequence length and permutation length is equal to reused length.
# This requires that the sequence length be even.
# Create a linear factorisation order
perm_index = np.arange(labels.shape[1])
# Split this into two halves, assuming that half the sequence is reused each time
perm_index = perm_index.reshape((-1, labels.shape[1] // 2)).T
# Permute the two halves such that they do not cross over
np.random.shuffle(perm_index)
# Flatten this out into the desired permuted factorisation order
perm_index = perm_index.T.flatten()
# Set the permutation indices of non-masked (non-functional) tokens to the
# smallest index (-1) so that:
# (1) They can be seen by all other positions
# (2) They cannot see masked positions, so there won't be information leak
perm_index[~masked_indices[i] & non_func_mask[i]] = -1
# The logic for whether the i-th token can attend on the j-th token based on the factorisation order:
# 0 (can attend): If perm_index[i] > perm_index[j] or j is neither masked nor a functional token
# 1 (cannot attend): If perm_index[i] <= perm_index[j] and j is either masked or a functional token
perm_mask[i] = (
perm_index.reshape((labels.shape[1], 1)) <= perm_index.reshape((1, labels.shape[1]))
) & masked_indices[i]
return inputs.astype(np.int64), perm_mask, target_mapping, labels.astype(np.int64)
@dataclass
| DataCollatorForPermutationLanguageModeling |
python | sphinx-doc__sphinx | tests/roots/test-ext-autodoc/target/overload.py | {
"start": 1081,
"end": 1273
} | class ____(type):
@overload
def __call__(cls, x: int, y: int) -> Any: ...
@overload
def __call__(cls, x: str, y: str) -> Any: ...
def __call__(cls, x, y):
pass
| Meta |
python | Textualize__textual | src/textual/_animator.py | {
"start": 1416,
"end": 2544
} | class ____(ABC):
on_complete: CallbackType | None = None
"""Callback to run after animation completes"""
@abstractmethod
def __call__(
self,
time: float,
app_animation_level: AnimationLevel = "full",
) -> bool: # pragma: no cover
"""Call the animation, return a boolean indicating whether animation is in-progress or complete.
Args:
time: The current timestamp
Returns:
True if the animation has finished, otherwise False.
"""
raise NotImplementedError("")
async def invoke_callback(self) -> None:
"""Calls the [`on_complete`][Animation.on_complete] callback if one is provided."""
if self.on_complete is not None:
await invoke(self.on_complete)
@abstractmethod
async def stop(self, complete: bool = True) -> None:
"""Stop the animation.
Args:
complete: Flag to say if the animation should be taken to completion.
"""
raise NotImplementedError
def __eq__(self, other: object) -> bool:
return False
@dataclass
| Animation |
python | django__django | tests/serializers/models/base.py | {
"start": 783,
"end": 1061
} | class ____(models.Model):
name = models.CharField(max_length=20)
meta_data = models.ForeignKey(
CategoryMetaData, models.SET_NULL, null=True, default=None
)
class Meta:
ordering = ("name",)
def __str__(self):
return self.name
| Category |
python | ray-project__ray | rllib/models/tf/layers/gru_gate.py | {
"start": 231,
"end": 1968
} | class ____(tf.keras.layers.Layer if tf else object):
def __init__(self, init_bias: float = 0.0, **kwargs):
super().__init__(**kwargs)
self._init_bias = init_bias
if log_once("gru_gate"):
deprecation_warning(
old="rllib.models.tf.layers.GRUGate",
)
def build(self, input_shape: TensorShape):
h_shape, x_shape = input_shape
if x_shape[-1] != h_shape[-1]:
raise ValueError(
"Both inputs to GRUGate must have equal size in last axis!"
)
dim = int(h_shape[-1])
self._w_r = self.add_weight(shape=(dim, dim))
self._w_z = self.add_weight(shape=(dim, dim))
self._w_h = self.add_weight(shape=(dim, dim))
self._u_r = self.add_weight(shape=(dim, dim))
self._u_z = self.add_weight(shape=(dim, dim))
self._u_h = self.add_weight(shape=(dim, dim))
def bias_initializer(shape, dtype):
return tf.fill(shape, tf.cast(self._init_bias, dtype=dtype))
self._bias_z = self.add_weight(shape=(dim,), initializer=bias_initializer)
def call(self, inputs: TensorType, **kwargs) -> TensorType:
# Pass in internal state first.
h, X = inputs
r = tf.tensordot(X, self._w_r, axes=1) + tf.tensordot(h, self._u_r, axes=1)
r = tf.nn.sigmoid(r)
z = (
tf.tensordot(X, self._w_z, axes=1)
+ tf.tensordot(h, self._u_z, axes=1)
- self._bias_z
)
z = tf.nn.sigmoid(z)
h_next = tf.tensordot(X, self._w_h, axes=1) + tf.tensordot(
(h * r), self._u_h, axes=1
)
h_next = tf.nn.tanh(h_next)
return (1 - z) * h + z * h_next
| GRUGate |
python | ray-project__ray | python/ray/_private/thirdparty/pynvml/pynvml.py | {
"start": 60569,
"end": 60740
} | class ____(_PrintableStructure):
_fields_ = [
('bridgeCount', c_uint),
('bridgeChipInfo', c_nvmlBridgeChipInfo_t * 128),
]
| c_nvmlBridgeChipHierarchy_t |
python | weaviate__weaviate-python-client | weaviate/collections/queries/near_image/generate/executor.py | {
"start": 1056,
"end": 20109
} | class ____(
Generic[ConnectionType, Properties, References], _BaseExecutor[ConnectionType]
):
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Literal[None] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Union[PROPERTIES, bool, None] = None,
return_references: Literal[None] = None,
) -> executor.Result[GenerativeReturn[Properties, References]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Literal[None] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Union[PROPERTIES, bool, None] = None,
return_references: REFERENCES,
) -> executor.Result[GenerativeReturn[Properties, CrossReferences]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Literal[None] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Union[PROPERTIES, bool, None] = None,
return_references: Type[TReferences],
) -> executor.Result[GenerativeReturn[Properties, TReferences]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Literal[None] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Type[TProperties],
return_references: Literal[None] = None,
) -> executor.Result[GenerativeReturn[TProperties, References]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Literal[None] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Type[TProperties],
return_references: REFERENCES,
) -> executor.Result[GenerativeReturn[TProperties, CrossReferences]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Literal[None] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Type[TProperties],
return_references: Type[TReferences],
) -> executor.Result[GenerativeReturn[TProperties, TReferences]]: ...
### GroupBy ###
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: GroupBy,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Union[PROPERTIES, bool, None] = None,
return_references: Literal[None] = None,
) -> executor.Result[GenerativeGroupByReturn[Properties, References]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: GroupBy,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Union[PROPERTIES, bool, None] = None,
return_references: REFERENCES,
) -> executor.Result[GenerativeGroupByReturn[Properties, CrossReferences]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: GroupBy,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Union[PROPERTIES, bool, None] = None,
return_references: Type[TReferences],
) -> executor.Result[GenerativeGroupByReturn[Properties, TReferences]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: GroupBy,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Type[TProperties],
return_references: Literal[None] = None,
) -> executor.Result[GenerativeGroupByReturn[TProperties, References]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: GroupBy,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Type[TProperties],
return_references: REFERENCES,
) -> executor.Result[GenerativeGroupByReturn[TProperties, CrossReferences]]: ...
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: GroupBy,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Type[TProperties],
return_references: Type[TReferences],
) -> executor.Result[GenerativeGroupByReturn[TProperties, TReferences]]: ...
### DEFAULT ###
@overload
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Optional[GroupBy] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Optional[ReturnProperties[TProperties]] = None,
return_references: Optional[ReturnReferences[TReferences]] = None,
) -> executor.Result[
GenerativeSearchReturnType[Properties, References, TProperties, TReferences]
]: ...
def near_image(
self,
near_image: BLOB_INPUT,
*,
single_prompt: Union[str, _SinglePrompt, None] = None,
grouped_task: Union[str, _GroupedTask, None] = None,
grouped_properties: Optional[List[str]] = None,
generative_provider: Optional[_GenerativeConfigRuntime] = None,
certainty: Optional[NUMBER] = None,
distance: Optional[NUMBER] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
auto_limit: Optional[int] = None,
filters: Optional[_Filters] = None,
group_by: Optional[GroupBy] = None,
rerank: Optional[Rerank] = None,
target_vector: Optional[TargetVectorJoinType] = None,
include_vector: INCLUDE_VECTOR = False,
return_metadata: Optional[METADATA] = None,
return_properties: Optional[ReturnProperties[TProperties]] = None,
return_references: Optional[ReturnReferences[TReferences]] = None,
) -> executor.Result[
GenerativeSearchReturnType[Properties, References, TProperties, TReferences]
]:
"""Perform retrieval-augmented generation (RaG) on the results of a by-image object search in this collection using an image-capable vectorization module and vector-based similarity search.
See the [docs](https://weaviate.io/developers/weaviate/search/image) for a more detailed explanation.
NOTE:
You must have an image-capable vectorization module installed in order to use this method, e.g. `img2vec-neural`, `multi2vec-clip`, or `multi2vec-bind.
Args:
near_image: The image file to search on, REQUIRED. This can be a base64 encoded string of the binary, a path to the file, or a file-like object.
certainty: The minimum similarity score to return. If not specified, the default certainty specified by the server is used.
distance: The maximum distance to search. If not specified, the default distance specified by the server is used.
limit: The maximum number of results to return. If not specified, the default limit specified by the server is returned.
offset: The offset to start from. If not specified, the retrieval begins from the first object in the server.
auto_limit: The maximum number of [autocut](https://weaviate.io/developers/weaviate/api/graphql/additional-operators#autocut) results to return. If not specified, no limit is applied.
filters: The filters to apply to the search.
group_by: How the results should be grouped by a specific property.
rerank: How the results should be reranked. NOTE: A `rerank-*` module must be enabled for this functionality to work.
target_vector: The name of the vector space to search in for named vector configurations. Required if multiple spaces are configured.
include_vector: Whether to include the vector in the results. If not specified, this is set to False.
return_metadata: The metadata to return for each object, defaults to `None`.
return_properties: The properties to return for each object.
return_references: The references to return for each object.
NOTE:
- If `return_properties` is not provided then all properties are returned except for blob properties.
- If `return_metadata` is not provided then no metadata is provided. Use MetadataQuery.full() to retrieve all metadata.
- If `return_references` is not provided then no references are provided.
Returns:
A `GenerativeReturn` or `GenerativeGroupByReturn` object that includes the searched objects.
If `group_by` is provided then a `GenerativeGroupByReturn` object is returned, otherwise a `GenerativeReturn` object is returned.
Raises:
weaviate.exceptions.WeaviateQueryError: If the request to the Weaviate server fails.
"""
def resp(
res: search_get_pb2.SearchReply,
) -> GenerativeSearchReturnType[Properties, References, TProperties, TReferences]:
return cast(
Any,
self._result_to_generative_return(
res,
_QueryOptions.from_input(
return_metadata,
return_properties,
include_vector,
self._references,
return_references,
rerank,
group_by,
),
),
)
request = self._query.near_media(
media=parse_blob(near_image),
type_=NearMediaType.IMAGE.value,
certainty=certainty,
distance=distance,
filters=filters,
group_by=_GroupBy.from_input(group_by),
rerank=rerank,
target_vector=target_vector,
generative=_Generative(
single=single_prompt,
grouped=grouped_task,
grouped_properties=grouped_properties,
generative_provider=generative_provider,
),
limit=limit,
offset=offset,
autocut=auto_limit,
return_metadata=self._parse_return_metadata(return_metadata, include_vector),
return_properties=self._parse_return_properties(return_properties),
return_references=self._parse_return_references(return_references),
)
return executor.execute(
response_callback=resp,
method=self._connection.grpc_search,
request=request,
)
| _NearImageGenerateExecutor |
python | weaviate__weaviate-python-client | weaviate/collections/queries/near_text/generate/sync.py | {
"start": 312,
"end": 457
} | class ____(
Generic[Properties, References],
_NearTextGenerateExecutor[ConnectionSync, Properties, References],
):
pass
| _NearTextGenerate |
python | ijl__orjson | test/test_uuid.py | {
"start": 92,
"end": 3423
} | class ____:
def test_uuid_immutable(self):
"""
UUID objects are immutable
"""
val = uuid.uuid4()
with pytest.raises(TypeError):
val.int = 1 # type: ignore
with pytest.raises(TypeError):
val.int = None # type: ignore
def test_uuid_int(self):
"""
UUID.int is a 128-bit integer
"""
val = uuid.UUID("7202d115-7ff3-4c81-a7c1-2a1f067b1ece")
assert isinstance(val.int, int)
assert val.int >= 2**64
assert val.int < 2**128
assert val.int == 151546616840194781678008611711208857294
def test_uuid_overflow(self):
"""
UUID.int can't trigger errors in _PyLong_AsByteArray
"""
with pytest.raises(ValueError):
uuid.UUID(int=2**128)
with pytest.raises(ValueError):
uuid.UUID(int=-1)
def test_uuid_subclass(self):
"""
UUID subclasses are not serialized
"""
class AUUID(uuid.UUID):
pass
with pytest.raises(orjson.JSONEncodeError):
orjson.dumps(AUUID("{12345678-1234-5678-1234-567812345678}"))
def test_serializes_withopt(self):
"""
dumps() accepts deprecated OPT_SERIALIZE_UUID
"""
assert (
orjson.dumps(
uuid.UUID("7202d115-7ff3-4c81-a7c1-2a1f067b1ece"),
option=orjson.OPT_SERIALIZE_UUID,
)
== b'"7202d115-7ff3-4c81-a7c1-2a1f067b1ece"'
)
def test_nil_uuid(self):
assert (
orjson.dumps(uuid.UUID("00000000-0000-0000-0000-000000000000"))
== b'"00000000-0000-0000-0000-000000000000"'
)
def test_all_ways_to_create_uuid_behave_equivalently(self):
# Note that according to the docstring for the uuid.UUID class, all the
# forms below are equivalent -- they end up with the same value for
# `self.int`, which is all that really matters
uuids = [
uuid.UUID("{12345678-1234-5678-1234-567812345678}"),
uuid.UUID("12345678123456781234567812345678"),
uuid.UUID("urn:uuid:12345678-1234-5678-1234-567812345678"),
uuid.UUID(bytes=b"\x12\x34\x56\x78" * 4),
uuid.UUID(
bytes_le=b"\x78\x56\x34\x12\x34\x12\x78\x56\x12\x34\x56\x78\x12\x34\x56\x78",
),
uuid.UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)),
uuid.UUID(int=0x12345678123456781234567812345678),
]
result = orjson.dumps(uuids)
canonical_uuids = [f'"{u!s}"' for u in uuids]
serialized = ("[{}]".format(",".join(canonical_uuids))).encode("utf8")
assert result == serialized
def test_serializes_correctly_with_leading_zeroes(self):
instance = uuid.UUID(int=0x00345678123456781234567812345678)
assert orjson.dumps(instance) == (f'"{instance!s}"').encode("utf-8")
def test_all_uuid_creation_functions_create_serializable_uuids(self):
uuids = (
uuid.uuid1(),
uuid.uuid3(uuid.NAMESPACE_DNS, "python.org"),
uuid.uuid4(),
uuid.uuid5(uuid.NAMESPACE_DNS, "python.org"),
)
for val in uuids:
assert orjson.dumps(val) == f'"{val}"'.encode("utf-8")
| TestUUID |
python | pytorch__pytorch | torch/fx/experimental/normalize.py | {
"start": 442,
"end": 3614
} | class ____(Transformer):
"""
Normalize arguments to Python targets. This means that
`args/kwargs` will be matched up to the module/functional's
signature and rewritten to exclusively kwargs in positional order
if `normalize_to_only_use_kwargs` is true. Also populates default
values. Does not support positional-only parameters or varargs
parameters (*args, **kwargs).
If the nodes have 'type' metadata, it will use it to disambiguate
overloads. Otherwise, it will throw an error.
Example usage:
m = torchvision.models.resnet18()
traced = torch.fx.symbolic_trace(m)
traced = NormalizeArgs(traced).transform()
"""
def __init__(
self, module: torch.fx.GraphModule, normalize_to_only_use_kwargs: bool = True
):
super().__init__(module)
self.node_map: dict[Proxy, Node] = {}
self.normalize_to_only_use_kwargs = normalize_to_only_use_kwargs
def run_node(self, n: Node) -> Any:
args, kwargs = self.fetch_args_kwargs_from_env(n)
def get_type(arg):
if isinstance(arg, fx.Node):
return n.meta.get("type")
return type(arg)
arg_types = map_aggregate(n.args, get_type)
assert isinstance(arg_types, tuple)
arg_types = tuple(create_type_hint(i) for i in arg_types)
kwarg_types = {k: get_type(v) for k, v in kwargs.items()}
if n.op == "call_function":
out = self.call_function(n.target, args, kwargs, arg_types, kwarg_types)
else:
out = super().run_node(n)
if n.op != "output":
self.node_map[out] = n
out.node.meta = n.meta
out.node.type = n.type
return out
def call_function(
self,
target: Target,
args: tuple[Argument, ...],
kwargs: dict[str, Any],
arg_types: Optional[tuple[Any, ...]] = None,
kwarg_types: Optional[dict[str, Any]] = None,
):
assert callable(target)
new_args_and_kwargs = normalize_function(
target,
args, # type: ignore[arg-type]
kwargs,
arg_types, # type: ignore[arg-type]
kwarg_types,
self.normalize_to_only_use_kwargs,
)
if new_args_and_kwargs:
new_args, new_kwargs = new_args_and_kwargs
return self.tracer.create_proxy(
"call_function", target, new_args, new_kwargs
)
else:
return super().call_function(target, args, kwargs)
def call_module(
self, target: Target, args: tuple[Argument, ...], kwargs: dict[str, Any]
):
assert isinstance(target, str)
new_args_and_kwargs = normalize_module(
self.module,
target,
args, # type: ignore[arg-type]
kwargs,
self.normalize_to_only_use_kwargs,
)
if new_args_and_kwargs:
new_args, new_kwargs = new_args_and_kwargs
return super().call_module(target, new_args, new_kwargs)
else:
return super().call_module(target, args, kwargs)
| NormalizeArgs |
python | ZoranPandovski__al-go-rithms | data_structures/heap/Python/BinaryHeaps/BinaryHeaps.py | {
"start": 0,
"end": 988
} | class ____:
# Parent class for MinHeap and MaxHeap
def get_root(self):
# Return first element if it exists else inf (MaxHeap) or -inf (MinHeap)
if self.size > 0:
return self.array[1]
else:
return self.array[0]
def get_array(self):
# Return the heap in form of array
return self.array[1:]
@staticmethod
def parent(ind):
# Calculate parent position for given index
return ind//2
@staticmethod
def child_left(ind):
# Calculate first child position for given index
return ind*2
@staticmethod
def child_right(ind):
# Calculate second child position for given index
return ind*2 + 1
def is_child_left(self, ind):
# Check if first child exists
return self.child_left(ind) < len(self.array)
def is_child_right(self, ind):
# Check if second child exists
return self.child_right(ind) < len(self.array)
| Heap |
python | keon__algorithms | tests/test_maths.py | {
"start": 14392,
"end": 15057
} | class ____(unittest.TestCase):
"""[summary]
Test for the file num_perfect_squares.py
Arguments:
unittest {[type]} -- [description]
"""
def test_num_perfect_squares(self):
self.assertEqual(4,num_perfect_squares(31))
self.assertEqual(3,num_perfect_squares(12))
self.assertEqual(2,num_perfect_squares(13))
self.assertEqual(2,num_perfect_squares(10))
self.assertEqual(4,num_perfect_squares(1500))
self.assertEqual(2,num_perfect_squares(1548524521))
self.assertEqual(3,num_perfect_squares(9999999993))
self.assertEqual(1,num_perfect_squares(9))
| TestNumberOfPerfectSquares |
python | openai__openai-python | src/openai/types/chat/chat_completion_content_part_text.py | {
"start": 206,
"end": 363
} | class ____(BaseModel):
text: str
"""The text content."""
type: Literal["text"]
"""The type of the content part."""
| ChatCompletionContentPartText |
python | davidhalter__jedi | jedi/inference/value/instance.py | {
"start": 1534,
"end": 2375
} | class ____(AnonymousFunctionExecutionFilter):
def __init__(self, instance, *args, **kwargs):
super().__init__(*args, **kwargs)
self._instance = instance
def _convert_param(self, param, name):
if param.position_index == 0:
if function_is_classmethod(self._function_value.tree_node):
return InstanceExecutedParamName(
self._instance.py__class__(),
self._function_value,
name
)
elif not function_is_staticmethod(self._function_value.tree_node):
return InstanceExecutedParamName(
self._instance,
self._function_value,
name
)
return super()._convert_param(param, name)
| AnonymousMethodExecutionFilter |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.