id int64 0 300k | label stringlengths 1 74 ⌀ | text stringlengths 4k 8k |
|---|---|---|
6,000 | connect ec2 endpoint | import logging
from _typeshed import Incomplete
from typing import Any
from .s3.connection import S3Connection
Version: Any
UserAgent: Any
config: Any
BUCKET_NAME_RE: Any
TOO_LONG_DNS_NAME_COMP: Any
GENERATION_RE: Any
VERSION_RE: Any
ENDPOINTS_PATH: Any
def init_logging(): ...
class NullHandler(logging.Handler):
    """Logging handler that silently discards every record (stub)."""
    def emit(self, record): ...
log: Any
perflog: Any
def set_file_logger(name, filepath, level: Any = 20, format_string: Incomplete | None = None): ...
def set_stream_logger(name, level: Any = 10, format_string: Incomplete | None = None): ...
def connect_sqs(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_s3(aws_access_key_id: str | None = None, aws_secret_access_key: str | None = None, **kwargs) -> S3Connection: ...
def connect_gs(gs_access_key_id: Incomplete | None = None, gs_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_ec2(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_elb(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_autoscale(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_cloudwatch(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_sdb(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_fps(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_mturk(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_cloudfront(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_vpc(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_rds(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_rds2(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_emr(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_sns(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_iam(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_route53(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_cloudformation(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_euca(
    host: Incomplete | None = None,
    aws_access_key_id: Incomplete | None = None,
    aws_secret_access_key: Incomplete | None = None,
    port: int = 8773,
    path: str = "/services/Eucalyptus",
    is_secure: bool = False,
    # NOTE(review): insecure (plain HTTP) by default per the stubbed signature.
    **kwargs,
):
    """Connect to a Eucalyptus endpoint (stub; defaults target the standard Eucalyptus service path/port)."""
    ...
def connect_glacier(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def METHOD_NAME(
url, aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_walrus(
    host: Incomplete | None = None,
    aws_access_key_id: Incomplete | None = None,
    aws_secret_access_key: Incomplete | None = None,
    port: int = 8773,
    path: str = "/services/Walrus",
    is_secure: bool = False,
    # NOTE(review): insecure (plain HTTP) by default per the stubbed signature.
    **kwargs,
):
    """Connect to a Walrus (Eucalyptus S3-compatible) endpoint (stub)."""
    ...
def connect_ses(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_sts(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_ia(
ia_access_key_id: Incomplete | None = None, ia_secret_access_key: Incomplete | None = None, is_secure: bool = False, **kwargs
): ...
def connect_dynamodb(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_swf(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_cloudsearch(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_cloudsearch2(
aws_access_key_id: Incomplete | None = None,
aws_secret_access_key: Incomplete | None = None,
sign_request: bool = False,
**kwargs,
): ...
def connect_cloudsearchdomain(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_beanstalk(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_elastictranscoder(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_opsworks(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_redshift(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_support(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_cloudtrail(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_directconnect(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_kinesis(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_logs(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_route53domains(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_cognito_identity(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_cognito_sync(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_kms(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_awslambda(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_codedeploy(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_configservice(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_cloudhsm(aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs): ...
def connect_ec2containerservice(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def connect_machinelearning(
aws_access_key_id: Incomplete | None = None, aws_secret_access_key: Incomplete | None = None, **kwargs
): ...
def storage_uri(
    uri_str,
    default_scheme: str = "file",
    debug: int = 0,
    validate: bool = True,
    bucket_storage_uri_class: Any = ...,
    suppress_consec_slashes: bool = True,
    is_latest: bool = False,
):
    """Build a StorageUri object for *uri_str*; *default_scheme* applies when the URI has none (stub)."""
    ...
def storage_uri_for_key(key): ...
# Explicitly mark this package as incomplete.
def __getattr__(name: str) -> Incomplete: ... |
6,001 | list | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._trusted_access_roles_operations import build_list_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TrustedAccessRolesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.containerservice.v2023_03_02_preview.aio.ContainerServiceClient`'s
:attr:`trusted_access_roles` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
    # Generated client wiring: positional args (client, config, serializer,
    # deserializer, api_version — in that order) take precedence over kwargs.
    # NOTE(review): METHOD_NAME appears to coerce `args` into a mutable
    # sequence (presumably `list`) so pop(0) works — confirm against the
    # original generator output.
    input_args = METHOD_NAME(args)
    self._client = input_args.pop(0) if input_args else kwargs.pop("client")
    self._config = input_args.pop(0) if input_args else kwargs.pop("config")
    self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
    self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    self._api_version = input_args.pop(0) if input_args else kwargs.pop("api_version")
@distributed_trace
def METHOD_NAME(self, location: str, **kwargs: Any) -> AsyncIterable["_models.TrustedAccessRole"]:
    """List supported trusted access roles.

    List supported trusted access roles.

    :param location: The name of Azure region. Required.
    :type location: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either TrustedAccessRole or the result of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2023_03_02_preview.models.TrustedAccessRole]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # An explicit api-version (kwarg or query param) wins over the client default.
    api_version: str = kwargs.pop(
        "api_version", _params.pop("api-version", self._api_version or "2023-03-02-preview")
    )
    cls: ClsType[_models.TrustedAccessRoleListResult] = kwargs.pop("cls", None)

    # HTTP statuses mapped to typed azure-core exceptions before the generic raise.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # First page: build from the URL template; later pages: follow next_link.
        if not next_link:
            request = build_list_request(
                location=location,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.METHOD_NAME.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; return (next page link or None, items of this page).
        deserialized = self._deserialize("TrustedAccessRoleListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        # Only 200 is a success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
METHOD_NAME.metadata = {
"url": "/subscriptions/{subscriptionId}/providers/Microsoft.ContainerService/locations/{location}/trustedAccessRoles"
} |
6,002 | revise organization mappings | #!/usr/bin/env python
from typing import List
import click
import docker
from django.apps import apps
from sentry.runner import configure
from sentry.silo.base import SiloMode
configure()
from django.conf import settings
from sentry.models.organizationmapping import OrganizationMapping
def exec_run(container, command):
    """Run *command* inside *container* through a shell, echoing details on failure."""
    shell_cmd = 'sh -c "{}"'.format(" ".join(command))
    status, output = container.exec_run(cmd=shell_cmd, stdout=True, stderr=True)
    if not status:
        return
    click.echo("Container operation Failed!")
    click.echo(f"Container operation failed with {output}")
def split_database(tables: List[str], source: str, destination: str, reset: bool, verbose: bool):
    """Dump *tables* from the *source* database and load them into *destination*,
    both inside the ``sentry_postgres`` container; optionally drop/recreate first."""
    click.echo(f">> Dumping tables from {source} database")
    command = ["pg_dump", "-U", "postgres", "-d", source, "--clean"]
    for table in tables:
        command.extend(["-t", table])
    # Shell redirection works because exec_run() wraps everything in `sh -c`.
    command.extend([">", f"/tmp/{destination}-tables.sql"])

    client = docker.from_env()
    postgres = client.containers.get("sentry_postgres")

    if verbose:
        click.echo(f">> Running {' '.join(command)}")
    exec_run(postgres, command)

    if reset:
        click.echo(f">> Dropping existing {destination} database")
        exec_run(postgres, ["dropdb", "-U", "postgres", "--if-exists", destination])
        # NOTE(review): source indentation is ambiguous here — createdb is assumed
        # to pair with the drop above (reset == make empty db); confirm upstream.
        exec_run(postgres, ["createdb", "-U", "postgres", destination])

    # Use the dump file to build control silo tables.
    click.echo(f">> Building {destination} database from dump file")
    import_command = ["psql", "-U", "postgres", destination, "<", f"/tmp/{destination}-tables.sql"]
    if verbose:
        click.echo(f">> Running {' '.join(import_command)}")
    exec_run(postgres, import_command)
def METHOD_NAME(legacy_region_name: str):
    """Rewrite OrganizationMapping.region_name from *legacy_region_name* to the
    configured monolith region.

    No-op (with a hint) when SENTRY_MONOLITH_REGION still equals the legacy name.
    """
    if settings.SENTRY_MONOLITH_REGION == legacy_region_name:
        click.echo(
            "> No OrganizationMapping have been modified. Set 'SENTRY_MONOLITH_REGION' in sentry.conf.py to update monolith mappings."
        )
    else:
        # QuerySet.update() returns the number of rows it touched. Using that
        # avoids the extra SELECT the previous len(qs) issued, and cannot
        # disagree with the row count actually rewritten (len + update raced).
        record_count = OrganizationMapping.objects.filter(
            region_name=legacy_region_name
        ).update(region_name=settings.SENTRY_MONOLITH_REGION)
        click.echo(
            f"> {record_count} OrganizationMapping record(s) have been updated from '{legacy_region_name}' to '{settings.SENTRY_MONOLITH_REGION}'"
        )
@click.command()
@click.option(
    "--legacy-region-name",
    default="--monolith--",
    help="Previous value of settings.SENTRY_MONOLITH_REGION to overwrite in organization mappings",
)
@click.option("--verbose", default=False, is_flag=True, help="Enable verbose logging")
@click.option(
    "--reset",
    default=False,
    is_flag=True,
    help="Reset the target databases to be empty before loading extracted data and schema.",
)
@click.option("--database", default="sentry", help="Which database to derive splits from")
def main(database: str, reset: bool, verbose: bool, legacy_region_name: str):
    """
    This is a development tool that can convert a monolith database into
    control + region databases by using silo annotations.
    This operation will not modify the original source database.
    """
    # We have a few tables that either need to be in both silos,
    # or only in control. These tables don't have silo annotations
    # as they are inherited from django and their silo assignments
    # need to be manually defined.
    region_tables = ["django_migrations", "django_content_type"]
    control_tables = [
        "django_migrations",
        "django_admin_log",
        "django_content_type",
        "django_site",
        "django_session",
        "auth_user",
        "auth_group",
        "auth_permission",
        "auth_group_permissions",
        "auth_user_groups",
        "auth_user_user_permissions",
    ]

    # Partition each annotated model's table into the silo(s) it belongs to;
    # a model can legitimately land in both lists.
    for model in apps.get_models():
        silo_limit = getattr(model._meta, "silo_limit", None)
        if not silo_limit:
            click.echo(f"> Could not find silo assignment for {model._meta.db_table}")
            continue
        if SiloMode.CONTROL in silo_limit.modes:
            control_tables.append(model._meta.db_table)
        if SiloMode.REGION in silo_limit.modes:
            region_tables.append(model._meta.db_table)

    # Repair organization mappings first, then materialize both databases.
    METHOD_NAME(legacy_region_name=legacy_region_name)
    split_database(control_tables, database, "control", reset=reset, verbose=verbose)
    split_database(region_tables, database, "region", reset=reset, verbose=verbose)
if __name__ == "__main__":
main() |
6,003 | get dashboard data | import frappe
from frappe import _
from frappe.utils import flt
from india_compliance.gst_india.overrides.sales_invoice import (
update_dashboard_with_gst_logs,
)
from india_compliance.gst_india.overrides.transaction import validate_transaction
from india_compliance.gst_india.utils import get_gst_accounts_by_type, is_api_enabled
from india_compliance.gst_india.utils.e_waybill import get_e_waybill_info
def onload(doc, method=None):
    """Attach Bill of Entry / e-Waybill context to a submitted Purchase Invoice."""
    # Only submitted documents (docstatus == 1) carry this context.
    if doc.docstatus != 1:
        return

    if doc.gst_category == "Overseas":
        doc.set_onload(
            "bill_of_entry_exists",
            frappe.db.exists(
                "Bill of Entry",
                {"purchase_invoice": doc.name, "docstatus": 1},
            ),
        )

    if not doc.get("ewaybill"):
        return

    gst_settings = frappe.get_cached_doc("GST Settings")
    if not is_api_enabled(gst_settings):
        return

    # e-Waybill details are fetched only when the feature is enabled for PIs.
    if (
        gst_settings.enable_e_waybill
        and gst_settings.enable_e_waybill_from_pi
        and doc.ewaybill
    ):
        doc.set_onload("e_waybill_info", get_e_waybill_info(doc))
def validate(doc, method=None):
    """Validation hook for Purchase Invoice (GST)."""
    # validate_transaction() returning False means GST does not apply; skip the rest.
    if validate_transaction(doc) is False:
        return

    update_itc_totals(doc)
    validate_supplier_invoice_number(doc)
    validate_with_inward_supply(doc)
def update_itc_totals(doc, method=None):
    """Recompute the four ITC (input tax credit) totals on *doc* from its tax rows."""
    # Start each bucket from zero so repeated validation runs don't accumulate.
    doc.itc_integrated_tax = 0
    doc.itc_state_tax = 0
    doc.itc_central_tax = 0
    doc.itc_cess_amount = 0

    gst_accounts = get_gst_accounts_by_type(doc.company, "Input")

    # Each configured GST account head feeds one document field.
    bucket_by_account = (
        (gst_accounts.igst_account, "itc_integrated_tax"),
        (gst_accounts.sgst_account, "itc_state_tax"),
        (gst_accounts.cgst_account, "itc_central_tax"),
        (gst_accounts.cess_account, "itc_cess_amount"),
    )

    for row in doc.get("taxes"):
        amount = flt(row.base_tax_amount_after_discount_amount)
        # Deliberately not elif: a head configured for several accounts feeds all
        # of them, matching the original independent if-checks.
        for account, fieldname in bucket_by_account:
            if row.account_head == account:
                setattr(doc, fieldname, getattr(doc, fieldname) + amount)
def validate_supplier_invoice_number(doc):
    """Raise when GST Settings require a supplier invoice number and *doc* lacks one."""
    # Guard clauses, evaluated in the same short-circuit order as before.
    if doc.bill_no:
        return
    if doc.gst_category == "Unregistered":
        return
    if not frappe.get_cached_value(
        "GST Settings", "GST Settings", "require_supplier_invoice_no"
    ):
        return

    frappe.throw(
        _("As per your GST Settings, Bill No is mandatory for Purchase Invoice."),
        title=_("Missing Mandatory Field"),
    )
def METHOD_NAME(data):
    """Add the Bill of Entry link (and GST log links) to the dashboard *data*."""
    transactions = data.setdefault("transactions", [])

    # Find the existing "Reference" section, if any.
    reference_section = None
    for section in transactions:
        if section.get("label") == "Reference":
            reference_section = section
            break

    if reference_section is None:
        reference_section = {"label": "Reference", "items": []}
        transactions.append(reference_section)

    reference_section["items"].append("Bill of Entry")

    update_dashboard_with_gst_logs(
        "Purchase Invoice", data, "e-Waybill Log", "Integration Request"
    )
    return data
def validate_with_inward_supply(doc):
    """Compare *doc* against its matched GST Inward Supply (2A/2B) record and
    warn the user about mismatching values.

    Expects the matched record on ``doc._inward_supply`` (set upstream); silently
    returns when absent. On submit with a clean match a green alert is shown.
    """
    if not doc.get("_inward_supply"):
        return

    mismatch_fields = {}
    for field in [
        "company",
        "company_gstin",
        "supplier_gstin",
        "bill_no",
        "bill_date",
        "is_reverse_charge",
        "place_of_supply",
    ]:
        if doc.get(field) != doc._inward_supply.get(field):
            mismatch_fields[field] = doc._inward_supply.get(field)

    # mismatch for taxable_value
    taxable_value = sum(item.taxable_value for item in doc.items)
    if taxable_value != doc._inward_supply.get("taxable_value"):
        mismatch_fields["Taxable Value"] = doc._inward_supply.get("taxable_value")

    # mismatch for taxes; cess additionally includes the non-advol cess account
    gst_accounts = get_gst_accounts_by_type(doc.company, "Input")
    for tax in ["cgst", "sgst", "igst", "cess"]:
        tax_amount = get_tax_amount(doc.taxes, gst_accounts[tax + "_account"])
        if tax == "cess":
            tax_amount += get_tax_amount(doc.taxes, gst_accounts.cess_non_advol_account)

        # NOTE(review): exact equality on monetary amounts — assumes both sides
        # are rounded identically upstream; confirm if spurious mismatches appear.
        if tax_amount == doc._inward_supply.get(tax):
            continue

        mismatch_fields[tax.upper()] = doc._inward_supply.get(tax)

    if mismatch_fields:
        # Fixed user-facing typo: "releted" -> "related".
        message = (
            "Purchase Invoice does not match with related GST Inward Supply.<br>"
            "Following values are not matching from 2A/2B: <br>"
        )
        for field, value in mismatch_fields.items():
            message += f"<br>{field}: {value}"

        frappe.msgprint(
            _(message),
            title=_("Mismatch with GST Inward Supply"),
        )
    elif doc._action == "submit":
        frappe.msgprint(
            _("Invoice matched with GST Inward Supply"),
            alert=True,
            indicator="green",
        )
def get_tax_amount(taxes, account_head):
    """Sum ``base_tax_amount_after_discount_amount`` over the rows of *taxes*
    booked against *account_head*; return 0 when either argument is missing.

    Bug fix: the old guard was ``not (taxes or account_head)``, which only
    short-circuited when BOTH were falsy — so ``taxes=None`` with a real
    account head fell through and raised TypeError on iteration.
    """
    if not taxes or not account_head:
        return 0

    return sum(
        tax.base_tax_amount_after_discount_amount
        for tax in taxes
        if tax.account_head == account_head
    )
6,004 | guess config type | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""
Utility functions for experiment config classes, internal part.
If you are implementing a config class for a training service, it's unlikely you will need these.
"""
from __future__ import annotations
__all__ = [
'get_base_path', 'set_base_path', 'unset_base_path', 'resolve_path',
'case_insensitive', 'camel_case',
'fields', 'is_instance', 'validate_type', 'is_path_like',
'guess_config_type', 'guess_list_config_type',
'training_service_config_factory', 'load_training_service_config',
'get_ipv4_address', 'diff'
]
import copy
import dataclasses
import importlib
import json
import os.path
from pathlib import Path
import socket
from typing import TYPE_CHECKING, get_type_hints
import typeguard
import nni.runtime.config
from .public import is_missing
if TYPE_CHECKING:
from nni.nas.experiment.pytorch import RetiariiExperiment
from nni.nas.experiment.config import NasExperimentConfig
from ...experiment import Experiment
from ..base import ConfigBase
from ..experiment_config import ExperimentConfig
from ..training_service import TrainingServiceConfig
## handle relative path ##

# Process-wide base used by resolve_path(); None means "current working directory".
_current_base_path: Path | None = None


def get_base_path() -> Path:
    """Return the base for resolving relative config paths (cwd when unset)."""
    if _current_base_path is None:
        return Path()
    return _current_base_path


def set_base_path(path: Path) -> None:
    """Install *path* as the global base; asserts that no base is currently set."""
    global _current_base_path
    assert _current_base_path is None
    _current_base_path = path


def unset_base_path() -> None:
    """Clear the global base path installed by set_base_path()."""
    global _current_base_path
    _current_base_path = None
def resolve_path(path: Path | str, base_path: Path) -> str:
    """Expand *path* (user home, relative to *base_path*) and return its canonical string."""
    assert path is not None
    # Path.resolve() does not work on Windows when file not exist, so use os.path instead
    expanded = os.path.expanduser(path)
    if os.path.isabs(expanded):
        combined = expanded
    else:
        combined = os.path.join(base_path, expanded)
    return str(os.path.realpath(combined))
## field name case conversion ##
def case_insensitive(key: str) -> str:
    """Normalise *key* for case- and underscore-insensitive comparison."""
    return "".join(ch for ch in key.lower() if ch != "_")
def camel_case(key: str) -> str:
    """Convert a snake_case *key* to camelCase (surrounding underscores dropped)."""
    first, *rest = key.strip('_').split('_')
    return first + ''.join(part.title() for part in rest)
## type hint utils ##
def fields(config: ConfigBase) -> list[dataclasses.Field]:
    """Like ``dataclasses.fields()``, but with each ``field.type`` resolved through
    ``typing.get_type_hints()`` so postponed (string) annotations become real types."""
    resolved = get_type_hints(type(config))
    result = []
    for field in dataclasses.fields(config):
        field = copy.copy(field)
        field.type = resolved[field.name]
        result.append(field)
    return result
def is_instance(value, type_hint) -> bool:
    """Return True iff *value* conforms to *type_hint* according to typeguard."""
    try:
        typeguard.check_type(value, type_hint)
        return True
    except typeguard.TypeCheckError:
        return False
def validate_type(config: ConfigBase) -> None:
    """Raise ValueError when any field of *config* is unset or has the wrong type."""
    class_name = type(config).__name__
    for field in dataclasses.fields(config):
        value = getattr(config, field.name)
        # check existence
        if is_missing(value):
            raise ValueError(f'{class_name}: {field.name} is not set')
        # check type against the declared annotation
        if not is_instance(value, field.type):
            raise ValueError(f'{class_name}: type of {field.name} ({repr(value)}) is not {field.type}')
def is_path_like(type_hint) -> bool:
    """Return True iff *type_hint* accepts pathlib.Path without being a catch-all."""
    accepts_path = is_instance(Path(), type_hint)
    accepts_int = is_instance(1, type_hint)  # `Any` would accept this too
    # only `PathLike` and `Any` accept `Path`; rejecting int rules out `Any`
    return accepts_path and not accepts_int
## type inference ##
def METHOD_NAME(obj, type_hint) -> ConfigBase | None:
    """Try to interpret *obj* (a dict) as a config object matching *type_hint*."""
    # Reuse the list-variant with a single-element list and unwrap the result.
    as_list = guess_list_config_type([obj], type_hint, _hint_list_item=True)
    if not as_list:
        return None
    return as_list[0]
def guess_list_config_type(objs, type_hint, _hint_list_item=False) -> list[ConfigBase] | None:
    """Try to interpret *objs* (a list of dicts) as config objects compatible
    with *type_hint*; return None when no ConfigBase subclass fits."""
    # avoid circular import
    from ..base import ConfigBase
    from ..training_service import TrainingServiceConfig

    # because __init__ of subclasses might be complex, we first create empty objects to determine type
    candidate_classes = []
    for cls in _all_subclasses(ConfigBase):
        if issubclass(cls, TrainingServiceConfig):  # training service configs are specially handled
            continue
        empty_list = [cls.__new__(cls)]
        if _hint_list_item:
            good_type = is_instance(empty_list[0], type_hint)
        else:
            good_type = is_instance(empty_list, type_hint)
        if good_type:
            candidate_classes.append(cls)

    if not candidate_classes:  # it does not accept config type
        return None
    if len(candidate_classes) == 1:  # the type is confirmed, raise error if cannot convert to this type
        return [candidate_classes[0](**obj) for obj in objs]

    # multiple candidates available, call __init__ to further verify
    candidate_configs = []
    for cls in candidate_classes:
        try:
            configs = [cls(**obj) for obj in objs]
        except Exception:
            # FIXME: The reason why the guess failed is eaten here. We should at least print one of them.
            continue
        candidate_configs.append(configs)

    if not candidate_configs:
        return None
    if len(candidate_configs) == 1:
        return candidate_configs[0]

    # still have multiple candidates, choose the common base class
    for base in candidate_configs:
        base_class = type(base[0])
        is_base = all(isinstance(configs[0], base_class) for configs in candidate_configs)
        if is_base:
            return base

    return None  # cannot detect the type, give up
def _all_subclasses(cls):
subclasses = set(cls.__subclasses__())
return subclasses.union(*[_all_subclasses(subclass) for subclass in subclasses])
def training_service_config_factory(platform: str) -> TrainingServiceConfig:
    """Instantiate the TrainingServiceConfig subclass registered for *platform*,
    raising ValueError for an unknown platform name."""
    cls = _get_ts_config_class(platform)
    if cls is None:
        raise ValueError(f'Bad training service platform: {platform}')
    return cls()
def load_training_service_config(config) -> TrainingServiceConfig:
    """Deserialize a training-service config dict; anything else passes through."""
    looks_like_ts_config = isinstance(config, dict) and 'platform' in config
    if looks_like_ts_config:
        cls = _get_ts_config_class(config['platform'])
        if cls is not None:
            return cls(**config)
    # not valid json, don't touch
    return config  # type: ignore
def _get_ts_config_class(platform: str) -> type[TrainingServiceConfig] | None:
    """Find the TrainingServiceConfig subclass whose ``platform`` matches, or None."""
    from ..training_service import TrainingServiceConfig  # avoid circular import

    # import all custom config classes so they can be found in TrainingServiceConfig.__subclasses__()
    custom_ts_config_path = nni.runtime.config.get_config_file('training_services.json')
    with custom_ts_config_path.open() as config_file:
        custom_ts_config = json.load(config_file)
    for custom_ts_pkg in custom_ts_config.keys():
        # Importing registers the package's config class as a subclass;
        # the attribute read just verifies the package exposes one.
        pkg = importlib.import_module(custom_ts_pkg)
        _config_class = pkg.nni_training_service_info.config_class

    for cls in _all_subclasses(TrainingServiceConfig):
        if cls.platform == platform:
            return cls
    return None
## misc ##
def get_ipv4_address() -> str:
    """Best-effort detection of this host's outbound IPv4 address.

    Connecting a UDP socket sends no packets; it only asks the kernel which
    source interface it would pick for the (TEST-NET-1) destination.
    """
    # Fix: the socket previously leaked when connect()/getsockname() raised;
    # the context manager closes it on every path.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.connect(('192.0.2.0', 80))
        return s.getsockname()[0]
def diff(config1: ConfigBase, config2: ConfigBase, from_: str = '', to_: str = '') -> str:
    """Return a human-oriented unified diff of the two configs' JSON forms.

    This is not an exact diff — just enough to give users a rough idea of
    what changed between *config1* and *config2*.
    """
    import difflib
    import pprint

    # The ideal diff should directly apply on the pprint of dataclass. However,
    # 1. pprint of dataclass is not stable. It actually changes from python 3.9 to 3.10.
    # 2. MISSING doesn't have a stable memory address. It might be different in different objects.
    lhs = pprint.pformat(config1.json()).splitlines()
    rhs = pprint.pformat(config2.json()).splitlines()
    return '\n'.join(difflib.unified_diff(lhs, rhs, from_, to_, lineterm=''))
6,005 | recurring scans | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetWorkspaceManagedSqlServerVulnerabilityAssessmentResult',
'AwaitableGetWorkspaceManagedSqlServerVulnerabilityAssessmentResult',
'get_workspace_managed_sql_server_vulnerability_assessment',
'get_workspace_managed_sql_server_vulnerability_assessment_output',
]
@pulumi.output_type
class GetWorkspaceManagedSqlServerVulnerabilityAssessmentResult:
    """
    A server vulnerability assessment.
    """
    def __init__(__self__, id=None, name=None, METHOD_NAME=None, storage_container_path=None, type=None):
        # Defensive type checks: these values arrive untyped from the RPC layer.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if METHOD_NAME and not isinstance(METHOD_NAME, dict):
            raise TypeError("Expected argument 'recurring_scans' to be a dict")
        pulumi.set(__self__, "recurring_scans", METHOD_NAME)
        if storage_container_path and not isinstance(storage_container_path, str):
            raise TypeError("Expected argument 'storage_container_path' to be a str")
        pulumi.set(__self__, "storage_container_path", storage_container_path)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="recurringScans")
    def METHOD_NAME(self) -> Optional['outputs.VulnerabilityAssessmentRecurringScansPropertiesResponse']:
        """
        The recurring scans settings
        """
        return pulumi.get(self, "recurring_scans")

    @property
    @pulumi.getter(name="storageContainerPath")
    def storage_container_path(self) -> str:
        """
        A blob storage container path to hold the scan results (e.g. https://myStorage.blob.core.windows.net/VaScans/).
        """
        return pulumi.get(self, "storage_container_path")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetWorkspaceManagedSqlServerVulnerabilityAssessmentResult(GetWorkspaceManagedSqlServerVulnerabilityAssessmentResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # Never actually suspends: the unreachable `yield` makes this a
        # generator, which is enough for `await` to work while the already
        # resolved result is returned immediately.
        if False:
            yield self
        return GetWorkspaceManagedSqlServerVulnerabilityAssessmentResult(
            id=self.id,
            name=self.name,
            METHOD_NAME=self.METHOD_NAME,
            storage_container_path=self.storage_container_path,
            type=self.type)
def get_workspace_managed_sql_server_vulnerability_assessment(resource_group_name: Optional[str] = None,
                                                              vulnerability_assessment_name: Optional[str] = None,
                                                              workspace_name: Optional[str] = None,
                                                              opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkspaceManagedSqlServerVulnerabilityAssessmentResult:
    """
    Get workspace managed sql server's vulnerability assessment.

    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str vulnerability_assessment_name: The name of the vulnerability assessment.
    :param str workspace_name: The name of the workspace.
    """
    # Build the provider invoke arguments using the wire (camelCase) names.
    __args__ = dict()
    __args__['resourceGroupName'] = resource_group_name
    __args__['vulnerabilityAssessmentName'] = vulnerability_assessment_name
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Synchronously invoke the provider and unwrap the typed result.
    __ret__ = pulumi.runtime.invoke('azure-native:synapse/v20210601preview:getWorkspaceManagedSqlServerVulnerabilityAssessment', __args__, opts=opts, typ=GetWorkspaceManagedSqlServerVulnerabilityAssessmentResult).value

    return AwaitableGetWorkspaceManagedSqlServerVulnerabilityAssessmentResult(
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        METHOD_NAME=pulumi.get(__ret__, 'recurring_scans'),
        storage_container_path=pulumi.get(__ret__, 'storage_container_path'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_workspace_managed_sql_server_vulnerability_assessment)
def get_workspace_managed_sql_server_vulnerability_assessment_output(resource_group_name: Optional[pulumi.Input[str]] = None,
                                                                     vulnerability_assessment_name: Optional[pulumi.Input[str]] = None,
                                                                     workspace_name: Optional[pulumi.Input[str]] = None,
                                                                     opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetWorkspaceManagedSqlServerVulnerabilityAssessmentResult]:
    """
    Get workspace managed sql server's vulnerability assessment.

    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str vulnerability_assessment_name: The name of the vulnerability assessment.
    :param str workspace_name: The name of the workspace.
    """
    # The body is supplied by the lift_output_func decorator, which wraps the
    # plain invoke above so it accepts and returns pulumi Outputs.
    ...
#!/usr/bin/env python
import numpy as np
import ply.yacc as yacc
import data.calculated_parser.functions as functions
import data.calculated_parser.lexer
class Parser:
    """The parsing portion of the domain specific language"""

    # NOTE: the one-line strings on the p_* methods below are NOT normal
    # docstrings — PLY reads them as the grammar specification, so they must
    # not be reworded or reformatted.

    def __init__(self, **kwargs):
        self.lexer = data.calculated_parser.lexer.Lexer()
        self.tokens = self.lexer.tokens
        # Sets the operator precedence for the parser. The unary minus is the
        # highest, followed by exponentiation, then multiplication/division and
        # addition/subtraction is last on the list.
        self.precedence = (
            ("left", "PLUS", "MINUS"),
            ("left", "TIMES", "DIVIDE"),
            ("left", "POWER"),
            ("right", "UMINUS"),
        )
        self.parser = yacc.yacc(module=self)
        self.data = None
        self.expression = None
        self.result = np.nan

    def parse(self, expression, data, key, dims):
        """Parse the expression and return the result

        Parameters:
        expression -- the string expression to parse
        data -- the xarray or netcdf dataset to pull data from
        key -- the key passed along from the __getitem__ call, a tuple of
               integers and/or slices
        dims -- the dimensions that correspond to the key, a list of strings

        Returns a numpy array of data.
        """
        self.data = data
        self.result = np.array(np.nan)  # populated by p_statement_expr()
        self.key = key
        self.dims = dims
        self.expression = expression
        self.parser.parse(expression)
        # Drop dataset references after parsing so they can be released.
        self.data = None
        self.key = None
        self.dims = None
        return self.result

    def get_key_for_variable(self, variable):
        """Using self.key and self.dims, determine the key for the particular
        variable.

        Params:
        variable -- the xarray or netcdf variable

        Returns a tuple of integers and/or slices
        """
        key = self.key
        if not isinstance(key, tuple):
            key = (key,)
        # Map dimension name -> index/slice, then reorder the indices to match
        # this particular variable's own dimension order.
        d = dict(zip(self.dims, key))
        try:
            if hasattr(variable, "dims"):
                # xarray calls it dims
                key = [d[k] for k in variable.dims]
            else:
                key = [d[k] for k in variable.dimensions]
        except KeyError:
            # The variable uses a dimension we were not given an index for.
            raise SyntaxError
        return tuple(key)

    def get_key_for_variable_full_depth(self, variable_key):
        # Build a key that selects every depth level of the named variable
        # (used by the [VAR] full-depth syntax below).
        variable = self.data.variables[variable_key]
        if "depth" in variable_key:
            depth_levels = variable.shape[0]  # Expecting (depth shape)
            return (slice(0, depth_levels),)
        key = list(self.key)
        # Expecting (time, depth, lat, lon) shape
        depth_levels = variable.shape[1]
        key.insert(1, slice(0, depth_levels))
        return tuple(key)

    # Similar to the Lexer, these p_*, methods cannot have proper python
    # docstrings, because it's used for the parsing specification.
    def p_statement_expr(self, t):
        "statement : expression"
        if not isinstance(t[1], np.ndarray):
            t[1] = np.array(t[1])
        self.result = t[1]

    def p_expression_variable(self, t):
        "expression : ID"
        t[0] = self.data.variables[t[1]][
            self.get_key_for_variable(self.data.variables[t[1]])
        ]

    def p_expression_variable_full_depth(self, t):
        """expression : LBRKT ID RBRKT"""
        t[0] = self.data.variables[t[2]][self.get_key_for_variable_full_depth(t[2])]

    def p_expression_uop(self, t):
        """expression : MINUS expression %prec UMINUS"""
        t[0] = -t[2]

    def p_expression_binop(self, t):
        """expression : expression PLUS expression
        | expression MINUS expression
        | expression TIMES expression
        | expression DIVIDE expression
        | expression POWER NUMBER"""
        if t[2] == "+":
            t[0] = t[1] + t[3]
        elif t[2] == "-":
            t[0] = t[1] - t[3]
        elif t[2] == "*":
            t[0] = t[1] * t[3]
        elif t[2] == "/":
            t[0] = t[1] / t[3]
        elif t[2] == "^":
            t[0] = t[1] ** t[3]

    def p_expression_group(self, t):
        "expression : LPAREN expression RPAREN"
        t[0] = t[2]

    # NOTE(review): name looks auto-mangled (handles NUMBER literals,
    # presumably originally p_expression_number).
    def METHOD_NAME(self, t):
        "expression : NUMBER"
        t[0] = t[1]

    def p_expression_const(self, t):
        "expression : CONST"
        t[0] = t[1]

    def p_expression_function(self, t):
        "expression : ID LPAREN arguments RPAREN"
        fname = t[1]
        arg_list = t[3]
        # Only names exported by the functions module may be called.
        if fname in dir(functions):
            t[0] = getattr(functions, fname)(*arg_list)
        else:
            raise SyntaxError

    def p_arguments(self, t):
        "arguments : argument"
        t[0] = [t[1]]

    def p_arguments_1(self, t):
        "arguments : arguments COMMA argument"
        t[0] = t[1]
        t[1].append(t[3])

    def p_argument(self, t):
        "argument : expression"
        t[0] = t[1]

    def p_error(self, t):
        raise SyntaxError(
            "Syntax error in equation: {}...{}".format(self.expression, t)
        )
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetVolumeResult',
'AwaitableGetVolumeResult',
'get_volume',
'get_volume_output',
]
@pulumi.output_type
class GetVolumeResult:
    """
    Response for Volume request.
    """
    def __init__(__self__, METHOD_NAME=None, id=None, name=None, size_gi_b=None, storage_target=None, system_data=None, tags=None, type=None, volume_id=None):
        # Validate each field of the raw invoke payload before storing it via
        # pulumi.set(). (Generated code; do not hand-edit logic.)
        if METHOD_NAME and not isinstance(METHOD_NAME, dict):
            raise TypeError("Expected argument 'creation_data' to be a dict")
        pulumi.set(__self__, "creation_data", METHOD_NAME)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if size_gi_b and not isinstance(size_gi_b, float):
            raise TypeError("Expected argument 'size_gi_b' to be a float")
        pulumi.set(__self__, "size_gi_b", size_gi_b)
        if storage_target and not isinstance(storage_target, dict):
            raise TypeError("Expected argument 'storage_target' to be a dict")
        pulumi.set(__self__, "storage_target", storage_target)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if volume_id and not isinstance(volume_id, str):
            raise TypeError("Expected argument 'volume_id' to be a str")
        pulumi.set(__self__, "volume_id", volume_id)

    @property
    @pulumi.getter(name="creationData")
    def METHOD_NAME(self) -> Optional['outputs.SourceCreationDataResponse']:
        """
        State of the operation on the resource.
        """
        # NOTE(review): the docstring above mirrors the upstream Azure spec and
        # looks copy-pasted; the value is actually the volume's source creation
        # data — confirm against the service swagger (file is generated).
        return pulumi.get(self, "creation_data")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Azure resource identifier.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Azure resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="sizeGiB")
    def size_gi_b(self) -> Optional[float]:
        """
        Volume size.
        """
        return pulumi.get(self, "size_gi_b")

    @property
    @pulumi.getter(name="storageTarget")
    def storage_target(self) -> 'outputs.IscsiTargetInfoResponse':
        """
        Storage target information
        """
        return pulumi.get(self, "storage_target")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Resource metadata required by ARM RPC
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Azure resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Azure resource type.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="volumeId")
    def volume_id(self) -> str:
        """
        Unique Id of the volume in GUID format
        """
        return pulumi.get(self, "volume_id")
class AwaitableGetVolumeResult(GetVolumeResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes __await__ a generator function, so this
        # already-resolved result can still be used with `await`.
        if False:
            yield self
        return GetVolumeResult(
            METHOD_NAME=self.METHOD_NAME,
            id=self.id,
            name=self.name,
            size_gi_b=self.size_gi_b,
            storage_target=self.storage_target,
            system_data=self.system_data,
            tags=self.tags,
            type=self.type,
            volume_id=self.volume_id)
def get_volume(elastic_san_name: Optional[str] = None,
               resource_group_name: Optional[str] = None,
               volume_group_name: Optional[str] = None,
               volume_name: Optional[str] = None,
               opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVolumeResult:
    """
    Get a Volume.

    :param str elastic_san_name: The name of the ElasticSan.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str volume_group_name: The name of the VolumeGroup.
    :param str volume_name: The name of the Volume.
    """
    # Build the provider invoke arguments using the wire (camelCase) names.
    __args__ = dict()
    __args__['elasticSanName'] = elastic_san_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['volumeGroupName'] = volume_group_name
    __args__['volumeName'] = volume_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Synchronously invoke the provider and unwrap the typed result.
    __ret__ = pulumi.runtime.invoke('azure-native:elasticsan/v20211120preview:getVolume', __args__, opts=opts, typ=GetVolumeResult).value

    return AwaitableGetVolumeResult(
        METHOD_NAME=pulumi.get(__ret__, 'creation_data'),
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        size_gi_b=pulumi.get(__ret__, 'size_gi_b'),
        storage_target=pulumi.get(__ret__, 'storage_target'),
        system_data=pulumi.get(__ret__, 'system_data'),
        tags=pulumi.get(__ret__, 'tags'),
        type=pulumi.get(__ret__, 'type'),
        volume_id=pulumi.get(__ret__, 'volume_id'))
@_utilities.lift_output_func(get_volume)
def get_volume_output(elastic_san_name: Optional[pulumi.Input[str]] = None,
                      resource_group_name: Optional[pulumi.Input[str]] = None,
                      volume_group_name: Optional[pulumi.Input[str]] = None,
                      volume_name: Optional[pulumi.Input[str]] = None,
                      opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetVolumeResult]:
    """
    Get a Volume.

    :param str elastic_san_name: The name of the ElasticSan.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str volume_group_name: The name of the VolumeGroup.
    :param str volume_name: The name of the Volume.
    """
    # The body is supplied by the lift_output_func decorator, which wraps the
    # plain get_volume() above so it accepts and returns pulumi Outputs.
    ...
from _typeshed import _BufferWithLen
from socket import socket
from typing import TypeVar, overload
from typing_extensions import Literal
from Xlib import error
from Xlib._typing import ErrorHandler
from Xlib.display import _ResourceBaseClass, _ResourceBaseClassesType
from Xlib.protocol import rq
from Xlib.support import lock
from Xlib.xobject import colormap, cursor, drawable, fontable, resource
_T = TypeVar("_T")
class bytesview:
    # Type stub: a zero-copy, sliceable view over a bytes-like object,
    # backed by a memoryview. Slicing returns bytes; indexing returns an int.
    view: memoryview
    @overload
    def __init__(self, data: bytes | bytesview, offset: int, size: int) -> None: ...
    @overload
    def __init__(self, data: _BufferWithLen, offset: int = 0, size: int | None = None) -> None: ...
    @overload
    def __getitem__(self, key: slice) -> bytes: ...
    @overload
    def __getitem__(self, key: int) -> int: ...
    def __len__(self) -> int: ...
class Display:
    # Type stub for the core X11 protocol display/connection object.

    # Extension registry and protocol class tables.
    extension_major_opcodes: dict[str, int]
    error_classes: dict[int, type[error.XError]]
    event_classes: dict[int, type[rq.Event] | dict[int, type[rq.Event]]]
    resource_classes: _ResourceBaseClassesType | None

    # Connection identity and underlying transport.
    display_name: str
    default_screen: int
    socket: socket
    socket_error_lock: lock._DummyLock
    socket_error: Exception | None

    # Event and request queues with their locks.
    event_queue_read_lock: lock._DummyLock
    event_queue_write_lock: lock._DummyLock
    event_queue: list[rq.Event]
    request_queue_lock: lock._DummyLock
    request_serial: int
    request_queue: list[tuple[rq.Request | rq.ReplyRequest | ConnectionSetupRequest, int]]

    # Send/receive state machine bookkeeping.
    send_recv_lock: lock._DummyLock
    send_active: int
    recv_active: int
    event_waiting: int
    event_wait_lock: lock._DummyLock
    request_waiting: int
    request_wait_lock: lock._DummyLock
    recv_buffer_size: int
    sent_requests: list[rq.Request | rq.ReplyRequest | ConnectionSetupRequest]
    recv_packet_len: int
    data_send: bytes
    data_recv: bytes
    data_sent_bytes: int

    # X resource-id allocation and error handling.
    resource_id_lock: lock._DummyLock
    resource_ids: dict[int, None]
    last_resource_id: int
    error_handler: ErrorHandler[object] | None
    big_endian: bool
    info: ConnectionSetupRequest

    def __init__(self, display: str | None = None) -> None: ...
    def get_display_name(self) -> str: ...
    def get_default_screen(self) -> int: ...
    def fileno(self) -> int: ...
    def next_event(self) -> rq.Event: ...
    def pending_events(self) -> int: ...
    def flush(self) -> None: ...
    def close(self) -> None: ...
    def set_error_handler(self, handler: ErrorHandler[object] | None) -> None: ...
    def allocate_resource_id(self) -> int: ...
    def free_resource_id(self, rid: int) -> None: ...
    # get_resource_class is overloaded per known class name so callers get the
    # precise xobject type back.
    @overload
    def get_resource_class(self, class_name: Literal["resource"], default: object = None) -> type[resource.Resource]: ...
    @overload
    def get_resource_class(self, class_name: Literal["drawable"], default: object = None) -> type[drawable.Drawable]: ...
    @overload
    def get_resource_class(self, class_name: Literal["window"], default: object = None) -> type[drawable.Window]: ...
    @overload
    def get_resource_class(self, class_name: Literal["pixmap"], default: object = None) -> type[drawable.Pixmap]: ...
    @overload
    def get_resource_class(self, class_name: Literal["fontable"], default: object = None) -> type[fontable.Fontable]: ...
    @overload
    def get_resource_class(self, class_name: Literal["font"], default: object = None) -> type[fontable.Font]: ...
    @overload
    def get_resource_class(self, class_name: Literal["gc"], default: object = None) -> type[fontable.GC]: ...
    @overload
    def get_resource_class(self, class_name: Literal["colormap"], default: object = None) -> type[colormap.Colormap]: ...
    @overload
    def get_resource_class(self, class_name: Literal["cursor"], default: object) -> type[cursor.Cursor]: ...
    @overload
    def get_resource_class(self, class_name: str, default: _T) -> type[_ResourceBaseClass] | _T: ...
    @overload
    def get_resource_class(self, class_name: str, default: None = None) -> type[_ResourceBaseClass] | None: ...
    def set_extension_major(self, extname: str, major: int) -> None: ...
    def get_extension_major(self, extname: str) -> int: ...
    def add_extension_event(self, code: int, evt: type[rq.Event], subcode: int | None = None) -> None: ...
    def add_extension_error(self, code: int, err: type[error.XError]) -> None: ...
    def check_for_error(self) -> None: ...
    def send_request(self, request: rq.Request | rq.ReplyRequest | ConnectionSetupRequest, wait_for_response: bool) -> None: ...
    def close_internal(self, whom: object) -> None: ...
    def send_and_recv(self, flush: bool = False, event: bool = False, request: int | None = None, recv: bool = False) -> None: ...
    def parse_response(self, request: int) -> bool: ...
    def parse_error_response(self, request: int) -> bool: ...
    def default_error_handler(self, err: object) -> None: ...
    def parse_request_response(self, request: int) -> bool: ...
    def parse_event_response(self, etype: int) -> None: ...
    # NOTE(review): name looks auto-mangled (presumably get_waiting_request,
    # looking up the pending request with serial number `sno`).
    def METHOD_NAME(self, sno: int) -> rq.ReplyRequest | ConnectionSetupRequest | None: ...
    def get_waiting_replyrequest(self) -> rq.ReplyRequest | ConnectionSetupRequest: ...
    def parse_connection_setup(self) -> bool: ...
# Protocol structure definitions — presumably used while parsing the
# connection setup reply (see Display.parse_connection_setup); confirm in the
# implementation module.
PixmapFormat: rq.Struct
VisualType: rq.Struct
Depth: rq.Struct
Screen: rq.Struct
class ConnectionSetupRequest(rq.GetAttrData):
    # Type stub: the initial connection handshake request/reply object.
    def __init__(self, display: Display, *args: object, **keys: object) -> None: ...
"""Module for editing the hardcoded table mapping of ground levels to dungeon tilesets."""
# Copyright 2020-2023 Capypara and the SkyTemple Contributors
#
# This file is part of SkyTemple.
#
# SkyTemple is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SkyTemple is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SkyTemple. If not, see <https://www.gnu.org/licenses/>.
from __future__ import annotations
from typing import cast, List, Optional, Tuple
from range_typed_integers import i16, u8, u32
from skytemple_files.common.ppmdu_config.data import Pmd2Data
from skytemple_files.common.ppmdu_config.script_data import (
Pmd2ScriptLevel,
Pmd2ScriptLevelMapType,
)
from skytemple_files.common.util import AutoString, read_u8, read_i16, read_u32
from skytemple_files.container.dungeon_bin.model import DungeonBinPack
from skytemple_files.dungeon_data.fixed_bin.model import FixedBin, FixedFloor
from skytemple_files.dungeon_data.mappa_bin.protocol import MappaBinProtocol
from skytemple_files.graphics.dma.protocol import DmaProtocol
from skytemple_files.graphics.dpc.protocol import DpcProtocol
from skytemple_files.graphics.dpci.protocol import DpciProtocol
from skytemple_files.graphics.dpl.protocol import DplProtocol
from skytemple_files.graphics.dpla.protocol import DplaProtocol
from skytemple_files.hardcoded.dungeons import DungeonDefinition
class GroundTilesetMapping(AutoString):
    """One entry of the hardcoded ground-level -> dungeon-tileset table.

    Each entry serializes to 8 little-endian bytes:
    i16 ground_level, u8 dungeon tileset id, u8 floor id, u32 unk3.
    """

    ground_level: i16
    # Fixed: the annotation previously declared `dungeon_tileset`, but the
    # attribute actually stored by __init__ is `dungeon_id`.
    dungeon_id: u8
    floor_id: u8
    unk3: u32

    def __init__(self, ground_level: i16, dungeon_tileset: u8, floor_id: u8, unk3: u32):
        self.ground_level = ground_level
        self.dungeon_id = dungeon_tileset
        self.floor_id = floor_id
        self.unk3 = unk3

    # Compat
    @property
    def unk2(self) -> u8:
        """Backwards-compatible alias for `floor_id`."""
        return self.floor_id

    @unk2.setter
    def unk2(self, value: u8) -> None:
        self.floor_id = value

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, GroundTilesetMapping):
            return False
        return (
            self.ground_level == other.ground_level
            and self.dungeon_id == other.dungeon_id
            and self.unk2 == other.unk2
            and self.unk3 == other.unk3
        )

    def METHOD_NAME(self) -> bytes:
        """Serialize this entry to its 8-byte little-endian representation."""
        # Fixed: serialization must call int.to_bytes() on each field; the
        # previous code invoked a non-existent `.METHOD_NAME(...)` method on
        # plain ints, which would raise AttributeError at runtime.
        return (
            self.ground_level.to_bytes(2, "little", signed=True)
            + self.dungeon_id.to_bytes(1, "little", signed=False)
            + self.unk2.to_bytes(1, "little", signed=False)
            + self.unk3.to_bytes(4, "little", signed=False)
        )
class HardcodedGroundDungeonTilesets:
    # Read/write access to the hardcoded LEVEL_TILEMAP_LIST table in overlay 11.

    @staticmethod
    def get_ground_dungeon_tilesets(
        overlay11bin: bytes, config: Pmd2Data
    ) -> List[GroundTilesetMapping]:
        """Returns the list."""
        block = config.bin_sections.overlay11.data.LEVEL_TILEMAP_LIST
        lst = []
        # Each table entry is 8 bytes: i16 level, u8 tileset, u8 floor, u32 unk3.
        for i in range(block.address, block.address + block.length, 8):
            lst.append(
                GroundTilesetMapping(
                    read_i16(overlay11bin, i),
                    read_u8(overlay11bin, i + 2),
                    read_u8(overlay11bin, i + 3),
                    read_u32(overlay11bin, i + 4),
                )
            )
        return lst

    @staticmethod
    def set_ground_dungeon_tilesets(
        value: List[GroundTilesetMapping], overlay11bin: bytearray, config: Pmd2Data
    ) -> None:
        """
        Sets the list.
        The length of the list must exactly match the original ROM's length (see get_dungeon_list).
        """
        block = config.bin_sections.overlay11.data.LEVEL_TILEMAP_LIST
        assert block.length is not None
        expected_length = int(block.length / 8)
        if len(value) != expected_length:
            raise ValueError(
                f"The list must have exactly the length of {expected_length} entries."
            )
        # Overwrite each 8-byte slot in place with the serialized entry.
        for i, entry in enumerate(value):
            overlay11bin[
                block.address + i * 8 : block.address + (i + 1) * 8
            ] = entry.METHOD_NAME()
def resolve_mapping_for_level(
    level: Pmd2ScriptLevel,
    tileset_mappings: List[GroundTilesetMapping],
    mappa: MappaBinProtocol,
    fixed: FixedBin,
    dungeon_bin: DungeonBinPack,
    dungeons: List[DungeonDefinition],
) -> Optional[
    Tuple[
        DmaProtocol,
        DpcProtocol,
        DpciProtocol,
        DplProtocol,
        DplaProtocol,
        Optional[FixedFloor],
    ]
]:
    """Returns tileset data and fixed floor data (if applicable) for the given level.

    Returns None if the level is neither a fixed room nor a tileset map, or if
    no mapping entry exists for the level.
    """
    if (
        level.mapty_enum != Pmd2ScriptLevelMapType.FIXED_ROOM
        and level.mapty_enum != Pmd2ScriptLevelMapType.TILESET
    ):
        return None

    dungeon_id, floor_id = None, None
    for mapping in tileset_mappings:
        if mapping.ground_level == level.id:
            dungeon_id, floor_id = mapping.dungeon_id, mapping.floor_id
            break

    # Fixed: use an explicit None check. The previous `if not dungeon_id:`
    # also rejected dungeon id 0 (truthiness bug); the asserts that followed
    # show a missing-mapping check was intended. Both ids are assigned
    # together, so checking either would do.
    if dungeon_id is None or floor_id is None:
        return None

    mappa_idx = dungeons[dungeon_id].mappa_index
    start_offset = dungeons[dungeon_id].start_after
    length = dungeons[dungeon_id].number_floors
    # Clamp to the dungeon's last floor; floor ids in the table are 1-based.
    # NOTE(review): a floor_id of 0 would index floor -1 here — presumably the
    # hardcoded table never contains 0; confirm against the ROM data.
    floor_id = cast(u8, min(length - 1, floor_id - 1))  # type: ignore
    layout = mappa.floor_lists[mappa_idx][start_offset + floor_id].layout
    tileset_id = layout.tileset_id
    # Tileset ids past the last real tileset fall back to tileset 0.
    if tileset_id > 169:
        tileset_id = u8(0)

    dma: DmaProtocol = dungeon_bin.get(f"dungeon{tileset_id}.dma")
    dpl: DplProtocol = dungeon_bin.get(f"dungeon{tileset_id}.dpl")
    dpla: DplaProtocol = dungeon_bin.get(f"dungeon{tileset_id}.dpla")
    dpci: DpciProtocol = dungeon_bin.get(f"dungeon{tileset_id}.dpci")
    dpc: DpcProtocol = dungeon_bin.get(f"dungeon{tileset_id}.dpc")

    fixedf = None
    if level.mapty_enum == Pmd2ScriptLevelMapType.FIXED_ROOM:
        fixedf = fixed.fixed_floors[layout.fixed_floor_id]
    return dma, dpc, dpci, dpl, dpla, fixedf
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Stub functions that are used by the Amazon Transcribe unit tests.
When tests are run against an actual AWS account, the stubber class does not
set up stubs and passes all calls through to the Boto3 client.
"""
from test_tools.example_stubber import ExampleStubber
class TranscribeStubber(ExampleStubber):
    """
    A class that implements a variety of stub functions that are used by the
    Amazon Transcribe unit tests.

    The stubbed functions all expect certain parameters to be passed to them as
    part of the tests, and will raise errors when the actual parameters differ from
    the expected.
    """

    def __init__(self, client, use_stubs=True):
        """
        Initializes the object with a specific client and configures it for
        stubbing or AWS passthrough.

        :param client: A Boto3 Transcribe client.
        :param use_stubs: When True, use stubs to intercept requests. Otherwise,
                          pass requests through to AWS.
        """
        super().__init__(client, use_stubs)

    @staticmethod
    def _make_job(job):
        """Translate a test job dict into the Transcribe API job shape."""
        api_job = {
            'TranscriptionJobName': job['name'],
            'Media': {'MediaFileUri': job['media_uri']},
            'MediaFormat': job['media_format'],
            'LanguageCode': job['language_code']}
        if 'vocabulary_name' in job:
            api_job['Settings'] = {'VocabularyName': job['vocabulary_name']}
        if 'status' in job:
            api_job['TranscriptionJobStatus'] = job['status']
        if 'file_uri' in job:
            api_job['Transcript'] = {'TranscriptFileUri': job['file_uri']}
        return api_job

    @staticmethod
    def _make_vocabulary(vocabulary):
        """Translate a test vocabulary dict into the Transcribe API shape."""
        api_vocab = {
            'VocabularyName': vocabulary['name'],
            'LanguageCode': vocabulary['language_code']}
        if 'phrases' in vocabulary:
            api_vocab['Phrases'] = vocabulary['phrases']
        elif 'table_uri' in vocabulary:
            api_vocab['VocabularyFileUri'] = vocabulary['table_uri']
        return api_vocab

    def stub_start_transcription_job(self, job, error_code=None):
        """Stub StartTranscriptionJob for the given test job."""
        expected_params = self._make_job(job)
        response = {'TranscriptionJob': {'TranscriptionJobName': job['name']}}
        self._stub_bifurcator(
            'start_transcription_job', expected_params, response, error_code=error_code)

    # NOTE(review): name looks auto-mangled (stubs ListTranscriptionJobs).
    def METHOD_NAME(
            self, job_filter, jobs, response_slice, next_token=None, error_code=None):
        """
        Stub ListTranscriptionJobs, returning the jobs in `response_slice` and a
        NextToken when more jobs remain after the slice.
        """
        # Fixed PEP 8: missing space after comma in the signature.
        expected_params = {'JobNameContains': job_filter}
        if next_token is not None:
            expected_params['NextToken'] = next_token
        response = {'TranscriptionJobSummaries': [{
            'TranscriptionJobName': job['name']
        } for job in jobs[response_slice[0]:response_slice[1]]]}
        if response_slice[1] < len(jobs):
            response['NextToken'] = 'test-token'
        self._stub_bifurcator(
            'list_transcription_jobs', expected_params, response, error_code=error_code)

    def stub_get_transcription_job(self, job, error_code=None):
        """Stub GetTranscriptionJob for the given test job."""
        expected_params = {'TranscriptionJobName': job['name']}
        response = {'TranscriptionJob': self._make_job(job)}
        self._stub_bifurcator(
            'get_transcription_job', expected_params, response, error_code=error_code)

    def stub_delete_transcription_job(self, job_name, error_code=None):
        """Stub DeleteTranscriptionJob for the named job."""
        expected_params = {'TranscriptionJobName': job_name}
        self._stub_bifurcator(
            'delete_transcription_job', expected_params, error_code=error_code)

    def stub_create_vocabulary(self, vocabulary, error_code=None):
        """Stub CreateVocabulary for the given test vocabulary."""
        expected_params = self._make_vocabulary(vocabulary)
        response = {'VocabularyName': vocabulary['name']}
        self._stub_bifurcator(
            'create_vocabulary', expected_params, response, error_code=error_code)

    def stub_list_vocabularies(
            self, vocab_filter, vocabularies, vocab_slice, next_token=None,
            error_code=None):
        """
        Stub ListVocabularies, returning the vocabularies in `vocab_slice` and a
        NextToken when more remain after the slice.
        """
        expected_params = {'NameContains': vocab_filter}
        if next_token is not None:
            expected_params['NextToken'] = next_token
        response = {
            'Vocabularies': [
                self._make_vocabulary(vocab)
                for vocab in vocabularies[vocab_slice[0]:vocab_slice[1]]]}
        if vocab_slice[1] < len(vocabularies):
            response['NextToken'] = 'test-token'
        self._stub_bifurcator(
            'list_vocabularies', expected_params, response, error_code=error_code)

    def stub_get_vocabulary(self, vocabulary, error_code=None):
        """Stub GetVocabulary for the given test vocabulary."""
        expected_params = {'VocabularyName': vocabulary['name']}
        response = self._make_vocabulary(vocabulary)
        self._stub_bifurcator(
            'get_vocabulary', expected_params, response, error_code=error_code)

    def stub_update_vocabulary(self, vocabulary, error_code=None):
        """Stub UpdateVocabulary for the given test vocabulary."""
        expected_params = self._make_vocabulary(vocabulary)
        response = {'VocabularyName': vocabulary['name']}
        self._stub_bifurcator(
            'update_vocabulary', expected_params, response, error_code=error_code)

    def stub_delete_vocabulary(self, vocab_name, error_code=None):
        """Stub DeleteVocabulary for the named vocabulary."""
        expected_params = {'VocabularyName': vocab_name}
        self._stub_bifurcator(
            'delete_vocabulary', expected_params, error_code=error_code)
from django.test import TestCase
from corehq.apps.linked_domain.models import DomainLink
from corehq.apps.fixtures.models import (
LookupTable,
LookupTableRow,
TypeField,
Field,
)
from corehq.apps.linked_domain.exceptions import UnsupportedActionError
from corehq.apps.linked_domain.updates import update_fixture
class TestUpdateFixturesReal(TestCase):
    """Tests for update_fixture() syncing lookup tables between linked domains."""

    def test_update_creates_new_synced_fixture(self):
        self._create_table(self.upstream_domain, 'test-table', ['col_1'], [{'col_1': 'one'}, {'col_1': 'two'}])

        update_fixture(self.link, 'test-table')

        created_table = LookupTable.objects.by_domain_tag(self.downstream_domain, 'test-table')
        self.assertEqual(created_table.tag, 'test-table')
        self.assertTrue(created_table.is_synced)
        self.assertColumnsEqual(created_table, ['col_1'])
        self.assertTableFieldsEqual(created_table, [{'col_1': 'one'}, {'col_1': 'two'}])

    def METHOD_NAME(self):
        # An already-synced downstream table with the same tag is overwritten
        # with the upstream columns and rows.
        upstream_cols = ['col_1']
        downstream_cols = ['col_2']
        upstream_rows = [{'col_1': 'one'}]
        downstream_rows = [{'col_2': 'two'}]
        self._create_table(self.upstream_domain, 'test-table', upstream_cols, upstream_rows)
        self._create_table(self.downstream_domain, 'test-table', downstream_cols, downstream_rows, is_synced=True)

        update_fixture(self.link, 'test-table')

        created_table = LookupTable.objects.by_domain_tag(self.downstream_domain, 'test-table')
        self.assertColumnsEqual(created_table, upstream_cols)
        self.assertTableFieldsEqual(created_table, upstream_rows)

    def test_update_raises_error_on_unsynced_duplicate_name(self):
        self._create_table(self.upstream_domain, 'test-table', ['col_1'], [])
        self._create_table(self.downstream_domain, 'test-table', ['col_2'], [], is_synced=False)

        with self.assertRaisesMessage(UnsupportedActionError,
                'Failed to push Lookup Table "test-table" due to matching (same Table ID) unlinked Lookup Table'
                ' in the downstream project space. Please edit the Lookup Table to resolve the matching or click'
                ' "Push & Overwrite" to overwrite and link them.'):
            update_fixture(self.link, 'test-table')

    def test_produces_pull_message(self):
        # Same conflict as above, but the pull direction wording is used.
        self._create_table(self.upstream_domain, 'test-table', ['col_1'], [])
        self._create_table(self.downstream_domain, 'test-table', ['col_2'], [], is_synced=False)

        with self.assertRaisesMessage(UnsupportedActionError,
                'Failed to sync Lookup Table "test-table" due to matching (same Table ID) unlinked Lookup Table'
                ' in the downstream project space. Please edit the Lookup Table to resolve the matching or click'
                ' "Sync & Overwrite" to overwrite and link them.'):
            update_fixture(self.link, 'test-table', is_pull=True)

    def test_force_update_overwrites_conflicting_duplicate_name(self):
        upstream_cols = ['col_1']
        downstream_cols = ['col_2']
        upstream_rows = [{'col_1': 'one'}]
        downstream_rows = [{'col_2': 'two'}]
        self._create_table(self.upstream_domain, 'test-table', upstream_cols, upstream_rows)
        self._create_table(self.downstream_domain, 'test-table', downstream_cols, downstream_rows)

        update_fixture(self.link, 'test-table', overwrite=True)

        created_table = LookupTable.objects.by_domain_tag(self.downstream_domain, 'test-table')
        self.assertColumnsEqual(created_table, upstream_cols)
        self.assertTableFieldsEqual(created_table, upstream_rows)

    def test_syncing_local_table_raises_error(self):
        self._create_table(self.upstream_domain, 'test-table', ['col_1'], [], is_global=False)

        with self.assertRaisesMessage(UnsupportedActionError, "Found non-global lookup table 'test-table'"):
            update_fixture(self.link, 'test-table')

    def setUp(self):
        self.downstream_domain = 'downstream'
        self.upstream_domain = 'upstream'
        self.link = DomainLink(linked_domain=self.downstream_domain, master_domain=self.upstream_domain)

    def _create_table(self, domain, tag, col_names, rows, is_global=True, is_synced=False):
        # Helper: create a LookupTable plus one LookupTableRow per entry in `rows`.
        columns = [TypeField(name=col_name) for col_name in col_names]
        table = LookupTable.objects.create(
            domain=domain, tag=tag, fields=columns, is_global=is_global, is_synced=is_synced)
        for i, row in enumerate(rows):
            fields = {key: [Field(value=val)] for (key, val) in row.items()}
            LookupTableRow.objects.create(domain=domain, table_id=table.id, fields=fields, sort_key=i)
        return table

    def assertColumnsEqual(self, table, expected_column_names):
        cols = [col.name for col in table.fields]
        self.assertEqual(cols, expected_column_names)

    def assertTableFieldsEqual(self, table, expected_field_values):
        rows = LookupTableRow.objects.filter(domain=table.domain, table_id=table.id)
        field_values = [row.fields_without_attributes for row in rows]
        self.assertListEqual(field_values, expected_field_values)
# -*- coding: utf-8 -*-
"""
mslib.mswms.mpl_lsec
~~~~~~~~~~~~~~~~~~~~
Linear section style super class.
This file is part of MSS.
:copyright: Copyright 2021 May Baer
:copyright: Copyright 2021-2023 by the MSS team, see AUTHORS.
:license: APACHE-2.0, see LICENSE for details.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# style definitions should be put in mpl_lsec_styles.py
import logging
from xml.dom.minidom import getDOMImplementation
import matplotlib as mpl
from pint import Quantity
from mslib.mswms import mss_2D_sections
from mslib.utils.units import convert_to
mpl.rcParams['xtick.direction'] = 'out'
mpl.rcParams['ytick.direction'] = 'out'
class AbstractLinearSectionStyle(mss_2D_sections.Abstract2DSectionStyle):
    """
    Abstract Linear Section Style
    Superclass for all Matplotlib-based linear section styles.
    """

    def __init__(self, driver=None):
        """
        Constructor.

        :param driver: data driver used to access model data and units
        """
        super().__init__(driver=driver)
        # Name of the variable plotted along the section; concrete styles
        # are expected to assign this.
        self.variable = ""
        # Display unit of the variable (filled from driver.data_units).
        self.unit = ""
        # Values of the variable at each waypoint of the section.
        self.y_values = []

    def _prepare_datafields(self):
        # Pick the configured variable out of the data dict and remember the
        # unit the driver reports for it (if known).
        self.y_values = self.data[self.variable]
        if self.variable in self.driver.data_units:
            self.unit = self.driver.data_units[self.variable]

    def supported_crs(self):
        """
        Returns a list of the coordinate reference systems supported by
        this style.
        """
        return ["LINE:1"]

    def METHOD_NAME(self, data, lats, lons, valid_time, init_time):
        """
        Serialize the data along the linear section into an XML document and
        return it as a pretty-printed string.

        :param data: dict mapping data field names to value sequences
        :param lats: latitudes of the section waypoints
        :param lons: longitudes of the section waypoints
        :param valid_time: forecast valid time (datetime or None)
        :param init_time: forecast initialisation time (datetime or None)
        :returns: XML string with title, times, coordinates and data values
        :raises KeyError: if a required data field is missing from ``data``
        """
        # Check if required data is available.
        self.data_units = self.driver.data_units.copy()
        for datatype, dataitem, dataunit in self.required_datafields:
            if dataitem not in data:
                raise KeyError(f"required data field '{dataitem}' not found")
            origunit = self.driver.data_units[dataitem]
            # Convert to the unit requested by the style definition, when given.
            if dataunit is not None:
                data[dataitem] = convert_to(data[dataitem], origunit, dataunit)
                self.data_units[dataitem] = dataunit
            else:
                logging.debug("Please add units to plot variables")
        # Copy parameters to properties.
        self.data = data
        self.lats = lats
        self.lons = lons
        self.valid_time = valid_time
        self.init_time = init_time
        # Derive additional data fields and make the plot.
        self._prepare_datafields()
        impl = getDOMImplementation()
        xmldoc = impl.createDocument(None, "MSS_LinearSection_Data", None)
        # Title of this section.
        node = xmldoc.createElement("Title")
        node.appendChild(xmldoc.createTextNode(self.title))
        xmldoc.documentElement.appendChild(node)
        # Time information of this section.
        if self.valid_time is not None:
            node = xmldoc.createElement("ValidTime")
            node.appendChild(xmldoc.createTextNode(self.valid_time.strftime("%Y-%m-%dT%H:%M:%SZ")))
            xmldoc.documentElement.appendChild(node)
        if self.init_time is not None:
            node = xmldoc.createElement("InitTime")
            node.appendChild(xmldoc.createTextNode(self.init_time.strftime("%Y-%m-%dT%H:%M:%SZ")))
            xmldoc.documentElement.appendChild(node)
        # Longitude data.
        node = xmldoc.createElement("Longitude")
        node.setAttribute("num_waypoints", f"{len(self.lons)}")
        data_str = ",".join([str(lon) for lon in self.lons])
        node.appendChild(xmldoc.createTextNode(data_str))
        xmldoc.documentElement.appendChild(node)
        # Latitude data.
        node = xmldoc.createElement("Latitude")
        node.setAttribute("num_waypoints", f"{len(self.lats)}")
        data_str = ",".join([str(lat) for lat in self.lats])
        node.appendChild(xmldoc.createTextNode(data_str))
        xmldoc.documentElement.appendChild(node)
        # Variable data.
        node = xmldoc.createElement("Data")
        node.setAttribute("num_waypoints", f"{len(self.y_values)}")
        node.setAttribute("unit", self.unit)
        # Strip pint units so only the magnitudes are serialized.
        # NOTE(review): indexing y_values[0] assumes a non-empty section —
        # confirm callers always supply at least one waypoint.
        if isinstance(self.y_values[0], Quantity):
            data_str = ",".join([str(val.magnitude) for val in self.y_values])
        else:
            data_str = ",".join([str(val) for val in self.y_values])
        node.appendChild(xmldoc.createTextNode(data_str))
        xmldoc.documentElement.appendChild(node)
        # Return the XML document as formatted string.
        return xmldoc.toprettyxml(indent=" ")
6,013 | import | """Helper functions for the ``actions`` package."""
from __future__ import annotations
import functools
import importlib
import importlib.resources as importlib_resources
import logging
import os
import re
from collections import namedtuple
from collections.abc import Generator
from typing import Any
from typing import Callable
from ansible_navigator.action_defs import RunStdoutReturn
from ansible_navigator.ui_framework import error_notification
logger = logging.getLogger(__name__)
# Basic structure for storing information about one action
ActionT = namedtuple("ActionT", ("name", "cls", "kegex"))
Kegex = namedtuple("Kegex", ("name", "kegex"))
# Dictionary with information about all registered actions
_ACTIONS: dict[str, dict] = {}
def METHOD_NAME(package: str, action: str) -> None:
    """Import the module for ``action`` from ``package``.

    :param package: The name of the package
    :param action: The action to import
    """
    module_name = f"{package}.{action}"
    importlib.import_module(module_name)
def _import_all(package: str) -> None:
    """Import every action module found in a package.

    :param package: The name of the package
    """
    for resource in importlib_resources.files(package).iterdir():
        if not resource.is_file():
            continue
        filename = resource.name
        if filename.endswith(".py") and not filename.startswith("_"):
            METHOD_NAME(package, filename[:-3])
def register(cls: Any) -> Any:
    """Register an action class; used as a decorator.

    :param cls: The class to register
    :returns: The class after registration
    """
    package, _, action = cls.__module__.rpartition(".")
    _ACTIONS.setdefault(package, {})[action] = ActionT(
        name=action, cls=cls, kegex=re.compile(cls.KEGEX)
    )
    return cls
def get(package: str, action: str) -> Callable:
    """Import an action and return its registered class.

    :param package: The name of the package
    :param action: The name of the action
    :returns: The action's registered class
    """
    METHOD_NAME(package, action)
    registered = _ACTIONS[package][action]
    return registered.cls
def get_factory(package: str) -> Callable:
    """Create a ``get()`` function bound to one package.

    :param package: The name of the package
    :returns: The action's registered class
    """
    def _bound(*args: Any, **kwargs: Any) -> Any:
        return get(package, *args, **kwargs)
    return _bound
def kegex(package: str, action: str) -> tuple:
    """Return the (name, class, kegex) record for an action.

    :param package: The name of the package
    :param action: The name of the action
    :returns: The name, class and kegex for an action
    """
    METHOD_NAME(package, action)
    record = _ACTIONS[package][action]
    return record
def kegexes(package: str) -> Generator:
    """Yield the ``kegex`` record of every action in a package.

    :param package: The name of the package
    :returns: A generator for all ``kegexes``
    """
    _import_all(package)
    return (kegex(package, action_name) for action_name in names(package))
def kegexes_factory(package: str) -> Callable:
    """Create a ``kegexes()`` function bound to one package.

    :param package: The name of the package
    :returns: A ``kegexes()`` method for the package
    """
    def _bound(*args: Any, **kwargs: Any) -> Any:
        return kegexes(package, *args, **kwargs)
    return _bound
def names(package: str) -> list:
    """List all action names registered for one package, sorted.

    :param package: The name of the package
    :returns: All packages
    """
    _import_all(package)
    action_names = list(_ACTIONS[package])
    action_names.sort()
    return action_names
def names_factory(package: str) -> Callable:
    """Create a ``names()`` function bound to one package.

    :param package: The name of the package
    :returns: a ``names()`` method for the package
    """
    def _bound(*args: Any, **kwargs: Any) -> Any:
        return names(package, *args, **kwargs)
    return _bound
def run_interactive(package: str, action: str, *args: Any, **_kwargs: Any) -> Any:
    """Call the given action's ``run()`` method.

    :param package: The name of the package
    :param action: The name of the action
    :param args: The arguments passed to the action's run method
    :param _kwargs: The keyword arguments passed to the action's run method
    :returns: The outcome of running the action's run method
    """
    action_cls = get(package, action)
    # args is expected to be the pair (app, interaction).
    app, interaction = args
    app_action = action_cls(app.args)
    supports_interactive = hasattr(app_action, "run")
    if not supports_interactive:
        logger.error("Subcommand '%s' does not support mode interactive", action)
    # Fall back to no_interactive_mode when the action has no run() method.
    run_action = app_action.run if supports_interactive else app_action.no_interactive_mode
    # Allow tracebacks to bring down the UI, used in tests
    if os.getenv("ANSIBLE_NAVIGATOR_ALLOW_UI_TRACEBACK") == "true":
        return run_action(app=app, interaction=interaction)
    # Capture and show a UI notification
    try:
        return run_action(app=app, interaction=interaction)
    except Exception:
        # Broad catch by design: keep the TUI alive and surface the failure
        # as an on-screen notification instead of crashing.
        logger.critical("Subcommand '%s' encountered a fatal error.", action)
        logger.exception("Logging an uncaught exception")
        warn_msg = [f"Unexpected errors were encountered while running '{action}'."]
        warn_msg.append("Please log an issue with the log file contents.")
        warning = error_notification(warn_msg)
        interaction.ui.show_form(warning)
        return None
def run_interactive_factory(package: str) -> Callable:
    """Create a ``run_interactive()`` function bound to one package.

    :param package: The name of the package
    :returns: A partial ``run_interactive()`` method for the package
    """
    def _bound(*args: Any, **kwargs: Any) -> Any:
        return run_interactive(package, *args, **kwargs)
    return _bound
def run_stdout(package: str, action: str, *args: Any, **_kwargs: Any) -> RunStdoutReturn:
    """Call the given action's ``run_stdout()`` method.

    :param package: The name of the package
    :param action: The name of the action
    :param args: The arguments passed to the action's run_stdout method
    :param _kwargs: The keyword arguments passed to the action's run_stdout method
    :returns: The outcome of running the action's ``run_stdout()`` method
    """  # noqa: D402 # Refers to the action's run_stdout in the first line, not this function
    action_cls = get(package, action)
    cli_args = args[0]
    instance = action_cls(cli_args)
    return instance.run_stdout()
def run_stdout_factory(package: str) -> Callable:
    """Create a ``run_stdout()`` function bound to one package.

    :param package: The name of the package
    :returns: A partial ``run_stdout()`` method for the package
    """
    def _bound(*args: Any, **kwargs: Any) -> Any:
        return run_stdout(package, *args, **kwargs)
    return _bound
6,014 | cover image | from django.conf import settings
from django_filters import rest_framework as filters
from djqscsv import render_to_csv_response
from drf_spectacular.utils import extend_schema, extend_schema_view
from dry_rest_permissions.generics import DRYPermissionFiltersBase, DRYPermissions
from rest_framework import filters as drf_filters
from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from care.facility.api.serializers.facility import (
FacilityBasicInfoSerializer,
FacilityImageUploadSerializer,
FacilitySerializer,
)
from care.facility.models import (
Facility,
FacilityCapacity,
FacilityPatientStatsHistory,
HospitalDoctors,
PatientRegistration,
)
from care.users.models import User
class FacilityFilter(filters.FilterSet):
    """Query-parameter filters for facility endpoints."""

    # Case-insensitive substring match on the facility name.
    name = filters.CharFilter(field_name="name", lookup_expr="icontains")
    facility_type = filters.NumberFilter(field_name="facility_type")
    # Location filters: by primary key, or by case-insensitive name substring.
    district = filters.NumberFilter(field_name="district__id")
    district_name = filters.CharFilter(
        field_name="district__name", lookup_expr="icontains"
    )
    local_body = filters.NumberFilter(field_name="local_body__id")
    local_body_name = filters.CharFilter(
        field_name="local_body__name", lookup_expr="icontains"
    )
    state = filters.NumberFilter(field_name="state__id")
    state_name = filters.CharFilter(field_name="state__name", lookup_expr="icontains")
    kasp_empanelled = filters.BooleanFilter(field_name="kasp_empanelled")
class FacilityQSPermissions(DRYPermissionFiltersBase):
    """Restrict the facility queryset to what the requesting user may see."""

    def filter_queryset(self, request, queryset, view):
        user = request.user
        if user.is_superuser:
            return queryset
        if user.user_type >= User.TYPE_VALUE_MAP["StateLabAdmin"]:
            return queryset.filter(state=user.state)
        if user.user_type >= User.TYPE_VALUE_MAP["DistrictLabAdmin"]:
            return queryset.filter(district=user.district)
        return queryset.filter(users__id__exact=user.id)
class FacilityViewSet(
    mixins.CreateModelMixin,
    mixins.ListModelMixin,
    mixins.RetrieveModelMixin,
    mixins.UpdateModelMixin,
    mixins.DestroyModelMixin,
    viewsets.GenericViewSet,
):
    """Viewset for facility CRUD operations."""

    queryset = Facility.objects.all().select_related(
        "ward", "local_body", "district", "state"
    )
    permission_classes = (
        IsAuthenticated,
        DRYPermissions,
    )
    filter_backends = (
        FacilityQSPermissions,
        filters.DjangoFilterBackend,
        drf_filters.SearchFilter,
    )
    filterset_class = FacilityFilter
    lookup_field = "external_id"
    search_fields = ["name", "district__name", "state__name"]

    # Query-parameter keys selecting which related data is joined into the
    # CSV export produced by list().
    FACILITY_CAPACITY_CSV_KEY = "capacity"
    FACILITY_DOCTORS_CSV_KEY = "doctors"
    FACILITY_TRIAGE_CSV_KEY = "triage"

    def initialize_request(self, request, *args, **kwargs):
        # Resolve self.action early so get_parsers() (which runs during
        # request initialization) can branch on it.
        self.action = self.action_map.get(request.method.lower())
        return super().initialize_request(request, *args, **kwargs)

    def get_parsers(self):
        # Cover image uploads arrive as multipart form data.
        if self.action == "cover_image":
            return [MultiPartParser()]
        return super().get_parsers()

    def get_serializer_class(self):
        # ?all=true returns the slimmed-down representation.
        if self.request.query_params.get("all") == "true":
            return FacilityBasicInfoSerializer
        if self.action == "cover_image":
            # Check DRYpermissions before updating
            return FacilityImageUploadSerializer
        else:
            return FacilitySerializer

    def destroy(self, request, *args, **kwargs):
        # Only superusers or district-level (and above) admins may delete,
        # and only when the facility has no active patients.
        if (
            request.user.is_superuser
            or request.user.user_type >= User.TYPE_VALUE_MAP["DistrictLabAdmin"]
        ):
            if not PatientRegistration.objects.filter(
                facility=self.get_object(), is_active=True
            ).exists():
                return super().destroy(request, *args, **kwargs)
            else:
                return Response(
                    {"facility": "cannot delete facility with active patients"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
        return Response({"permission": "denied"}, status=status.HTTP_403_FORBIDDEN)

    def list(self, request, *args, **kwargs):
        # CSV export path: when the CSV request parameter is present, stream
        # the filtered queryset as CSV instead of the usual JSON response.
        if settings.CSV_REQUEST_PARAMETER in request.GET:
            mapping = Facility.CSV_MAPPING.copy()
            pretty_mapping = Facility.CSV_MAKE_PRETTY.copy()
            # At most one related-data section is merged in (elif chain).
            if self.FACILITY_CAPACITY_CSV_KEY in request.GET:
                mapping.update(FacilityCapacity.CSV_RELATED_MAPPING.copy())
                pretty_mapping.update(FacilityCapacity.CSV_MAKE_PRETTY.copy())
            elif self.FACILITY_DOCTORS_CSV_KEY in request.GET:
                mapping.update(HospitalDoctors.CSV_RELATED_MAPPING.copy())
                pretty_mapping.update(HospitalDoctors.CSV_MAKE_PRETTY.copy())
            elif self.FACILITY_TRIAGE_CSV_KEY in request.GET:
                mapping.update(FacilityPatientStatsHistory.CSV_RELATED_MAPPING.copy())
                pretty_mapping.update(
                    FacilityPatientStatsHistory.CSV_MAKE_PRETTY.copy()
                )
            queryset = self.filter_queryset(self.get_queryset()).values(*mapping.keys())
            return render_to_csv_response(
                queryset, field_header_map=mapping, field_serializer_map=pretty_mapping
            )
        return super(FacilityViewSet, self).list(request, *args, **kwargs)

    @extend_schema(tags=["facility"])
    @action(methods=["POST"], detail=True)
    def METHOD_NAME(self, request, external_id):
        """Upload/replace the facility's cover image (multipart POST)."""
        facility = self.get_object()
        serializer = FacilityImageUploadSerializer(facility, data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data)

    @extend_schema(tags=["facility"])
    @METHOD_NAME.mapping.delete
    def cover_image_delete(self, *args, **kwargs):
        """Clear the facility's cover image URL."""
        facility = self.get_object()
        facility.cover_image_url = None
        facility.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
@extend_schema_view(
    list=extend_schema(tags=["facility"]),
    retrieve=extend_schema(tags=["facility"]),
)
class AllFacilityViewSet(
    mixins.RetrieveModelMixin,
    mixins.ListModelMixin,
    viewsets.GenericViewSet,
):
    """Read-only listing of facilities with basic info only.

    Unlike FacilityViewSet, this viewset does not apply the per-user
    FacilityQSPermissions queryset restriction.
    """

    queryset = Facility.objects.all().select_related("local_body", "district", "state")
    serializer_class = FacilityBasicInfoSerializer
    filter_backends = (filters.DjangoFilterBackend, drf_filters.SearchFilter)
    filterset_class = FacilityFilter
    lookup_field = "external_id"
    search_fields = ["name", "district__name", "state__name"]
6,015 | get unique planning area ids | """ HFI admin logic """
from collections import defaultdict
from itertools import groupby
from operator import attrgetter
from typing import Dict, List, Set, Tuple
from datetime import datetime
from app.db.models.hfi_calc import PlanningWeatherStation
from app.schemas.hfi_calc import HFIAdminAddedStation
def update_stations(stations_to_remove: List[PlanningWeatherStation],
                    all_planning_area_stations: List[PlanningWeatherStation],
                    to_add: List[HFIAdminAddedStation],
                    timestamp: datetime,
                    username: str) -> List[PlanningWeatherStation]:
    """
    Orchestrates removal and addition of stations.

    :param stations_to_remove: stations to soft-delete (mutated in place)
    :param all_planning_area_stations: every station in the affected planning areas
    :param to_add: new station definitions to create
    :param timestamp: audit timestamp applied to all touched records
    :param username: audit user applied to all touched records
    :returns: all changed/created station records for the caller to persist
    """
    # Soft-delete the requested stations and renumber the survivors.
    stations_marked_for_removal, stations_with_order_updates = remove_stations(
        stations_to_remove, all_planning_area_stations, timestamp, username)
    # Next free ordering slot per planning area, after the removals above.
    next_order_by_planning_area = get_next_order_by_planning_area(
        stations_with_order_updates, all_planning_area_stations)
    stations_to_add = add_stations(to_add, next_order_by_planning_area, timestamp, username)
    return stations_marked_for_removal + stations_with_order_updates + stations_to_add
def remove_stations(remove_station_list: List[PlanningWeatherStation],
                    all_planning_area_stations: List[PlanningWeatherStation],
                    timestamp: datetime,
                    username: str):
    """
    Marks stations for removal and update station ordering for planning area.

    Mutates the passed-in station objects in place (soft delete via
    ``is_deleted``) and returns a tuple of
    (stations marked for removal, stations whose ordering changed).
    """
    stations_to_remove = []
    # planning_area_id -> set of ordering slots vacated in that area.
    planning_areas_with_removals = defaultdict(set)
    # Mark stations for removal and track their orders for updating other stations in planning area
    for station in remove_station_list:
        station.update_timestamp = timestamp
        station.update_user = username
        station.is_deleted = True
        # NOTE(review): only the order value is added here, while
        # update_station_ordering's annotation expects (station_code, order)
        # tuples; removed stations are still excluded downstream because
        # their order is set to None below — confirm this is intended.
        planning_areas_with_removals[station.planning_area_id].add(
            station.order_of_appearance_in_planning_area_list)
        # A removed station no longer occupies an ordering slot.
        station.order_of_appearance_in_planning_area_list = None
        stations_to_remove.append(station)
    # Handle order updates
    stations_with_order_updates = update_station_ordering(planning_areas_with_removals, all_planning_area_stations)
    return stations_to_remove, stations_with_order_updates
def update_station_ordering(planning_areas_with_removals: Dict[int, Set[Tuple[int, int]]],
                            all_planning_area_stations: List[PlanningWeatherStation]):
    """
    Given a dict of [planning_area_id] -> (station_code, order),
    indicating a station removed from a planning area, and list of all stations
    for the keyed planning areas, update the order of the stations.

    Mutates the surviving stations in place, renumbering them 1..n, and
    returns the stations whose order was updated.
    """
    stations_with_order_updates = []
    key = attrgetter('planning_area_id')
    # Group stations by planning area (groupby requires pre-sorted input).
    # The original `list(map(lambda x: x, values))` was an identity map;
    # `list(values)` is equivalent.
    all_stations_by_planning_area = {
        k: list(values)
        for k, values in groupby(sorted(all_planning_area_stations, key=key), key)
    }
    for planning_area_id, removed_stations in planning_areas_with_removals.items():
        all_stations = all_stations_by_planning_area.get(planning_area_id, None)
        if all_stations is None:
            continue
        # Survivors that still hold an ordering slot, in their current order.
        remaining = get_stations_with_order(get_other_stations(removed_stations, all_stations))
        remaining.sort(key=attrgetter('order_of_appearance_in_planning_area_list'))
        # Close the gaps left by removals: renumber consecutively from 1.
        for idx, station in enumerate(remaining):
            station.order_of_appearance_in_planning_area_list = idx + 1
            stations_with_order_updates.append(station)
    return stations_with_order_updates
def get_other_stations(stations_removed: Set[Tuple[int, int]], all_stations: List[PlanningWeatherStation]):
    """
    Return the stations from ``all_stations`` whose
    (station_code, order) pair is not in ``stations_removed``.
    """
    return [
        station for station in all_stations
        if (station.station_code, station.order_of_appearance_in_planning_area_list)
        not in stations_removed
    ]
def get_stations_with_order(stations: List[PlanningWeatherStation]):
    """
    Return only the stations that currently have an ordering assigned.
    """
    return [
        station for station in stations
        if station.order_of_appearance_in_planning_area_list is not None
    ]
def add_stations(stations_to_add: List[HFIAdminAddedStation],
                 next_order_by_planning_area: Dict[int, int],
                 timestamp: datetime,
                 username: str) -> List[PlanningWeatherStation]:
    """
    Build PlanningWeatherStation records for the requested additions,
    assigning each the next free ordering slot of its planning area and
    advancing ``next_order_by_planning_area`` as slots are consumed.
    """
    new_stations: List[PlanningWeatherStation] = []
    for addition in stations_to_add:
        planning_area_id = addition.planning_area_id
        order = next_order_by_planning_area.get(planning_area_id, 1)
        new_stations.append(PlanningWeatherStation(
            planning_area_id=planning_area_id,
            station_code=addition.station_code,
            order_of_appearance_in_planning_area_list=order,
            fuel_type_id=addition.fuel_type_id,
            create_user=username,
            update_user=username,
            create_timestamp=timestamp,
            update_timestamp=timestamp,
            is_deleted=False
        ))
        next_order_by_planning_area[planning_area_id] = order + 1
    return new_stations
def get_next_order_by_planning_area(station_with_order_updates: List[PlanningWeatherStation],
                                    all_planning_area_stations: List[PlanningWeatherStation]) -> Dict[int, int]:
    """ Return next highest ordering for each planning area """
    next_order_by_planning_area = {}
    key = attrgetter('planning_area_id')
    # Group both lists by planning area (groupby requires pre-sorted input).
    # The original `list(map(lambda x: x, values))` was an identity map;
    # `list(values)` is equivalent.
    updated_stations_by_planning_area = {
        k: list(values)
        for k, values in groupby(sorted(station_with_order_updates, key=key), key)
    }
    all_stations_by_planning_area = {
        k: list(values)
        for k, values in groupby(sorted(all_planning_area_stations, key=key), key)
    }
    for planning_area_id, planning_area_stations in all_stations_by_planning_area.items():
        updated_stations = updated_stations_by_planning_area.get(planning_area_id, [])
        next_order_by_planning_area[planning_area_id] = get_next_order(updated_stations, planning_area_stations)
    return next_order_by_planning_area
def get_next_order(updated_stations: List[PlanningWeatherStation], other_stations: List[PlanningWeatherStation]):
    """
    Returns the next order for a list of planning stations based on updated and existing stations.
    Updated stations include additions and removals, so the next order could be smaller than the
    max order in the existing stations list.
    """
    updated_orders = [s.order_of_appearance_in_planning_area_list for s in updated_stations]
    if updated_orders:
        return max(updated_orders) + 1
    # An existing station could be removed and hence have no order
    existing_orders = [
        s.order_of_appearance_in_planning_area_list
        for s in other_stations
        if s.order_of_appearance_in_planning_area_list is not None
    ]
    if existing_orders:
        return max(existing_orders) + 1
    return 1
def METHOD_NAME(stations: List[PlanningWeatherStation]):
    """Return the distinct planning area ids present in ``stations``."""
    unique_ids = set()
    for station in stations:
        unique_ids.add(station.planning_area_id)
    return list(unique_ids)
6,016 | get group urls | __all__ = ["PluginConfigMixin"]
from django import forms
from rest_framework import serializers
from sentry.exceptions import PluginError
from sentry.utils.forms import form_to_config
from .providers import ProviderMixin
from .validators import DEFAULT_VALIDATORS
VALIDATOR_ERRORS = (forms.ValidationError, serializers.ValidationError, PluginError)
ERR_FIELD_REQUIRED = "This field is required."
# TODO(dcramer): replace one-off validation code with standardized validator
# (e.g. project_plugin_details.py)
class ConfigValidator:
    """Validates a plugin config payload against its field definitions.

    ``config`` is a list of field-definition dicts, each with at least a
    ``name`` and optionally ``required``, ``type`` and ``validators``.
    After ``is_valid()`` runs, ``self.result`` holds the cleaned values and
    ``self.errors`` maps field names to error strings.
    """

    def __init__(self, config, data=None, initial=None, context=None):
        self.errors = {}
        self.result = {}
        self.context = context or {}
        # Index the field definitions by name for lookup in validate_field().
        self.config = {f["name"]: f for f in config}
        self._data = data or {}
        self._initial = initial or {}
        self._validated = False

    def is_valid(self):
        """Run validation over every configured field.

        :returns: True when no field produced an error.
        """
        data = self._data
        initial = self._initial
        cleaned = self.result
        errors = self.errors
        for field in self.config.values():
            key = field["name"]
            # Submitted data wins over the initial (pre-existing) value.
            value = data.get(key, initial.get(key))
            if field.get("required") and not value:
                errors[key] = ERR_FIELD_REQUIRED
            try:
                value = self.validate_field(name=key, value=value)
            except (forms.ValidationError, serializers.ValidationError, PluginError) as e:
                errors[key] = str(e)
            # Only record the cleaned value when the field has no error.
            if not errors.get(key):
                cleaned[key] = value
        self._validated = True
        return not errors

    def validate_field(self, name, value):
        """Validate one field's value and return the cleaned value.

        Subclasses may override, e.g.::

            if name == 'foo' and value != 'bar':
                raise PluginError('foo must be bar')
            return value
        """
        field = self.config[name]
        if value is None:
            if field.get("required"):
                raise PluginError("Field is required")
            return value
        if isinstance(value, str):
            value = value.strip()
            # TODO(dcramer): probably should do something with default
            # validations here, though many things will end up bring string
            # based
            if not value and field.get("required"):
                raise PluginError("Field is required")
        # Type-level default validators first, then field-specific ones.
        for validator in DEFAULT_VALIDATORS.get(field["type"], ()):
            value = validator(value=value)
        for validator in field.get("validators", ()):
            value = validator(value=value, **self.context)
        return value
class PluginConfigMixin(ProviderMixin):
    """Mixin providing config introspection and validation for plugins."""

    # Static asset descriptors exposed by the plugin (if any).
    asset_key = None
    assets = []

    def get_assets(self):
        return self.assets

    def get_metadata(self):
        """
        Return extra metadata which is used to represent this plugin.
        This is available via the API, and commonly used for runtime
        configuration that changes per-install, but not per-project.
        """
        return {}

    def get_config(self, project, **kwargs):
        # Derive the config-field list from the project conf form, when set.
        form = self.project_conf_form
        if not form:
            return []
        return form_to_config(form)

    def validate_config_field(self, project, name, value, actor=None):
        """
        Validate a single config field's value; returns the cleaned value.

        Subclasses may override, e.g.::

            if name == 'foo' and value != 'bar':
                raise PluginError('foo must be bar')
            return value
        """
        for config in self.get_config(project=project, user=actor):
            if config["name"] != name:
                continue
            if value is None:
                if config.get("required"):
                    raise PluginError("Field is required")
                # Secret fields fall back to the currently stored option.
                if config.get("type") == "secret":
                    value = self.get_option(name, project)
                return value
            if isinstance(value, str):
                value = value.strip()
                # TODO(dcramer): probably should do something with default
                # validations here, though many things will end up bring string
                # based
                if not value:
                    if config.get("required"):
                        raise PluginError("Field is required")
                    if config.get("type") == "secret":
                        value = self.get_option(name, project)
            # Type-level default validators first, then field-specific ones.
            for validator in DEFAULT_VALIDATORS.get(config["type"], ()):
                value = validator(project=project, value=value, actor=actor)
            for validator in config.get("validators", ()):
                value = validator(value, project=project, actor=actor)
            return value
        return value

    def validate_config(self, project, config, actor=None):
        """
        Validate the full config dict; returns the (possibly adjusted) config.

        Subclasses may override, e.g.::

            if config['foo'] and not config['bar']:
                raise PluginError('You cannot configure foo with bar')
            return config
        """
        return config

    def METHOD_NAME(self):
        return []

    def get_project_urls(self):
        return []

    def setup(self, bindings):
        pass

    @staticmethod
    def feature_flag_name(f):
        """
        For the time being, we want the features for plugins to be treated separately than integrations
        (integration features prefix with integrations-). This is because in Saas Sentry,
        users can install the Trello and Asana plugins but not Jira even though both utilize issue-commits.
        By not prefixing, we can avoid making new feature flags for data-forwarding which are restricted.
        """
        return f
6,017 | orders | """
Logic to provide consistency across exchanges
Copyright (C) 2021 Emerson Dove
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import abc
from datetime import datetime as dt
from typing import Union
import pandas
from blankly.exchanges.interfaces.abc_base_exchange_interface import ABCBaseExchangeInterface
from blankly.utils.utils import AttributeDict
from blankly.exchanges.METHOD_NAME.limit_order import LimitOrder
from blankly.exchanges.METHOD_NAME.market_order import MarketOrder
class ABCExchangeInterface(ABCBaseExchangeInterface, abc.ABC):
    """Abstract interface that every concrete exchange implementation must
    satisfy, providing a uniform API for accounts, orders and market data."""

    @abc.abstractmethod
    def __init__(self, exchange_name, authenticated_api):
        """
        Create an abstract exchange interface
        Args:
            exchange_name (str): Define exchange name ex: 'binance' or 'coinbase_pro'
            authenticated_api (obj): Authenticated direct calls object
        """
        pass

    @abc.abstractmethod
    def get_calls(self):
        """
        Get the direct & authenticated exchange object
        Returns:
            The exchange's direct calls object. A blankly Bot class should have immediate access to this by
            default
        """
        pass

    @abc.abstractmethod
    def get_exchange_type(self):
        """
        Get the type of exchange ex: "coinbase_pro" or "binance"
        Returns:
            A string that corresponds to the type of exchange
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def get_products(self) -> list:
        """
        Get all trading pairs on the exchange & some information about the exchange limits.
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def get_account(self,
                    symbol: str = None) -> AttributeDict:
        """
        Get all assets in an account, or sort by symbol/account_id
        Args:
            symbol (Optional): Filter by particular symbol
            These arguments are mutually exclusive
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def market_order(self,
                     symbol: str,
                     side: str,
                     size: float) -> MarketOrder:
        """
        Used for buying or selling market orders
        Args:
            symbol: asset to buy
            side: buy/sell
            size: desired amount of base asset to use
        """
        pass

    @abc.abstractmethod
    def limit_order(self,
                    symbol: str,
                    side: str,
                    price: float,
                    size: float) -> LimitOrder:
        """
        Used for buying or selling limit orders
        Args:
            symbol: asset to buy
            side: buy/sell
            price: price to set limit order
            size: amount of asset (like BTC) for the limit to be valued
        """
        pass

    @abc.abstractmethod
    def take_profit_order(self,
                          symbol: str,
                          price: float,
                          size: float) -> LimitOrder:
        """
        Take profit order
        Args:
            symbol: asset to buy
            price: price to sell at
            size: amount of asset (like BTC)
        """
        pass

    @abc.abstractmethod
    def stop_loss_order(self,
                        symbol: str,
                        price: float,
                        size: float) -> LimitOrder:
        """
        Stop loss order
        Args:
            symbol: asset to buy
            price: price to sell at
            size: amount of asset (like BTC)
        """
        pass

    # NOTE: like the other abstract methods, this one's body is just its
    # docstring (an explicit `pass` is not required after a docstring).
    @abc.abstractmethod
    def cancel_order(self,
                     symbol: str,
                     order_id: str) -> dict:
        """
        Cancel an order on a particular symbol & order id
        Args:
            symbol: This is the asset id that the order is under
            order_id: The unique ID of the order.
        TODO add return example
        """

    @abc.abstractmethod
    def get_open_orders(self,
                        symbol: str = None) -> list:
        """
        List open orders.
        Args:
            symbol (optional) (str): Asset such as BTC-USD
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def get_order(self,
                  symbol: str,
                  order_id: str) -> dict:
        """
        Get a certain order
        Args:
            symbol: Asset that the order is under
            order_id: The unique ID of the order.
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def get_fees(self, symbol: str) -> dict:
        """
        Get market fees
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def get_order_filter(self,
                         symbol: str) -> dict:
        """
        Find order limits for the exchange
        Args:
            symbol: The asset such as (BTC-USD, or MSFT)
        TODO add return example
        """
        pass

    @abc.abstractmethod
    def get_price(self,
                  symbol: str) -> float:
        """
        Returns just the price of a symbol.
        Args:
            symbol: The asset such as (BTC-USD, or MSFT)
        TODO add return example
        """
        pass

    # Convenience property forms of the account/order accessors above.
    @property
    @abc.abstractmethod
    def account(self) -> AttributeDict:
        """
        Get all assets in an account, or sort by assets/account_id
        TODO add return example
        """
        pass

    @property
    @abc.abstractmethod
    def METHOD_NAME(self) -> list:
        """
        List open orders.
        TODO add return example
        """
        pass

    @property
    @abc.abstractmethod
    def cash(self) -> float:
        """
        Get the amount of cash in a portfolio. The cash default is set in the settings .json file
        """
        pass
6,018 | unpair | """
Support for Bluetooth (using BlueZ in Linux).
The following packages are required packages for this module:
bluez >= 5.7
bluez-libs >= 5.7
bluez-utils >= 5.7
pybluez >= 0.18
"""
import shlex
import salt.utils.validate.net
from salt.exceptions import CommandExecutionError
HAS_PYBLUEZ = False
try:
import bluetooth # pylint: disable=import-error
HAS_PYBLUEZ = True
except ImportError:
pass
__func_alias__ = {"address_": "address"}
# Define the module's virtual name
__virtualname__ = "bluetooth"
def __virtual__():
    """
    Only load the module if bluetooth is installed
    """
    if not HAS_PYBLUEZ:
        return (
            False,
            "The bluetooth execution module cannot be loaded: bluetooth not installed.",
        )
    return __virtualname__
def version():
    """
    Return Bluez version from bluetoothd -v
    CLI Example:
    .. code-block:: bash
        salt '*' bluetoothd.version
    """
    cmd = "bluetoothctl -v"
    out = __salt__["cmd.run"](cmd).splitlines()
    bluez_version = out[0]
    # PyBluez did not expose __version__ before 0.18; keep a fallback marker.
    pybluez_version = "<= 0.18 (Unknown, but installed)"
    try:
        pybluez_version = bluetooth.__version__
    except AttributeError:
        # Older PyBluez without a __version__ attribute; use the fallback.
        # (Narrowed from a broad `except Exception` that bound an unused
        # variable; attribute access can only raise AttributeError here.)
        pass
    return {"Bluez": bluez_version, "PyBluez": pybluez_version}
def address_():
    """
    Get the many addresses of the Bluetooth adapter

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.address
    """
    ret = {}
    cmd = "hciconfig"
    out = __salt__["cmd.run"](cmd).splitlines()
    # Track the adapter currently being parsed: hciconfig groups each
    # adapter's detail lines under a leading "hciN:" header line.
    dev = ""
    for line in out:
        if line.startswith("hci"):
            comps = line.split(":")
            dev = comps[0]
            ret[dev] = {
                "device": dev,
                "path": "/sys/class/bluetooth/{}".format(dev),
            }
        if "BD Address" in line:
            # e.g. "BD Address: DE:AD:BE:EF:CA:FE  ACL MTU: ..." -> token 2
            comps = line.split()
            ret[dev]["address"] = comps[2]
        if "DOWN" in line:
            ret[dev]["power"] = "off"
        if "UP RUNNING" in line:
            ret[dev]["power"] = "on"
    return ret
def power(dev, mode):
    """
    Power a bluetooth device on or off

    CLI Examples:

    .. code-block:: bash

        salt '*' bluetooth.power hci0 on
        salt '*' bluetooth.power hci0 off
    """
    if dev not in address_():
        raise CommandExecutionError("Invalid dev passed to bluetooth.power")
    # Accept either the literal string "on" or boolean True as "power up";
    # anything else powers the adapter down.
    if mode == "on" or mode is True:
        state, mode = "up", "on"
    else:
        state, mode = "down", "off"
    __salt__["cmd.run"]("hciconfig {} {}".format(dev, state)).splitlines()
    # Re-read adapter state to confirm the change actually took effect.
    return address_()[dev]["power"] == mode
def discoverable(dev):
    """
    Enable this bluetooth device to be discoverable.

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.discoverable hci0
    """
    if dev not in address_():
        raise CommandExecutionError("Invalid dev passed to bluetooth.discoverable")
    # Enable inquiry scan, then re-query the adapter to verify the state.
    __salt__["cmd.run"]("hciconfig {} iscan".format(dev)).splitlines()
    status = __salt__["cmd.run"]("hciconfig {}".format(dev))
    return "UP RUNNING ISCAN" in status
def noscan(dev):
    """
    Turn off scanning modes on this device.

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.noscan hci0
    """
    if dev not in address_():
        raise CommandExecutionError("Invalid dev passed to bluetooth.noscan")
    # Disable all scan modes, then verify no SCAN flag remains reported.
    __salt__["cmd.run"]("hciconfig {} noscan".format(dev)).splitlines()
    status = __salt__["cmd.run"]("hciconfig {}".format(dev))
    return "SCAN" not in status
def scan():
    """
    Scan for bluetooth devices in the area

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.scan
    """
    # discover_devices yields (address, name) pairs; return one single-entry
    # dict per device, as before.
    return [
        {addr: name}
        for addr, name in bluetooth.discover_devices(lookup_names=True)
    ]
def block(bdaddr):
    """
    Block a specific bluetooth device by BD Address

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.block DE:AD:BE:EF:CA:FE
    """
    if not salt.utils.validate.net.mac(bdaddr):
        raise CommandExecutionError("Invalid BD address passed to bluetooth.block")
    # NOTE(review): hciconfig normally takes an adapter name (e.g. hci0) in
    # this position rather than a BD address -- confirm this invocation works.
    cmd = "hciconfig {} block".format(bdaddr)
    __salt__["cmd.run"](cmd).splitlines()
def unblock(bdaddr):
    """
    Unblock a specific bluetooth device by BD Address

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.unblock DE:AD:BE:EF:CA:FE
    """
    if not salt.utils.validate.net.mac(bdaddr):
        raise CommandExecutionError("Invalid BD address passed to bluetooth.unblock")
    # NOTE(review): as with block(), hciconfig usually expects an adapter
    # name here, not a BD address -- verify before relying on this.
    cmd = "hciconfig {} unblock".format(bdaddr)
    __salt__["cmd.run"](cmd).splitlines()
def pair(address, key):
    """
    Pair the bluetooth adapter with a device

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.pair DE:AD:BE:EF:CA:FE 1234

    Where DE:AD:BE:EF:CA:FE is the address of the device to pair with, and 1234
    is the passphrase.

    TODO: This function is currently broken, as the bluez-simple-agent program
    no longer ships with BlueZ >= 5.0. It needs to be refactored.
    """
    if not salt.utils.validate.net.mac(address):
        raise CommandExecutionError("Invalid BD address passed to bluetooth.pair")
    # The passphrase must be numeric; reject anything int() cannot parse.
    try:
        int(key)
    except Exception:  # pylint: disable=broad-except
        raise CommandExecutionError(
            "bluetooth.pair requires a numerical key to be used"
        )
    addy = address_()
    # NOTE(review): address_() returns a dict keyed by adapter name (hci0...),
    # so addy["device"] looks like it would raise KeyError -- consistent with
    # the TODO above marking this function as broken; verify before use.
    cmd = "echo {} | bluez-simple-agent {} {}".format(
        shlex.quote(addy["device"]), shlex.quote(address), shlex.quote(key)
    )
    out = __salt__["cmd.run"](cmd, python_shell=True).splitlines()
    return out
def METHOD_NAME(address):
    """
    Unpair the bluetooth adapter from a device

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.unpair DE:AD:BE:EF:CA:FE

    Where DE:AD:BE:EF:CA:FE is the address of the device to unpair.

    TODO: This function is currently broken, as the bluez-test-device program
    used below no longer ships with BlueZ >= 5.0. It needs to be refactored.
    """
    if not salt.utils.validate.net.mac(address):
        raise CommandExecutionError("Invalid BD address passed to bluetooth.unpair")
    cmd = "bluez-test-device remove {}".format(address)
    out = __salt__["cmd.run"](cmd).splitlines()
    return out
def start():
    """
    Start the bluetooth service.

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.start
    """
    # Delegate to the generic service module and return its result directly.
    return __salt__["service.start"]("bluetooth")
def stop():
    """
    Stop the bluetooth service.

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.stop
    """
    # Delegate to the generic service module and return its result directly.
    return __salt__["service.stop"]("bluetooth")
6,019 | shell lookup | import shutil
from buildtest.exceptions import BuildTestError
from buildtest.utils.command import BuildTestCommand
from buildtest.utils.file import is_file
def get_shells():
    """Return the login shells listed in ``/etc/shells``.

    Comment lines are filtered out by running ``grep '^[^#]' /etc/shells``.
    If the file does not exist an empty list is returned.

    .. code-block:: console

        $ grep '^[^#]' /etc/shells
        /bin/bash
        /bin/sh
        ...

    Returns:
        list: Full paths to the shells found in /etc/shells
    """
    etc_shell = "/etc/shells"
    # No /etc/shells on this system: nothing to report.
    if not is_file(etc_shell):
        return []
    query = BuildTestCommand(f"grep '^[^#]' {etc_shell}")
    query.execute()
    return [entry.strip() for entry in query.get_output()]
def get_python_shells():
    """Return full paths to every ``python`` and ``python3`` found in $PATH,
    as reported by ``which -a python python3``.

    .. code-block:: console

        $ which -a python3 python
        /usr/local/bin/python3
        /usr/bin/python3
        /usr/bin/python

    Returns:
        list: A list of full path to python shells
    """
    # We need the external `which` utility itself before we can query it.
    if not shutil.which("which"):
        raise BuildTestError("Unable to find program 'which'. Please install 'which' ")
    locate = BuildTestCommand("which -a python python3")
    locate.execute()
    return [entry.strip() for entry in locate.get_output()]
def METHOD_NAME():
    """Return a dict mapping each shell family (bash/sh/csh/zsh) to a list
    containing the bare shell name followed by every interpreter path that
    ``which -a`` reports for it.
    """
    lookup = {"bash": ["bash"], "sh": ["sh"], "csh": ["csh"], "zsh": ["zsh"]}
    for family, interpreters in lookup.items():
        finder = BuildTestCommand(f"which -a {family}")
        finder.execute()
        # Append every discovered path after the bare name already present.
        interpreters.extend(entry.strip() for entry in finder.get_output())
    return lookup
def is_bash_shell(name):
    """Return ``True`` if specified shell is valid bash shell.

    ``shell_dict`` maps shell family -> ["bash", "/usr/bin/bash", ...] as
    discovered by ``which -a`` at import time.

    >>> is_bash_shell("bash")
    True
    >>> is_bash_shell("/bin/bash")
    True
    """
    return name in shell_dict["bash"]
def is_sh_shell(name):
    """Return ``True`` if specified shell is valid sh shell.

    >>> is_sh_shell("sh")
    True
    >>> is_sh_shell("/bin/sh")
    True
    """
    return name in shell_dict["sh"]
def is_csh_shell(name):
    """Return ``True`` if specified shell is valid csh shell (bare name or
    any full path discovered at import time)."""
    return name in shell_dict["csh"]
def is_zsh_shell(name):
    """Return ``True`` if specified shell is valid zsh shell (bare name or
    any full path discovered at import time)."""
    return name in shell_dict["zsh"]
# Discover available shells once at import time; these module-level lookup
# tables back the is_*_shell() helpers and Shell validation below.
python_shells = get_python_shells()
system_shells = get_shells()
shell_dict = METHOD_NAME()
class Shell:
    """Parse and validate a shell invocation string (program name + options),
    resolving the program to a full path and shebang line."""

    def __init__(self, shell="bash"):
        """The Shell initializer takes an input shell and shell options and split
        string by shell name and options.

        Args:
            shell (str): Specify shell program and any options passed to shell. Defaults to ``bash``

        Raises:
            BuildTestError: If ``shell`` is not a string, names an unknown
                shell, or the program cannot be found on $PATH.
        """
        # enforce input argument 'shell' to be a string
        if not isinstance(shell, str):
            raise BuildTestError(
                f"Invalid type for input: {shell} must be of type 'str'"
            )
        self.name = shell.split()[0]
        self.valid_shells = (
            system_shells
            + python_shells
            + ["bash", "csh", "tcsh", "sh", "zsh", "python", "python3"]
        )
        # if input shell is not in list of valid shells we raise error.
        if self.name not in self.valid_shells:
            raise BuildTestError(
                f"Invalid shell: {self.name} select from one of the following shells: {self.valid_shells}"
            )
        # Everything after the program name is treated as shell options.
        self._opts = " ".join(shell.split()[1:])
        # Assigning through the property setter resolves the full path via
        # shutil.which and builds the shebang.
        self.path = self.name

    @property
    def opts(self):
        """retrieve the shell opts that are set on init, and updated with setter"""
        return self._opts

    @opts.setter
    def opts(self, shell_opts):
        """Override the shell options in class attribute, this would be useful
        when shell options need to change due to change in shell program.
        """
        self._opts = shell_opts
        return self._opts

    @property
    def path(self):
        """This method returns the full path to shell program using ``shutil.which()``
        If shell program is not found we raise an exception. The shebang is
        is updated assuming path is valid which is just adding character '#!'
        in front of path. The return is full path to shell program. This method
        automatically updates the shell path when there is a change in attribute
        self.name

        >>> shell = Shell("bash")
        >>> shell.path
        '/usr/bin/bash'
        >>> shell.name="sh"
        >>> shell.path
        '/usr/bin/sh'
        """
        return self._path

    # Identity functions
    def __str__(self):
        return "[buildtest.shell][%s]" % self.name

    def __repr__(self):
        return self.__str__()

    @path.setter
    def path(self, name):
        """If the user provides a new path with a name, do same checks to
        ensure that it's found.
        """
        path = shutil.which(name)
        # raise an exception if shell program is not found
        if not path:
            raise BuildTestError(f"Can't find program: {name}")
        # Update the name now that we are sure path is found
        self.name = name
        # if input shell is not in list of valid shells we raise error.
        if self.name not in self.valid_shells:
            raise BuildTestError(
                f"Please select one of the following shells: {self.valid_shells}"
            )
        self._path = path
        # shebang is formed by adding the char '#!' with path to program
        self.shebang = f"#!{path}"

    def get(self):
        """Return shell attributes as a dictionary"""
        return {
            "name": self.name,
            "opts": self._opts,
            "path": self._path,
            "shebang": self.shebang,
        }
6,020 | custom response | import os
import json
try:
from urllib.parse import parse_qs
except ImportError:
from urlparse import parse_qs
import boto3.session
from chalice import Chalice, BadRequestError, NotFoundError, Response,\
CORSConfig, UnauthorizedError, AuthResponse, AuthRoute
# This is a test app that is used by integration tests.
# This app exercises all the major features of chalice
# and helps prevent regressions.
app = Chalice(app_name=os.environ['APP_NAME'])
app.websocket_api.session = boto3.session.Session()
app.experimental_feature_flags.update([
'WEBSOCKETS'
])
app.api.binary_types.append('application/binary')
@app.authorizer(ttl_seconds=300)
def dummy_auth(auth_request):
    """Built-in authorizer: the literal token 'yes' grants access."""
    if auth_request.token == 'yes':
        return AuthResponse(
            # Grants GET on /builtin-auth and POST on /fake-profile only.
            routes=['/builtin-auth',
                    AuthRoute('/fake-profile', methods=['POST'])],
            context={'foo': 'bar'},
            principal_id='foo'
        )
    else:
        raise UnauthorizedError('Authorization failed')
@app.route('/')
def index():
    """Smoke-test route returning a static JSON body."""
    return {'hello': 'world'}


@app.route('/a/b/c/d/e/f/g')
def nested_route():
    """Exercises deeply nested literal path segments."""
    return {'nested': True}


@app.route('/path/{name}')
def supports_path_params(name):
    """Echo the captured path parameter back to the caller."""
    return {'path': name}


@app.route('/singledoc')
def single_doc():
    """Single line docstring."""
    return {'docstring': 'single'}


@app.route('/multidoc')
def multi_doc():
    """Multi-line docstring.

    And here is another line.
    """
    return {'docstring': 'multi'}
@app.route('/post', methods=['POST'])
def supports_only_post():
    """Route registered for POST only."""
    return {'success': True}


@app.route('/put', methods=['PUT'])
def supports_only_put():
    """Route registered for PUT only."""
    return {'success': True}


@app.route('/jsonpost', methods=['POST'])
def supports_post_body_as_json():
    """Echo the auto-parsed JSON request body."""
    json_body = app.current_request.json_body
    return {'json_body': json_body}


@app.route('/multimethod', methods=['GET', 'POST'])
def multiple_methods():
    """Report which HTTP method reached a multi-method route."""
    return {'method': app.current_request.method}
@app.route('/badrequest')
def bad_request_error():
    """Raises a chalice BadRequestError."""
    raise BadRequestError("Bad request.")


@app.route('/notfound')
def not_found_error():
    """Raises a chalice NotFoundError."""
    raise NotFoundError("Not found")


@app.route('/arbitrary-error')
def raise_arbitrary_error():
    """Raises an exception chalice does not translate itself."""
    raise TypeError("Uncaught exception")
@app.route('/formencoded', methods=['POST'],
           content_types=['application/x-www-form-urlencoded'])
def form_encoded():
    """Echo the parsed form-encoded request body back to the caller."""
    raw = app.current_request.raw_body.decode('utf-8')
    return {'parsed': parse_qs(raw)}
@app.route('/json-only', content_types=['application/json'])
def json_only():
    """Accepts only application/json request bodies."""
    return {'success': True}


@app.route('/cors', methods=['GET', 'POST', 'PUT'], cors=True)
def supports_cors():
    # It doesn't really matter what we return here because
    # we'll be checking the response headers to verify CORS support.
    return {'cors': True}


@app.route('/custom_cors', methods=['GET', 'POST', 'PUT'], cors=CORSConfig(
    allow_origin='https://foo.example.com',
    allow_headers=['X-Special-Header'],
    max_age=600,
    expose_headers=['X-Special-Header'],
    allow_credentials=True))
def supports_custom_cors():
    """Same as /cors but with a fully customized CORS configuration."""
    return {'cors': True}


@app.route('/todict', methods=['GET'])
def todict():
    """Serialize the current request object for inspection by tests."""
    return app.current_request.to_dict()
@app.route('/multifile')
def multifile():
    """Verify that vendored chalicelib/ packages deploy with the app."""
    from chalicelib import MESSAGE
    return {"message": MESSAGE}
@app.route('/custom-response', methods=['GET'])
def METHOD_NAME():
    """Return a 204 no-content response carrying multiple Set-Cookie values."""
    headers = {
        'Content-Type': 'text/plain',
        'Set-Cookie': ['key=value', 'foo=bar'],
    }
    return Response(status_code=204, body='', headers=headers)
@app.route('/api-key-required', methods=['GET'], api_key_required=True)
def api_key_required():
    """Requires a valid x-api-key header to reach the handler."""
    return {"success": True}
@app.route('/binary', methods=['POST'],
           content_types=['application/octet-stream'])
def binary_round_trip():
    """Echo a binary request body back with the same content type."""
    payload = app.current_request.raw_body
    return Response(
        payload,
        status_code=200,
        headers={'Content-Type': 'application/octet-stream'},
    )
@app.route('/custom-binary', methods=['POST'],
           content_types=['application/binary'])
def custom_binary_round_trip():
    """Echo a request body of the app-registered 'application/binary' type."""
    payload = app.current_request.raw_body
    return Response(
        payload,
        status_code=200,
        headers={'Content-Type': 'application/binary'},
    )
@app.route('/get-binary', methods=['GET'])
def binary_response():
    """Return a fixed 4-byte binary payload."""
    return Response(
        status_code=200,
        body=b'\xDE\xAD\xBE\xEF',
        headers={'Content-Type': 'application/octet-stream'},
    )
@app.route('/shared', methods=['GET'])
def shared_get():
    """GET half of a URI shared by two view functions."""
    return {'method': 'GET'}


@app.route('/shared', methods=['POST'])
def shared_post():
    """POST half of a URI shared by two view functions."""
    return {'method': 'POST'}


@app.route('/builtin-auth', authorizer=dummy_auth)
def builtin_auth():
    """Protected by the dummy_auth builtin authorizer."""
    return {'success': True, 'context': app.current_request.context}


# Testing a common use case where you can have read only GET access
# but you need to be auth'd to POST.
@app.route('/fake-profile', methods=['GET'])
def fake_profile_read_only():
    """Unauthenticated read access to the fake profile."""
    return {'success': True, 'context': app.current_request.context}


@app.route('/fake-profile', authorizer=dummy_auth,
           methods=['POST'])
def fake_profile_post():
    """Writes to the fake profile require the dummy authorizer."""
    return {'success': True, 'context': app.current_request.context}


@app.route('/repr-raw-body', methods=['POST'])
def repr_raw_body():
    """Echo the raw request body decoded as UTF-8."""
    return {'repr-raw-body': app.current_request.raw_body.decode('utf-8')}
# In-memory log of (connection_id, body) pairs received over the websocket.
SOCKET_MESSAGES = []


@app.on_ws_connect()
def connect(event):
    """No-op connect handler; required for the websocket API to accept."""
    pass


@app.on_ws_message()
def message(event):
    """Record the message and echo the full history back to the sender."""
    SOCKET_MESSAGES.append((event.connection_id, event.body))
    app.websocket_api.send(event.connection_id, json.dumps(SOCKET_MESSAGES))


@app.on_ws_disconnect()
def disconnect(event):
    """No-op disconnect handler."""
    pass
6,021 | spherical yn | from ._ufuncs import (_spherical_jn, _spherical_yn, _spherical_in,
_spherical_kn, _spherical_jn_d, _spherical_yn_d,
_spherical_in_d, _spherical_kn_d)
def spherical_jn(n, z, derivative=False):
    r"""Spherical Bessel function of the first kind or its derivative.

    Defined as [1]_,

    .. math:: j_n(z) = \sqrt{\frac{\pi}{2z}} J_{n + 1/2}(z),

    where :math:`J_n` is the Bessel function of the first kind.

    Parameters
    ----------
    n : int, array_like
        Order of the Bessel function (n >= 0).
    z : complex or float, array_like
        Argument of the Bessel function.
    derivative : bool, optional
        If True, return the derivative :math:`j_n'(z)` (computed via
        :math:`j_n' = j_{n-1} - \frac{n+1}{z} j_n`, with
        :math:`j_0' = -j_1`) instead of :math:`j_n(z)`.

    Returns
    -------
    jn : ndarray

    Notes
    -----
    .. versionadded:: 0.18.0

    References
    ----------
    .. [1] https://dlmf.nist.gov/10.47.E3
    .. [2] https://dlmf.nist.gov/10.51.E2
    """
    # Dispatch directly to the compiled ufunc for the value or derivative.
    return _spherical_jn_d(n, z) if derivative else _spherical_jn(n, z)
def METHOD_NAME(n, z, derivative=False):
    r"""Spherical Bessel function of the second kind or its derivative.

    Defined as [1]_,

    .. math:: y_n(z) = \sqrt{\frac{\pi}{2z}} Y_{n + 1/2}(z),

    where :math:`Y_n` is the Bessel function of the second kind.

    Parameters
    ----------
    n : int, array_like
        Order of the Bessel function (n >= 0).
    z : complex or float, array_like
        Argument of the Bessel function.
    derivative : bool, optional
        If True, return the derivative :math:`y_n'(z)` (computed via
        :math:`y_n' = y_{n-1} - \frac{n+1}{z} y_n`, with
        :math:`y_0' = -y_1`) instead of :math:`y_n(z)`.

    Returns
    -------
    yn : ndarray

    Notes
    -----
    .. versionadded:: 0.18.0

    References
    ----------
    .. [1] https://dlmf.nist.gov/10.47.E4
    .. [2] https://dlmf.nist.gov/10.51.E2
    """
    # Dispatch directly to the compiled ufunc for the value or derivative.
    return _spherical_yn_d(n, z) if derivative else _spherical_yn(n, z)
def spherical_in(n, z, derivative=False):
    r"""Modified spherical Bessel function of the first kind or its derivative.

    Defined as [1]_,

    .. math:: i_n(z) = \sqrt{\frac{\pi}{2z}} I_{n + 1/2}(z),

    where :math:`I_n` is the modified Bessel function of the first kind.

    Parameters
    ----------
    n : int, array_like
        Order of the Bessel function (n >= 0).
    z : complex or float, array_like
        Argument of the Bessel function.
    derivative : bool, optional
        If True, return the derivative :math:`i_n'(z)` (computed via
        :math:`i_n' = i_{n-1} - \frac{n+1}{z} i_n`, with
        :math:`i_0' = i_1`) instead of :math:`i_n(z)`.

    Returns
    -------
    in : ndarray

    Notes
    -----
    .. versionadded:: 0.18.0

    References
    ----------
    .. [1] https://dlmf.nist.gov/10.47.E7
    .. [2] https://dlmf.nist.gov/10.51.E5
    """
    # Dispatch directly to the compiled ufunc for the value or derivative.
    return _spherical_in_d(n, z) if derivative else _spherical_in(n, z)
def spherical_kn(n, z, derivative=False):
    r"""Modified spherical Bessel function of the second kind or its derivative.

    Defined as [1]_,

    .. math:: k_n(z) = \sqrt{\frac{\pi}{2z}} K_{n + 1/2}(z),

    where :math:`K_n` is the modified Bessel function of the second kind.

    Parameters
    ----------
    n : int, array_like
        Order of the Bessel function (n >= 0).
    z : complex or float, array_like
        Argument of the Bessel function.
    derivative : bool, optional
        If True, return the derivative :math:`k_n'(z)` (computed via
        :math:`k_n' = -k_{n-1} - \frac{n+1}{z} k_n`, with
        :math:`k_0' = -k_1`) instead of :math:`k_n(z)`.

    Returns
    -------
    kn : ndarray

    Notes
    -----
    .. versionadded:: 0.18.0

    References
    ----------
    .. [1] https://dlmf.nist.gov/10.47.E9
    .. [2] https://dlmf.nist.gov/10.51.E5
    """
    # Dispatch directly to the compiled ufunc for the value or derivative.
    return _spherical_kn_d(n, z) if derivative else _spherical_kn(n, z)
6,022 | flatten fun for sparse ad | # Copyright 2021 The JAX Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Sequence
import itertools
from typing import Any, Callable, Union
import jax
from jax._src import core
from jax import tree_util
from jax._src.api_util import _ensure_index, _ensure_index_tuple
from jax.util import safe_zip
from jax._src.util import split_list, wraps
from jax._src.traceback_util import api_boundary
from jax.experimental.sparse._base import JAXSparse
is_sparse = lambda x: isinstance(x, JAXSparse)
def METHOD_NAME(fun, argnums: Union[int, tuple[int, ...]], args: tuple[Any, ...]):
    """Flatten ``fun`` and ``args`` so sparse arrays differentiate only their data.

    Returns ``(fun_flat, argnums_flat, args_flat, postprocess_gradients)``:
    ``fun_flat`` accepts the fully flattened dense buffers, ``argnums_flat``
    marks the differentiable buffers (only the data buffer of each sparse
    argument), and ``postprocess_gradients`` re-wraps the flat gradients into
    sparse outputs.
    """
    argnums_tup = _ensure_index_tuple(argnums)
    assert all(0 <= argnum < len(args) for argnum in argnums_tup)
    # We do a two-step flattening to figure out how argnums maps to args_flat.
    # First, flatten arguments to a list containing sparse and dense objects.
    args_flat1, tree1 = tree_util.tree_flatten(args, is_leaf=is_sparse)
    *leaf_argnums1, end = split_list(range(tree1.num_leaves),
                                     [child.num_leaves for child in tree1.children()])
    assert not end
    argnums_flat1 = list(itertools.chain.from_iterable(
        nums for i, nums in enumerate(leaf_argnums1) if i in argnums_tup))
    # Next, fully flatten to a list of dense buffers.
    args_flat, tree2 = tree_util.tree_flatten(args_flat1)
    *leaf_argnums2, end = split_list(range(tree2.num_leaves),
                                     [child.num_leaves for child in tree2.children()])
    assert not end
    # For sparse args, we only mark the first buffer (the data) for differentiation.
    leaf_argnums2 = [nums[:1] if is_sparse(arg) else nums
                     for arg, nums in safe_zip(args_flat1, leaf_argnums2)]
    argnums_flat = tuple(itertools.chain.from_iterable(
        nums for i, nums in enumerate(leaf_argnums2) if i in argnums_flat1))

    def fun_flat(*args_flat, **kwargs):
        # Rebuild the original (possibly sparse) arguments and call fun.
        args = tree_util.tree_unflatten(tree1, tree_util.tree_unflatten(tree2, args_flat))
        return fun(*args, **kwargs)

    def reconstruct(i, grad_out):
        # Re-attach the non-differentiated buffers (e.g. sparse indices) to
        # the gradient of the data buffer; vmap over any extra leading dims
        # that jacfwd/jacrev added to the output.
        bufs, tree = tree_util.tree_flatten(args_flat1[i])
        f_recons = lambda g: tree_util.tree_unflatten(tree, [g, *bufs[1:]])
        for _ in range(grad_out.ndim - bufs[0].ndim):
            f_recons = jax.vmap(f_recons)
        return f_recons(grad_out)

    def postprocess_gradients(grads_out):
        out = [reconstruct(*args) for args in safe_zip(argnums_flat1, grads_out)]
        # Mirror jax.grad's convention: an int argnums yields a single value.
        return out[0] if isinstance(argnums, int) else out

    return fun_flat, argnums_flat, args_flat, postprocess_gradients
def value_and_grad(fun: Callable, argnums: Union[int, Sequence[int]] = 0,
                   has_aux=False, **kwargs) -> Callable[..., tuple[Any, Any]]:
    """Sparse-aware version of :func:`jax.value_and_grad`

    Arguments and return values are the same as :func:`jax.value_and_grad`, but when
    taking the gradient with respect to a :class:`jax.experimental.sparse` array, the
    gradient is computed in the subspace defined by the array's sparsity pattern.

    Example:

      >>> from jax.experimental import sparse
      >>> X = sparse.BCOO.fromdense(jnp.arange(6.))
      >>> y = jnp.ones(6)
      >>> sparse.value_and_grad(lambda X, y: X @ y)(X, y)
      (Array(15., dtype=float32), BCOO(float32[6], nse=5))
    """
    # Built only so @wraps can reuse its docstring on the wrapper below.
    raw_value_and_grad_fun = jax.value_and_grad(fun, argnums=argnums, has_aux=has_aux, **kwargs)
    argnums = core.concrete_or_error(_ensure_index, argnums)

    @wraps(fun, docstr=raw_value_and_grad_fun.__doc__, argnums=argnums)
    @api_boundary
    def value_and_grad_fun(*args, **kwargs):
        # Differentiate only the data buffers of sparse args, then re-wrap
        # the flat gradients as sparse arrays.
        fun_flat, argnums_flat, args_flat, postprocess_gradients = METHOD_NAME(fun, argnums, args)
        val_out, grad_out = jax.value_and_grad(fun_flat, argnums=argnums_flat, has_aux=has_aux, **kwargs)(*args_flat)
        return val_out, postprocess_gradients(grad_out)

    return value_and_grad_fun
def grad(fun: Callable, argnums: Union[int, Sequence[int]] = 0,
         has_aux=False, **kwargs) -> Callable:
    """Sparse-aware version of :func:`jax.grad`

    Arguments and return values are the same as :func:`jax.grad`, but when taking
    the gradient with respect to a :class:`jax.experimental.sparse` array, the
    gradient is computed in the subspace defined by the array's sparsity pattern.

    Example:

      >>> from jax.experimental import sparse
      >>> X = sparse.BCOO.fromdense(jnp.arange(6.))
      >>> y = jnp.ones(6)
      >>> sparse.grad(lambda X, y: X @ y)(X, y)
      BCOO(float32[6], nse=5)
    """
    # Built only so @wraps can reuse its docstring on the wrapper below.
    raw_grad_fun = jax.grad(fun, argnums=argnums, **kwargs)
    argnums = core.concrete_or_error(_ensure_index, argnums)

    @wraps(fun, docstr=raw_grad_fun.__doc__, argnums=argnums)
    @api_boundary
    def grad_fun(*args, **kwargs):
        # Differentiate only the data buffers of sparse args, then re-wrap.
        fun_flat, argnums_flat, args_flat, postprocess_gradients = METHOD_NAME(fun, argnums, args)
        out = jax.grad(fun_flat, argnums=argnums_flat, has_aux=has_aux, **kwargs)(*args_flat)
        if has_aux:
            return postprocess_gradients(out[0]), out[1]
        return postprocess_gradients(out)

    return grad_fun
def jacfwd(fun: Callable, argnums: Union[int, Sequence[int]] = 0,
           has_aux: bool = False, **kwargs) -> Callable:
    """Sparse-aware version of :func:`jax.jacfwd`

    Arguments and return values are the same as :func:`jax.jacfwd`, but when taking
    the gradient with respect to a :class:`jax.experimental.sparse` array, the
    gradient is computed in the subspace defined by the array's sparsity pattern.

    Currently this is only implemented for dense outputs.
    """
    # Built only so @wraps can reuse its docstring on the wrapper below.
    raw_jacfwd_fun = jax.jacfwd(fun, argnums=argnums, **kwargs)
    argnums = core.concrete_or_error(_ensure_index, argnums)

    @wraps(fun, docstr=raw_jacfwd_fun.__doc__, argnums=argnums)
    @api_boundary
    def jacfwd_fun(*args, **kwargs):
        # Differentiate only the data buffers of sparse args, then re-wrap.
        fun_flat, argnums_flat, args_flat, postprocess_gradients = METHOD_NAME(fun, argnums, args)
        out = jax.jacfwd(fun_flat, argnums=argnums_flat, has_aux=has_aux, **kwargs)(*args_flat)
        if has_aux:
            return postprocess_gradients(out[0]), out[1]
        return postprocess_gradients(out)

    return jacfwd_fun
def jacrev(fun: Callable, argnums: Union[int, Sequence[int]] = 0,
           has_aux: bool = False, **kwargs) -> Callable:
    """Sparse-aware version of :func:`jax.jacrev`

    Arguments and return values are the same as :func:`jax.jacrev`, but when taking
    the gradient with respect to a :class:`jax.experimental.sparse` array, the
    gradient is computed in the subspace defined by the array's sparsity pattern.

    Currently this is only implemented for dense outputs.
    """
    # Built only so @wraps can reuse its docstring on the wrapper below.
    raw_jacrev_fun = jax.jacrev(fun, argnums=argnums, **kwargs)
    argnums = core.concrete_or_error(_ensure_index, argnums)

    @wraps(fun, docstr=raw_jacrev_fun.__doc__, argnums=argnums)
    @api_boundary
    def jacrev_fun(*args, **kwargs):
        # Differentiate only the data buffers of sparse args, then re-wrap.
        fun_flat, argnums_flat, args_flat, postprocess_gradients = METHOD_NAME(fun, argnums, args)
        out = jax.jacrev(fun_flat, argnums=argnums_flat, has_aux=has_aux, **kwargs)(*args_flat)
        if has_aux:
            return postprocess_gradients(out[0]), out[1]
        return postprocess_gradients(out)

    return jacrev_fun
jacobian = jacrev |
6,023 | supervised | from __future__ import annotations
from abc import abstractmethod
from asyncio import CancelledError, Condition, Task, as_completed, create_task, wait
from dataclasses import dataclass
from pathlib import Path
from time import monotonic
from typing import (
Any,
AsyncIterator,
Awaitable,
Generic,
MutableSequence,
Optional,
Protocol,
Sequence,
TypeVar,
)
from uuid import UUID, uuid4
from weakref import WeakSet
from pynvim_pp.logging import suppress_and_log
from std2.aitertools import aenumerate
from std2.asyncio import cancel
from .settings import (
BaseClient,
CompleteOptions,
Display,
Limits,
MatchOptions,
Weights,
)
from .timeit import TracingLocker, timeit
from .types import Completion, Context
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", contravariant=True)
_O_co = TypeVar("_O_co", contravariant=True, bound=BaseClient)
@dataclass(frozen=True)
class Metric:
    """A completion item together with the ranking data computed for it."""

    # Identifies the worker run that produced this item (one UUID per pass).
    instance: UUID
    comp: Completion
    # Per-source adjustment applied on top of the base ranking weights.
    weight_adjust: float
    weight: Weights
    # Display widths used to align the completion popup columns.
    label_width: int
    kind_width: int
class PReviewer(Protocol[_T]):
    """Protocol for the ranking component driven by the Supervisor.

    Lifecycle: ``register()`` once per source; then, per completion pass:
    ``begin()`` -> ``s_begin()`` per worker -> ``trans()`` per item ->
    ``s_end()`` per worker.
    """

    async def register(self, assoc: BaseClient) -> None:
        ...

    async def begin(self, context: Context) -> _T:
        ...

    async def s_begin(self, token: _T, assoc: BaseClient, instance: UUID) -> None:
        ...

    def trans(self, token: _T, instance: UUID, completion: Completion) -> Metric:
        ...

    async def s_end(
        self, instance: UUID, interrupted: bool, elapsed: float, items: int
    ) -> None:
        ...
class Supervisor:
    """Fans a completion request out to all registered workers and gathers
    ranked metrics, cancelling any in-flight collection it supersedes."""

    def __init__(
        self,
        vars_dir: Path,
        display: Display,
        match: MatchOptions,
        comp: CompleteOptions,
        limits: Limits,
        reviewer: PReviewer,
    ) -> None:
        self.vars_dir = vars_dir
        self.match, self.display = match, display
        self.comp, self.limits = comp, limits
        self._reviewer = reviewer
        # Notified via notify_idle(); interested parties can wait on this.
        self.idling = Condition()
        # Weak references: dropped workers unregister themselves implicitly.
        self._workers: WeakSet[Worker] = WeakSet()
        self._lock = TracingLocker(name="Supervisor", force=True)
        # The currently running collect() task, if any.
        self._work_task: Optional[Task] = None

    async def register(self, worker: Worker, assoc: BaseClient) -> None:
        # Introduce the source to the reviewer before tracking the worker.
        with suppress_and_log():
            await self._reviewer.register(assoc)
            self._workers.add(worker)

    async def notify_idle(self) -> None:
        async with self.idling:
            self.idling.notify_all()

    async def interrupt(self) -> None:
        # Detach the current collection task before awaiting its cancellation.
        task = self._work_task
        self._work_task = None
        if task:
            await cancel(task)

    def collect(self, context: Context) -> Awaitable[Sequence[Metric]]:
        now = monotonic()
        # Manual (user-invoked) completion gets a more generous deadline.
        timeout = (
            self.limits.completion_manual_timeout
            if context.manual
            else self.limits.completion_auto_timeout
        )

        async def cont(prev: Optional[Task]) -> Sequence[Metric]:
            # Supersede any previous collection before starting this one.
            with timeit("CANCEL -- ALL"):
                if prev:
                    await cancel(prev)

            with suppress_and_log(), timeit("COLLECTED -- ALL"):
                async with self._lock:
                    acc: MutableSequence[Metric] = []
                    token = await self._reviewer.begin(context)
                    tasks = tuple(
                        worker.METHOD_NAME(context, token=token, now=now, acc=acc)
                        for worker in self._workers
                    )
                    _, pending = await wait(tasks, timeout=timeout)
                    if not acc:
                        # Deadline passed with nothing collected: wait for the
                        # first worker to produce anything at all.
                        for fut in as_completed(pending):
                            await fut
                            if acc:
                                break
                    await cancel(*pending)
                    return acc

        self._work_task = task = create_task(cont(self._work_task))
        return task
class Worker(Generic[_O_co, _T_co]):
    """Base class for a completion source; subclasses implement work()."""

    def __init__(self, supervisor: Supervisor, options: _O_co, misc: _T_co) -> None:
        self._work_task: Optional[Task] = None
        self._work_lock = TracingLocker(name=options.short_name, force=True)
        self._supervisor, self._options, self._misc = supervisor, options, misc
        # Register with the supervisor in the background.
        create_task(self._supervisor.register(self, assoc=options))

    @abstractmethod
    def work(self, context: Context) -> AsyncIterator[Completion]:
        ...

    def METHOD_NAME(
        self,
        context: Context,
        token: Any,
        now: float,
        acc: MutableSequence[Metric],
    ) -> Task:
        prev = self._work_task

        async def cont() -> None:
            instance, items = uuid4(), 0
            interrupted = False
            # Cancel this worker's previous run before starting a new one.
            with timeit(f"CANCEL WORKER -- {self._options.short_name}"):
                if prev:
                    await cancel(prev)

            with suppress_and_log(), timeit(f"WORKER -- {self._options.short_name}"):
                await self._supervisor._reviewer.s_begin(
                    token, assoc=self._options, instance=instance
                )
                try:
                    # Stream completions: score each one and append the
                    # metric to the shared accumulator as it arrives.
                    async for items, completion in aenumerate(
                        self.work(context), start=1
                    ):
                        metric = self._supervisor._reviewer.trans(
                            token, instance=instance, completion=completion
                        )
                        acc.append(metric)
                except CancelledError:
                    interrupted = True
                    raise
                finally:
                    # Always report timing/volume stats, even when cancelled.
                    elapsed = monotonic() - now
                    await self._supervisor._reviewer.s_end(
                        instance,
                        interrupted=interrupted,
                        elapsed=elapsed,
                        items=items,
                    )

        self._work_task = task = create_task(cont())
        return task
6,024 | test load kps | # Copyright (c) OpenMMLab. All rights reserved.
import copy
import os.path as osp
import numpy as np
import pytest
from mmcv.transforms import LoadAnnotations, LoadImageFromFile
class TestLoadImageFromFile:
    """Unit tests for mmcv.transforms.LoadImageFromFile."""

    def test_load_img(self):
        # file_client_args and backend_args can not be both set
        with pytest.raises(
                ValueError,
                match='"file_client_args" and "backend_args" cannot be set'):
            LoadImageFromFile(
                file_client_args={'backend': 'disk'},
                backend_args={'backend': 'disk'})

        # Default settings: load a color image and verify metadata keys.
        data_prefix = osp.join(osp.dirname(__file__), '../data')
        results = dict(img_path=osp.join(data_prefix, 'color.jpg'))
        transform = LoadImageFromFile()
        results = transform(copy.deepcopy(results))
        assert results['img_path'] == osp.join(data_prefix, 'color.jpg')
        assert results['img'].shape == (300, 400, 3)
        assert results['img'].dtype == np.uint8
        assert results['img_shape'] == (300, 400)
        assert results['ori_shape'] == (300, 400)
        assert repr(transform) == transform.__class__.__name__ + \
            "(ignore_empty=False, to_float32=False, color_type='color', " + \
            "imdecode_backend='cv2', backend_args=None)"

        # to_float32
        transform = LoadImageFromFile(to_float32=True)
        results = transform(copy.deepcopy(results))
        assert results['img'].dtype == np.float32

        # gray image: the default color_type still yields 3 channels
        results = dict(img_path=osp.join(data_prefix, 'grayscale.jpg'))
        transform = LoadImageFromFile()
        results = transform(copy.deepcopy(results))
        assert results['img'].shape == (300, 400, 3)
        assert results['img'].dtype == np.uint8

        # color_type='unchanged' keeps the single grayscale channel
        transform = LoadImageFromFile(color_type='unchanged')
        results = transform(copy.deepcopy(results))
        assert results['img'].shape == (300, 400)
        assert results['img'].dtype == np.uint8

        # test load empty: missing file raises unless ignore_empty is set
        fake_img_path = osp.join(data_prefix, 'fake.jpg')
        results['img_path'] = fake_img_path
        transform = LoadImageFromFile(ignore_empty=False)
        with pytest.raises(FileNotFoundError):
            transform(copy.deepcopy(results))
        transform = LoadImageFromFile(ignore_empty=True)
        assert transform(copy.deepcopy(results)) is None
class TestLoadAnnotations:
    """Tests for `LoadAnnotations` over a small hand-built results dict."""

    def setup_class(cls):
        # Shared fixture: two instances (bbox, label, keypoints) plus a
        # grayscale image reused as the segmentation map.
        data_prefix = osp.join(osp.dirname(__file__), '../data')
        seg_map = osp.join(data_prefix, 'grayscale.jpg')
        cls.results = {
            'seg_map_path':
            seg_map,
            'instances': [{
                'bbox': [0, 0, 10, 20],
                'bbox_label': 1,
                'keypoints': [1, 2, 3]
            }, {
                'bbox': [10, 10, 110, 120],
                'bbox_label': 2,
                'keypoints': [4, 5, 6]
            }]
        }

    def test_init(self):
        # file_client_args and backend_args can not be both set
        with pytest.raises(
                ValueError,
                match='"file_client_args" and "backend_args" cannot be set'):
            LoadAnnotations(
                file_client_args={'backend': 'disk'},
                backend_args={'backend': 'disk'})

    def test_load_bboxes(self):
        transform = LoadAnnotations(
            with_bbox=True,
            with_label=False,
            with_seg=False,
            with_keypoints=False,
        )
        results = transform(copy.deepcopy(self.results))
        assert 'gt_bboxes' in results
        assert (results['gt_bboxes'] == np.array([[0, 0, 10, 20],
                                                  [10, 10, 110, 120]])).all()
        assert results['gt_bboxes'].dtype == np.float32

    def test_load_labels(self):
        transform = LoadAnnotations(
            with_bbox=False,
            with_label=True,
            with_seg=False,
            with_keypoints=False,
        )
        results = transform(copy.deepcopy(self.results))
        assert 'gt_bboxes_labels' in results
        assert (results['gt_bboxes_labels'] == np.array([1, 2])).all()
        assert results['gt_bboxes_labels'].dtype == np.int64

    def METHOD_NAME(self):
        transform = LoadAnnotations(
            with_bbox=False,
            with_label=False,
            with_seg=False,
            with_keypoints=True,
        )
        results = transform(copy.deepcopy(self.results))
        assert 'gt_keypoints' in results
        # One keypoint triple per instance -> shape (2, 1, 3).
        assert (results['gt_keypoints'] == np.array([[[1, 2, 3]],
                                                     [[4, 5, 6]]])).all()
        assert results['gt_keypoints'].dtype == np.float32

    def test_load_seg_map(self):
        transform = LoadAnnotations(
            with_bbox=False,
            with_label=False,
            with_seg=True,
            with_keypoints=False,
        )
        results = transform(copy.deepcopy(self.results))
        assert 'gt_seg_map' in results
        assert results['gt_seg_map'].shape[:2] == (300, 400)
        assert results['gt_seg_map'].dtype == np.uint8

    def test_repr(self):
        transform = LoadAnnotations(
            with_bbox=True,
            with_label=False,
            with_seg=False,
            with_keypoints=False,
        )
        assert repr(transform) == (
            'LoadAnnotations(with_bbox=True, '
            'with_label=False, with_seg=False, '
            "with_keypoints=False, imdecode_backend='cv2', "
            'backend_args=None)')
6,025 | training step | # Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MNIST backbone image classifier example.
To run: python backbone_image_classifier.py --trainer.max_epochs=50
"""
from os import path
from typing import Optional
import torch
from torch.nn import functional as F
from torch.utils.data import DataLoader, random_split
from lightning.pytorch import cli_lightning_logo, LightningDataModule, LightningModule
from lightning.pytorch.cli import LightningCLI
from lightning.pytorch.demos.mnist_datamodule import MNIST
from lightning.pytorch.utilities.imports import _TORCHVISION_AVAILABLE
if _TORCHVISION_AVAILABLE:
from torchvision import transforms
DATASETS_PATH = path.join(path.dirname(__file__), "..", "..", "Datasets")
class Backbone(torch.nn.Module):
    """Two-layer MLP mapping flattened 28x28 images to 10 outputs.

    >>> Backbone() # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
    Backbone(
      (l1): Linear(...)
      (l2): Linear(...)
    )
    """

    def __init__(self, hidden_dim=128):
        super().__init__()
        # Submodule names (l1, l2) are part of the printed module repr.
        self.l1 = torch.nn.Linear(28 * 28, hidden_dim)
        self.l2 = torch.nn.Linear(hidden_dim, 10)

    def forward(self, x):
        # Flatten everything after the batch dimension, then two
        # relu-activated linear layers.
        flat = x.view(x.size(0), -1)
        hidden = torch.relu(self.l1(flat))
        return torch.relu(self.l2(hidden))
class LitClassifier(LightningModule):
    """Lightning wrapper that trains a `Backbone` with cross-entropy.

    >>> LitClassifier(Backbone()) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
    LitClassifier(
      (backbone): ...
    )
    """

    def __init__(self, backbone: Optional[Backbone] = None, learning_rate: float = 0.0001):
        super().__init__()
        self.save_hyperparameters(ignore=["backbone"])
        self.backbone = backbone if backbone is not None else Backbone()

    def forward(self, x):
        # use forward for inference/predictions
        return self.backbone(x)

    def _step_loss(self, batch):
        # Shared forward + cross-entropy used by the train/val/test steps.
        inputs, targets = batch
        return F.cross_entropy(self(inputs), targets)

    def METHOD_NAME(self, batch, batch_idx):
        loss = self._step_loss(batch)
        self.log("train_loss", loss, on_epoch=True)
        return loss

    def validation_step(self, batch, batch_idx):
        self.log("valid_loss", self._step_loss(batch), on_step=True)

    def test_step(self, batch, batch_idx):
        self.log("test_loss", self._step_loss(batch))

    def predict_step(self, batch, batch_idx, dataloader_idx=None):
        inputs, _ = batch
        return self(inputs)

    def configure_optimizers(self):
        # self.hparams is available because save_hyperparameters() was
        # called in __init__.
        return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
class MyDataModule(LightningDataModule):
    """MNIST datamodule with a 55000/5000 train/val split."""

    def __init__(self, batch_size: int = 32):
        super().__init__()
        # Downloads MNIST into DATASETS_PATH on first use.
        full_train = MNIST(DATASETS_PATH, train=True, download=True, transform=transforms.ToTensor())
        self.mnist_test = MNIST(DATASETS_PATH, train=False, download=True, transform=transforms.ToTensor())
        self.mnist_train, self.mnist_val = random_split(full_train, [55000, 5000])
        self.batch_size = batch_size

    def _loader(self, dataset):
        # All splits share the same batch size and default loader options.
        return DataLoader(dataset, batch_size=self.batch_size)

    def train_dataloader(self):
        return self._loader(self.mnist_train)

    def val_dataloader(self):
        return self._loader(self.mnist_val)

    def test_dataloader(self):
        return self._loader(self.mnist_test)

    def predict_dataloader(self):
        return self._loader(self.mnist_test)
def cli_main():
    """Build the CLI without auto-run, then fit/test/predict explicitly."""
    cli = LightningCLI(
        LitClassifier,
        MyDataModule,
        seed_everything_default=1234,
        save_config_kwargs={"overwrite": True},
        run=False,
    )
    trainer, datamodule = cli.trainer, cli.datamodule
    trainer.fit(cli.model, datamodule=datamodule)
    trainer.test(ckpt_path="best", datamodule=datamodule)
    predictions = trainer.predict(ckpt_path="best", datamodule=datamodule)
    print(predictions[0])


if __name__ == "__main__":
    cli_lightning_logo()
    cli_main()
6,026 | test equality of objs with different message | # -*- coding: utf-8 -*-
from unittest.mock import patch
from django.core.exceptions import ValidationError
from django.test import TestCase, SimpleTestCase
from django_extensions.validators import NoControlCharactersValidator, NoWhitespaceValidator, HexValidator
class NoControlCharactersValidatorTests(TestCase):
    """Tests for NoControlCharactersValidator."""

    def _assert_rejected(self, value, message, code, whitelist):
        # Run self.validator on `value` and verify every field of the
        # resulting ValidationError.
        with self.assertRaises(ValidationError) as cm:
            self.validator(value)
        self.assertEqual(cm.exception.message, message)
        self.assertEqual(cm.exception.code, code)
        self.assertDictEqual(cm.exception.params, {'value': value, 'whitelist': whitelist})

    def test_should_raise_default_message_and_code_if_value_contains_new_line(self):
        self.validator = NoControlCharactersValidator()
        self._assert_rejected(
            'test\nvalue',
            'Control Characters like new lines or tabs are not allowed.',
            'no_control_characters',
            None,
        )

    def test_should_raise_custom_message_and_code_if_value_contains_tabs(self):
        self.validator = NoControlCharactersValidator(message='custom message', code='custom code')
        self._assert_rejected('test\tvalue', 'custom message', 'custom code', None)

    def test_should_not_raise_if_value_contains_characters_which_is_on_whitelist(self):
        self.validator = NoControlCharactersValidator(message='custom message', code='custom code', whitelist=['\n'])
        self.assertIsNone(self.validator('test\nvalue'))
class NoWhiteSpaceValidatorTests(TestCase):
    """Tests for NoWhitespaceValidator."""

    def _assert_rejected(self, value):
        # Expect the configured validator to reject `value`; return the
        # raised exception so callers can check message/code.
        with self.assertRaises(ValidationError) as cm:
            self.validator(value)
        self.assertDictEqual(cm.exception.params, {'value': value})
        return cm.exception

    def test_should_raise_default_message_and_code_if_value_has_leading_whitespace(self):
        self.validator = NoWhitespaceValidator()
        exc = self._assert_rejected(' test_value')
        self.assertEqual(exc.message, 'Leading and Trailing whitespaces are not allowed.')
        self.assertEqual(exc.code, 'no_whitespace')

    def test_should_raise_custom_message_and_code_if_value_has_trailing_whitespace(self):
        self.validator = NoWhitespaceValidator(message='custom message', code='custom code')
        exc = self._assert_rejected('test value ')
        self.assertEqual(exc.message, 'custom message')
        self.assertEqual(exc.code, 'custom code')

    def test_should_not_raise_if_value_doesnt_have_leading_or_trailing_whitespaces(self):
        self.validator = NoWhitespaceValidator()
        self.assertIsNone(self.validator('test value'))
class TestHexValidator(SimpleTestCase):
    """Tests for HexValidator: equality semantics, length limits and
    hex parsing."""

    def test_custom_message_and_code(self):
        self.validator = HexValidator(message='message', code='code')
        self.assertEqual(self.validator.message, 'message')
        self.assertEqual(self.validator.code, 'code')

    def test_equality_of_objs_with_obj_of_different_type(self):
        self.assertNotEqual(TypeError(), HexValidator())

    def test_equality_of_objs_with_different_code(self):
        self.assertNotEqual(HexValidator(code='1'), HexValidator(code='a'))

    def METHOD_NAME(self):
        self.assertNotEqual(HexValidator(code='code', message='a'), HexValidator(code='code', message='acb'))

    def test_equality_of_objs_with_same_code_and_message(self):
        self.assertEqual(HexValidator(code='c', message='m'), HexValidator(code='c', message='m'))

    def test_fixed_length(self):
        # Exact-length constraint: 4 chars vs required 5.
        value = 'abcd'
        self.validator = HexValidator(length=5)
        with self.assertRaises(ValidationError) as err:
            self.validator(value)
        self.assertEqual(str(err.exception), "['Invalid length. Must be 5 characters.']")
        self.assertEqual(err.exception.code, 'hex_only_length')

    def test_min_length(self):
        value = 'a'
        self.validator = HexValidator(min_length=5)
        with self.assertRaises(ValidationError) as err:
            self.validator(value)
        self.assertEqual(str(err.exception), "['Ensure that there are more than 5 characters.']")
        self.assertEqual(err.exception.code, 'hex_only_min_length')

    def test_with_max_length(self):
        value = 'abcd'
        self.validator = HexValidator(max_length=2)
        with self.assertRaises(ValidationError) as err:
            self.validator(value)
        self.assertEqual(str(err.exception), "['Ensure that there are no more than 2 characters.']")
        self.assertEqual(err.exception.code, 'hex_only_max_length')

    def test_invalid_type(self):
        # Patch force_str so the integer survives string coercion and the
        # validator's hex check sees a non-string value.
        value = 1
        with patch('django_extensions.validators.force_str', return_value=1):
            self.validator = HexValidator()
            with self.assertRaises(ValidationError) as err:
                self.validator(value)
            self.assertEqual(str(err.exception), "['Only a hex string is allowed.']")
            self.assertEqual(err.exception.code, 'hex_only')

    def test_invalid_hex(self):
        # '1' is rejected as invalid hex input.
        value = '1'
        self.validator = HexValidator()
        with self.assertRaises(ValidationError) as err:
            self.validator(value)
        self.assertEqual(str(err.exception), "['Only a hex string is allowed.']")
        self.assertEqual(err.exception.code, 'hex_only')

    def test_valid_hex(self):
        value = 'b901ef'
        self.validator = HexValidator()
        result = self.validator(value)
        self.assertIsNone(result)
6,027 | char field field factory | from iommi.shortcut import Shortcut
from iommi.base import MISSING
def setup_db_compat():
    """Install the Django model-field factories for all iommi layers."""
    setup_db_compat_django()
def register_factory(django_field_class, *, shortcut_name=MISSING, factory=MISSING, **kwargs):
    """Register the same factory for form fields, query filters and
    table columns in one call."""
    from iommi.form import register_field_factory
    from iommi.query import register_filter_factory
    from iommi.table import register_column_factory

    # Same registration applied to each of the three layers, in the
    # field -> filter -> column order.
    for register in (register_field_factory, register_filter_factory, register_column_factory):
        register(django_field_class, shortcut_name=shortcut_name, factory=factory, **kwargs)
def setup_db_compat_django():
    """Map Django model field types onto iommi Field/Filter/Column
    shortcuts.

    CharField gets bespoke factories (choice vs text depending on
    `choices`); most other field types route through `register_factory`
    which registers all three layers at once.
    """
    from iommi.form import register_field_factory
    from iommi.query import register_filter_factory
    from iommi.table import register_column_factory
    from iommi.sort_after import LAST

    from django.db.models import (
        AutoField,
        BinaryField,
        BooleanField,
        CharField,
        DateField,
        DateTimeField,
        DecimalField,
        EmailField,
        FileField,
        FilePathField,
        FloatField,
        ForeignKey,
        GenericIPAddressField,
        ImageField,
        IntegerField,
        ManyToManyField,
        ManyToManyRel,
        ManyToOneRel,
        TextField,
        TimeField,
        URLField,
        UUIDField,
        JSONField,
    )

    def _get_choices_from_model_choices(model_field):
        # model_field.choices is a sequence of (value, label) pairs.
        return [value for value, label in model_field.choices]

    def _build_display_name_formatter(model_field):
        # Returns a formatter that resolves a stored choice value back to
        # its human-readable label.
        label_by_value = dict(model_field.choices)

        def choice_display_name_formatter(choice, **_):
            return label_by_value[choice]

        return choice_display_name_formatter

    def METHOD_NAME(model_field, **_):
        # CharField with `choices` -> choice shortcut; otherwise text.
        if model_field.choices:
            return Shortcut(
                call_target__attribute='choice',
                choices=_get_choices_from_model_choices(model_field),
                choice_display_name_formatter=_build_display_name_formatter(model_field),
            )
        return Shortcut(call_target__attribute='text')

    def char_field_filter_factory(model_field, **_):
        # Same as the field factory, but the formatter nests under `field__`.
        if model_field.choices:
            return Shortcut(
                call_target__attribute='choice',
                choices=_get_choices_from_model_choices(model_field),
                field__choice_display_name_formatter=_build_display_name_formatter(model_field),
            )
        return Shortcut(call_target__attribute='text')

    def char_field_column_factory(model_field, **_):
        # Same again, one level deeper under `filter__field__`.
        if model_field.choices:
            return Shortcut(
                call_target__attribute='choice',
                choices=_get_choices_from_model_choices(model_field),
                filter__field__choice_display_name_formatter=_build_display_name_formatter(model_field),
            )
        return Shortcut(call_target__attribute='text')

    register_field_factory(CharField, factory=METHOD_NAME)
    register_filter_factory(CharField, factory=char_field_filter_factory)
    register_column_factory(CharField, factory=char_field_column_factory)

    register_factory(UUIDField, shortcut_name='text')
    register_factory(TimeField, shortcut_name='time')
    register_factory(EmailField, shortcut_name='email')
    register_factory(DecimalField, shortcut_name='decimal')
    register_factory(DateField, shortcut_name='date')
    register_factory(DateTimeField, shortcut_name='datetime')
    register_factory(FloatField, shortcut_name='float')
    register_factory(IntegerField, shortcut_name='integer')
    register_factory(FileField, shortcut_name='file')
    register_factory(AutoField, shortcut_name='integer', include=False)
    # Reverse relations: excluded by default and sorted after everything.
    register_factory(
        ManyToOneRel,
        shortcut_name='foreign_key_reverse',
        include=False,
        after=LAST,
    )
    register_factory(
        ManyToManyRel,
        shortcut_name='many_to_many_reverse',
        include=False,
        after=LAST,
    )
    register_factory(ManyToManyField, shortcut_name='many_to_many')
    register_factory(ForeignKey, shortcut_name='foreign_key')
    register_factory(GenericIPAddressField, shortcut_name='text')
    register_factory(FilePathField, shortcut_name='text')
    register_factory(BinaryField, factory=None)
    register_factory(JSONField, shortcut_name='text', include=False)
    # Column specific
    register_column_factory(BooleanField, shortcut_name='boolean')
    register_column_factory(TextField, shortcut_name='text')
    # Filter specific
    register_filter_factory(URLField, shortcut_name='url')
    register_filter_factory(BooleanField, shortcut_name='boolean')
    register_filter_factory(TextField, shortcut_name='text')
    # Field specific
    register_field_factory(ImageField, shortcut_name='image')
    register_field_factory(URLField, shortcut_name='url')
    # Nullable booleans need a tristate widget.
    register_field_factory(
        BooleanField,
        factory=lambda model_field, **kwargs: (
            Shortcut(call_target__attribute='boolean')
            if not model_field.null
            else Shortcut(call_target__attribute='boolean_tristate')
        ),
    )
    register_field_factory(TextField, shortcut_name='textarea')
    register_field_factory(FileField, shortcut_name='file')
def base_defaults_factory(model_field):
    """Common defaults derived from a Django model field.

    Currently only a capitalized `display_name` taken from the field's
    verbose_name, when present.
    """
    from iommi.base import capitalize

    defaults = {}
    if hasattr(model_field, 'verbose_name'):
        defaults['display_name'] = capitalize(model_field.verbose_name)
    return defaults
# TODO: move to form.py! remember to take the tests with them
def field_defaults_factory(model_field):
    """Form-field defaults (`required`, `parse_empty_string_as_none`)
    derived from a Django model field, on top of `base_defaults_factory`."""
    from django.db.models import BooleanField, ManyToManyField

    defaults = base_defaults_factory(model_field)
    nullable = hasattr(model_field, 'null')
    # Booleans are never "required" in the form sense; M2M relations can
    # always be empty.
    if nullable and not isinstance(model_field, BooleanField):
        defaults['required'] = not model_field.null and not model_field.blank
    if isinstance(model_field, ManyToManyField):
        defaults['required'] = False
    if nullable:
        defaults['parse_empty_string_as_none'] = model_field.null
    return defaults
6,028 | create row comment | from baserow_premium.license.models import License, LicenseUser
from baserow_premium.row_comments.models import RowComment
from baserow_premium.views.models import (
CalendarView,
CalendarViewFieldOptions,
KanbanView,
KanbanViewFieldOptions,
)
from baserow.contrib.database.fields.models import Field
from baserow.core.prosemirror.schema import schema
from baserow.core.prosemirror.utils import prosemirror_doc_from_plain_text
VALID_ONE_SEAT_LICENSE = (
# id: "1", instance_id: "1"
b"eyJ2ZXJzaW9uIjogMSwgImlkIjogIjEiLCAidmFsaWRfZnJvbSI6ICIyMDIxLTA4LTI5VDE5OjUyOjU3"
b"Ljg0MjY5NiIsICJ2YWxpZF90aHJvdWdoIjogIjIwMjEtMDktMjlUMTk6NTI6NTcuODQyNjk2IiwgInBy"
b"b2R1Y3RfY29kZSI6ICJwcmVtaXVtIiwgInNlYXRzIjogMSwgImlzc3VlZF9vbiI6ICIyMDIxLTA4LTI5"
b"VDE5OjUyOjU3Ljg0MjY5NiIsICJpc3N1ZWRfdG9fZW1haWwiOiAiYnJhbUBiYXNlcm93LmlvIiwgImlz"
b"c3VlZF90b19uYW1lIjogIkJyYW0iLCAiaW5zdGFuY2VfaWQiOiAiMSJ9.e33Z4CxLSmD-R55Es24P3mR"
b"8Oqn3LpaXvgYLzF63oFHat3paon7IBjBmOX3eyd8KjirVf3empJds4uUw2Nn2m7TVvRAtJ8XzNl-8ytf"
b"2RLtmjMx1Xkgp5VZ8S7UqJ_cKLyl76eVRtGEA1DH2HdPKu1vBPJ4bzDfnhDPYl4k5z9XSSgqAbQ9WO0U"
b"5kiI3BYjVRZSKnZMeguAGZ47ezDj_WArGcHAB8Pa2v3HFp5Y34DMJ8r3_hD5hxCKgoNx4AHx1Q-hRDqp"
b"Aroj-4jl7KWvlP-OJNc1BgH2wnhFmeKHotv-Iumi83JQohyceUbG6j8rDDQvJfcn0W2_ebmUH3TKr-w="
b"="
)
VALID_100_SEAT_LICENSE_UNTIL_YEAR_2099 = (
# id: "test-license", instance_id: "1"
# valid from the year 1000 through the year 2099
b"eyJ2ZXJzaW9uIjogMSwgImlkIjogInRlc3QtbGljZW5zZSIsICJ2YWxpZF9mcm9tIjogIjEwMDAtMDEt"
b"MDFUMTI6MDA6MDAuMDAwMDAwIiwgInZhbGlkX3Rocm91Z2giOiAiMjA5OS0wMS0wMVQxMjowMDowMC4w"
b"MDAwMDAiLCAicHJvZHVjdF9jb2RlIjogInByZW1pdW0iLCAic2VhdHMiOiAxMDAsICJpc3N1ZWRfb24i"
b"OiAiMjAyMS0wOC0yOVQxOTo1Mjo1Ny44NDI2OTYiLCAiaXNzdWVkX3RvX2VtYWlsIjogImJyYW1AYmFz"
b"ZXJvdy5pbyIsICJpc3N1ZWRfdG9fbmFtZSI6ICJCcmFtIiwgImluc3RhbmNlX2lkIjogIjEifQ==.SoF"
b"QKxwNjNM-lvJ4iy7d8dc4EmWZagMBzgAmQgUJoGo6FtXaTOILOnc3Tm2uSwZ2MImBeehIff8GPE521-k"
b"a9-0DDYEX4BYVgpLxLF3gFZxgX0QJgsNsauOjEZH8IGFGX1Asdsll2rNbzYDKz68jG7GgK04Lfn19cQ-"
b"Qg0Qlgq0gB_7CoUulo73fhCjOZHoH1HAzxh77SbgXxJbDQOYlXqortVvl5vDpNcPTbar4IihBJRgaFTM"
b"7DjR0On8GCX7j944VkXguiGPdglBXTcqRbPf1qqmZ8jaHrKX6wHYysBJs10OqWqT5p_s8cuRrr0IzLDz"
b"Ss-q11zuFn-ekeJzo5A=="
)
class PremiumFixtures:
    """Test-fixture mixin for premium features: licenses, kanban and
    calendar views, and row comments.

    Relies on the base fixtures class for `create_user`,
    `create_database_table`, `self.fake`, etc.
    """

    def create_user(self, *args, **kwargs):
        # `has_active_premium_license=True` attaches the 100-seat license
        # (valid until 2099) to the new user.
        has_active_premium_license = kwargs.pop("has_active_premium_license", False)
        user = super().create_user(*args, **kwargs)
        if has_active_premium_license:
            self.create_active_premium_license_for_user(user)
        return user

    def create_active_premium_license_for_user(self, user):
        # Idempotent: get_or_create on both the license and the link row.
        test_license, created = License.objects.get_or_create(
            license=VALID_100_SEAT_LICENSE_UNTIL_YEAR_2099.decode()
        )
        LicenseUser.objects.get_or_create(user=user, license=test_license)

    def remove_all_active_premium_licenses(self, user):
        LicenseUser.objects.filter(user=user).delete()

    def create_premium_license(self, **kwargs):
        if "license" not in kwargs:
            kwargs["license"] = VALID_100_SEAT_LICENSE_UNTIL_YEAR_2099.decode()
        return License.objects.create(**kwargs)

    def create_premium_license_user(self, **kwargs):
        if "user" not in kwargs:
            kwargs["user"] = self.create_user()
        if "license" not in kwargs:
            kwargs["license"] = self.create_premium_license()
        return LicenseUser.objects.create(**kwargs)

    def create_kanban_view(self, user=None, **kwargs):
        # Fills table/name/order/single_select_field defaults, then
        # creates field options for every field on the table.
        if "table" not in kwargs:
            kwargs["table"] = self.create_database_table(user=user)
        if "name" not in kwargs:
            kwargs["name"] = self.fake.name()
        if "order" not in kwargs:
            kwargs["order"] = 0
        if "single_select_field" not in kwargs:
            kwargs["single_select_field"] = self.create_single_select_field(
                table=kwargs["table"],
            )
        kanban_view = KanbanView.objects.create(**kwargs)
        self.create_kanban_view_field_options(kanban_view)
        return kanban_view

    def create_kanban_view_field_options(self, kanban_view, **kwargs):
        return [
            self.create_kanban_view_field_option(kanban_view, field, **kwargs)
            for field in Field.objects.filter(table=kanban_view.table)
        ]

    def create_kanban_view_field_option(self, kanban_view, field, **kwargs):
        return KanbanViewFieldOptions.objects.create(
            kanban_view=kanban_view, field=field, **kwargs
        )

    def create_calendar_view(self, user=None, **kwargs):
        # Mirrors create_kanban_view, but keyed on a date field.
        if "table" not in kwargs:
            kwargs["table"] = self.create_database_table(user=user)
        if "name" not in kwargs:
            kwargs["name"] = self.fake.name()
        if "order" not in kwargs:
            kwargs["order"] = 0
        if "date_field" not in kwargs:
            kwargs["date_field"] = self.create_date_field(
                table=kwargs["table"],
            )
        calendar_view = CalendarView.objects.create(**kwargs)
        self.create_calendar_view_field_options(calendar_view)
        return calendar_view

    def create_calendar_view_field_options(self, calendar_view, **kwargs):
        return [
            self.create_calendar_view_field_option(calendar_view, field, **kwargs)
            for field in Field.objects.filter(table=calendar_view.table)
        ]

    def create_calendar_view_field_option(self, calendar_view, field, **kwargs):
        return CalendarViewFieldOptions.objects.create(
            calendar_view=calendar_view, field=field, **kwargs
        )

    def METHOD_NAME(self, user, row, comment):
        # `comment` is a prosemirror document (see the helpers below).
        return RowComment.objects.create(
            user=user, table=row.get_parent(), row_id=row.id, comment=comment
        )

    def create_comment_message_from_plain_text(self, plain_text):
        return prosemirror_doc_from_plain_text(plain_text)

    def create_comment_message_with_mentions(self, mentions):
        # Builds a doc with a single paragraph containing one mention
        # node per user.
        return schema.node(
            "doc",
            {},
            [
                schema.node(
                    "paragraph",
                    {},
                    [
                        schema.node(
                            "mention", {"id": mention.id, "label": mention.first_name}
                        )
                        for mention in mentions
                    ],
                )
            ],
        ).to_json()
6,029 | test drt | #!/usr/bin/env python3
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
# A script which makes it easy to execute common DOM-related tasks
import os
import subprocess
import sys
from sys import argv
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import utils
dart_out_dir = utils.GetBuildRoot(utils.GuessOS(), 'release', '64')
if utils.IsWindows():
dart_bin = os.path.join(dart_out_dir, 'dart.exe')
else:
dart_bin = os.path.join(dart_out_dir, 'dart')
dart_dir = os.path.abspath(
os.path.join(
os.path.dirname(os.path.realpath(__file__)), os.path.pardir,
os.path.pardir))
def help():
    """Print usage information followed by the command table."""
    print('Helper script to make it easy to perform common tasks encountered '
          'during the life of a Dart DOM developer.\n'
          '\n'
          'For example, to re-generate DOM classes then run a specific test:\n'
          ' dom.py gen test_drt html/element_test\n'
          '\n'
          'Or re-generate DOM classes and run the Dart analyzer:\n'
          ' dom.py gen analyze\n')
    print('Commands: ')
    # Each registry entry is [handler, description]; list alphabetically.
    for name, (_, description) in sorted(commands.items()):
        print('\t%s - %s' % (name, description))
def analyze():
    """Run the dart analyzer over the html element test."""
    sdk_dart = os.path.join(dart_out_dir, 'dart-sdk', 'bin', 'dart')
    target = os.path.join('tests', 'lib', 'html', 'element_test.dart')
    return call([sdk_dart, 'analyze', target])
def build():
    """Build the Dart runtime in release mode for ia32."""
    build_script = os.path.join('tools', 'build.py')
    return call([build_script, '--mode=release', '--arch=ia32', 'runtime'])
def dart2js():
    """Compile the next command-line argument with dart2js (checked mode)."""
    source = argv.pop(0)
    compile_dart2js(source, True)
def compile_dart2js(dart_file, checked):
    """Compile `dart_file` with dart2js and return the output .js path."""
    out_file = dart_file + '.js'
    dart2js_path = os.path.join(dart_out_dir, 'dart-sdk', 'bin', 'dart2js')
    flags = ['--library-root=sdk/', '-o%s' % out_file]
    if checked:
        flags.append('--checked')
    call([dart2js_path, dart_file] + flags)
    return out_file
def gen():
    """Re-generate the DOM bindings (htmldart2js + htmldartium)."""
    # The generator must run from its own directory; chdir back up after.
    os.chdir(os.path.join('tools', 'dom', 'scripts'))
    generator = os.path.join(os.getcwd(), 'dartdomgenerator.py')
    result = call([generator, '--rebuild', '--parallel',
                   '--systems=htmldart2js,htmldartium'])
    os.chdir(os.path.join('..', '..', '..'))
    return result
def size_check():
    """Displays the dart2js size of swarm.

    Compiles samples/swarm/swarm.dart (unchecked), reports the size of
    the compiled output with `du`, then removes the compiler artifacts.
    Returns the `du` exit code.
    """
    dart_file = os.path.join('samples', 'swarm', 'swarm.dart')
    out_file = compile_dart2js(dart_file, False)
    result = call([
        'du',
        '-kh',
        '--apparent-size',
        out_file,
    ])
    # BUG FIX: this cleanup was previously dead code placed after the
    # `return`, so the .js/.deps/.map artifacts were never deleted.
    for suffix in ('', '.deps', '.map'):
        os.remove(out_file + suffix)
    return result
def test_ff():
    # Run the dart2js test suite in Firefox.
    test_dart2js('ff', argv)


def METHOD_NAME():
    # Run the dart2js test suite in content shell (DRT).
    test_dart2js('drt', argv)


def test_chrome():
    # Run the dart2js test suite in Chrome.
    test_dart2js('chrome', argv)
def test_dart2js(browser, argv):
    """Run tools/test.py for dart2js against `browser`.

    The next entry of `argv` (if any) selects which tests to run;
    otherwise the full `html` suite is used.
    """
    cmd = [
        os.path.join('tools', 'test.py'),
        '-c', 'dart2js',
        '-r', browser,
        '--mode=release',
        '--checked',
        '--arch=ia32',
        '-v',
    ]
    if argv:
        selector = argv.pop(0)
    else:
        print(
            'Test commands should be followed by tests to run. Defaulting to html'
        )
        selector = 'html'
    cmd.append(selector)
    return call(cmd)
def test_server():
    """Serve browser tests from a standard checkout build."""
    start_test_server(5400, os.path.join('out', 'ReleaseX64'))


def test_server_dartium():
    """Serve browser tests from a dartium enlistment build."""
    start_test_server(5500, os.path.join('..', 'out', 'Release'))


def start_test_server(port, build_directory):
    """Launch the Dart HTTP test server on `port` (cross-origin requests
    are served on port + 1)."""
    banner = ('Browse tests at '
              '\033[94mhttp://localhost:%d/root_build/generated_tests/\033[0m' % port)
    print(banner)
    server_cmd = [
        utils.CheckedInSdkExecutable(),
        os.path.join('tools', 'testing', 'dart', 'http_server.dart'),
        '--port=%d' % port,
        '--crossOriginPort=%d' % (port + 1),
        '--network=0.0.0.0',
        '--build-directory=%s' % build_directory,
    ]
    return call(server_cmd)
def call(args):
    """Echo and run `args` as a subprocess; return its exit code.

    Captured stdout/stderr are printed after the process exits.
    """
    print(' '.join(args))
    # BUG FIX: open the pipes in text mode so output prints as text
    # instead of a bytes repr (b'...'), as the previous code did.
    pipe = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        universal_newlines=True)
    output, error = pipe.communicate()
    if output:
        print(output)
    if error:
        print(error)
    return pipe.returncode
# Command registry: name -> [handler, description].  Consumed by help()
# (for the listing) and main() (for dispatch).
commands = {
    'analyze': [analyze, 'Run the dart analyzer'],
    'build': [build, 'Build dart in release mode'],
    'dart2js': [dart2js, 'Run dart2js on the .dart file specified'],
    'gen': [gen, 'Re-generate DOM generated files (run go.sh)'],
    'size_check': [size_check, 'Check the size of dart2js compiled Swarm'],
    'test_chrome': [
        test_chrome, 'Run tests in checked mode in Chrome.\n'
        '\t\tOptionally provide name of test to run.'
    ],
    # TODO(antonm): fix option name.
    'test_drt': [
        METHOD_NAME, 'Run tests in checked mode in content shell.\n'
        '\t\tOptionally provide name of test to run.'
    ],
    'test_ff': [
        test_ff, 'Run tests in checked mode in Firefox.\n'
        '\t\tOptionally provide name of test to run.'
    ],
    'test_server': [
        test_server, 'Starts the testing server for manually '
        'running browser tests.'
    ],
    'test_server_dartium': [
        test_server_dartium, 'Starts the testing server for '
        'manually running browser tests from a dartium enlistment.'
    ],
}
def main():
    """Dispatch each command-line argument to its registered handler.

    Exits with status 0 only when every command succeeded.
    """
    success = True
    argv.pop(0)
    if not argv:
        help()
        success = False
    while argv:
        # Make sure that we're always rooted in the dart root folder.
        os.chdir(dart_dir)
        command = argv.pop(0)
        if command not in commands:
            help()
            success = False
            break
        handler = commands[command][0]
        returncode = handler()
        success = success and not bool(returncode)
    sys.exit(not success)


if __name__ == '__main__':
    main()
6,030 | on 200 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"maintenance update list",
)
# NOTE: generated by aaz-dev-tools; the docstring below is parsed by the
# CLI for help output and is kept verbatim.
class List(AAZCommand):
    """List updates to resources.

    :example: Updates_List
        az maintenance update list --provider-name "Microsoft.Compute" --resource-group "examplerg" --resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
    """

    _aaz_info = {
        "version": "2023-04-01",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/providers/microsoft.maintenance/updates", "2023-04-01"],
        ]
    }

    def _handler(self, command_args):
        # Single synchronous GET; result is deserialized in _output().
        super()._handler(command_args)
        self._execute_operations()
        return self._output()

    _args_schema = None

    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Schema is built once and cached on the class.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.provider_name = AAZStrArg(
            options=["--provider-name"],
            help="Resource provider name",
            required=True,
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.resource_name = AAZStrArg(
            options=["--resource-name"],
            help="Resource identifier",
            required=True,
        )
        _args_schema.resource_type = AAZStrArg(
            options=["--resource-type"],
            help="Resource type",
            required=True,
        )
        return cls._args_schema

    def _execute_operations(self):
        self.pre_operations()
        self.UpdatesList(ctx=self.ctx)()
        self.post_operations()

    @register_callback
    def pre_operations(self):
        pass

    @register_callback
    def post_operations(self):
        pass

    def _output(self, *args, **kwargs):
        result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True)
        return result

    class UpdatesList(AAZHttpOperation):
        # GET .../providers/Microsoft.Maintenance/updates for the target
        # resource; only a 200 response is treated as success.
        CLIENT_TYPE = "MgmtClient"

        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.METHOD_NAME(session)
            return self.on_error(session.http_response)

        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/updates",
                **self.url_parameters
            )

        @property
        def method(self):
            return "GET"

        @property
        def error_format(self):
            return "ODataV4Format"

        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "providerName", self.ctx.args.provider_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceName", self.ctx.args.resource_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceType", self.ctx.args.resource_type,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters

        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2023-04-01",
                    required=True,
                ),
            }
            return parameters

        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters

        def METHOD_NAME(self, session):
            # Store the deserialized 200 body in ctx var "instance".
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )

        _schema_on_200 = None

        @classmethod
        def _build_schema_on_200(cls):
            # Response schema, built once and cached on the class.
            if cls._schema_on_200 is not None:
                return cls._schema_on_200

            cls._schema_on_200 = AAZObjectType()

            _schema_on_200 = cls._schema_on_200
            _schema_on_200.value = AAZListType()

            value = cls._schema_on_200.value
            value.Element = AAZObjectType()

            _element = cls._schema_on_200.value.Element
            _element.impact_duration_in_sec = AAZIntType(
                serialized_name="impactDurationInSec",
            )
            _element.impact_type = AAZStrType(
                serialized_name="impactType",
            )
            _element.maintenance_scope = AAZStrType(
                serialized_name="maintenanceScope",
            )
            _element.not_before = AAZStrType(
                serialized_name="notBefore",
            )
            _element.properties = AAZObjectType(
                flags={"client_flatten": True},
            )
            _element.status = AAZStrType()

            properties = cls._schema_on_200.value.Element.properties
            properties.resource_id = AAZStrType(
                serialized_name="resourceId",
            )

            return cls._schema_on_200
# Namespace for serialization helpers shared by the List command (currently empty).
class _ListHelper:
    """Helper class for List"""
__all__ = ["List"] |
6,031 | left | from talon import Context, actions, clip
# Talon context restricting these edit-action implementations to macOS.
ctx = Context()
ctx.matches = r"""
os: mac
"""
@ctx.action_class("edit")
class EditActions:
    """macOS implementations of Talon's global `edit` actions.

    Each action is implemented as the standard macOS key chord (mostly
    cmd-based). Commented-out `action(...)` lines mark actions that have
    no macOS implementation yet.
    """

    def copy():
        actions.key("cmd-c")

    def cut():
        actions.key("cmd-x")

    def delete():
        actions.key("backspace")

    def delete_line():
        actions.edit.select_line()
        actions.edit.delete()

    # action(edit.delete_paragraph):
    # action(edit.delete_sentence):
    def delete_word():
        actions.edit.select_word()
        actions.edit.delete()

    def down():
        actions.key("down")

    # action(edit.extend_again):
    # action(edit.extend_column):
    def extend_down():
        actions.key("shift-down")

    def extend_file_end():
        actions.key("cmd-shift-down")

    def extend_file_start():
        actions.key("cmd-shift-up")

    def extend_left():
        actions.key("shift-left")

    # action(edit.extend_line):
    def extend_line_down():
        actions.key("shift-down")

    def extend_line_end():
        actions.key("cmd-shift-right")

    def extend_line_start():
        actions.key("cmd-shift-left")

    def extend_line_up():
        actions.key("shift-up")

    def extend_page_down():
        actions.key("cmd-shift-pagedown")

    def extend_page_up():
        actions.key("cmd-shift-pageup")

    # action(edit.extend_paragraph_end):
    # action(edit.extend_paragraph_next):
    # action(edit.extend_paragraph_previous):
    # action(edit.extend_paragraph_start):
    def extend_right():
        actions.key("shift-right")

    # action(edit.extend_sentence_end):
    # action(edit.extend_sentence_next):
    # action(edit.extend_sentence_previous):
    # action(edit.extend_sentence_start):
    def extend_up():
        actions.key("shift-up")

    def extend_word_left():
        actions.key("shift-alt-left")

    def extend_word_right():
        actions.key("shift-alt-right")

    def file_end():
        actions.key("cmd-down")

    def file_start():
        actions.key("cmd-up")

    def find(text: str = None):
        # Pre-seed the system find pasteboard so cmd-f opens with *text* filled in.
        if text is not None:
            clip.set_text(text, mode="find")
        actions.key("cmd-f")

    def find_next():
        actions.key("cmd-g")

    def find_previous():
        actions.key("cmd-shift-g")

    def indent_less():
        actions.key("cmd-left delete")

    def indent_more():
        actions.key("cmd-left tab")

    # action(edit.jump_column(n: int)
    # action(edit.jump_line(n: int)
    def METHOD_NAME():
        # NOTE(review): placeholder identifier — appears to implement `edit.left`
        # (single-character cursor-left); confirm against the edit action API.
        actions.key("left")

    def line_down():
        actions.key("down home")

    def line_end():
        actions.key("cmd-right")

    def line_insert_up():
        actions.key("cmd-left enter up")

    def line_start():
        actions.key("cmd-left")

    def line_up():
        actions.key("up cmd-left")

    # action(edit.move_again):
    def page_down():
        actions.key("pagedown")

    def page_up():
        actions.key("pageup")

    # action(edit.paragraph_end):
    # action(edit.paragraph_next):
    # action(edit.paragraph_previous):
    # action(edit.paragraph_start):
    def paste():
        actions.key("cmd-v")

    def paste_match_style():
        actions.key("cmd-alt-shift-v")

    def print():
        actions.key("cmd-p")

    def redo():
        actions.key("cmd-shift-z")

    def right():
        actions.key("right")

    def save():
        actions.key("cmd-s")

    def save_all():
        actions.key("cmd-shift-s")

    def select_all():
        actions.key("cmd-a")

    def select_line(n: int = None):
        # Jump to line *n* first if given, then select the whole current line.
        if n is not None:
            actions.edit.jump_line(n)
        actions.key("cmd-right cmd-shift-left")

    # action(edit.select_lines(a: int, b: int)):
    def select_none():
        actions.key("right")

    # action(edit.select_paragraph):
    # action(edit.select_sentence):
    def undo():
        actions.key("cmd-z")

    def up():
        actions.key("up")

    def word_left():
        actions.key("alt-left")

    def word_right():
        actions.key("alt-right")

    def zoom_in():
        actions.key("cmd-=")

    def zoom_out():
        actions.key("cmd--")

    def zoom_reset():
        actions.key("cmd-0")
6,032 | test action selection with epsilon decay | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for py_epsilon_greedy_policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing.absltest import mock
from tf_agents.policies import py_epsilon_greedy_policy
from tf_agents.trajectories import policy_step
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import test_utils
class EpsilonGreedyPolicyTest(test_utils.TestCase):
    """Tests for EpsilonGreedyPolicy using mocked greedy and random sub-policies."""

    def setUp(self):
        super(EpsilonGreedyPolicyTest, self).setUp()
        # Both sub-policies are mocks exposing the minimal policy interface
        # (specs plus action()); call counts distinguish which one was chosen.
        self.greedy_policy = mock.MagicMock()
        self.random_policy = mock.MagicMock()
        self.greedy_policy.time_step_spec = ts.time_step_spec()
        self.greedy_policy.action_spec = ()
        self.greedy_policy.info_spec = ()
        self.greedy_policy.policy_state_spec = ()
        self.random_policy.time_step_spec = ts.time_step_spec()
        self.random_policy.action_spec = ()
        self.random_policy.info_spec = ()
        self.random_policy.policy_state_spec = ()
        self.random_policy.action.return_value = policy_step.PolicyStep(0, ())

    def testCtorAutoRandomPolicy(self):
        # When no random policy is supplied, one is auto-built from the greedy spec.
        policy = py_epsilon_greedy_policy.EpsilonGreedyPolicy(
            self.greedy_policy, 0.5
        )
        self.assertEqual(
            self.greedy_policy.action_spec, policy._random_policy.action_spec
        )

    def testCtorValueErrorNegativeEpsilon(self):
        # Epsilon below 0 is rejected.
        with self.assertRaises(ValueError):
            py_epsilon_greedy_policy.EpsilonGreedyPolicy(
                self.greedy_policy, -0.00001, random_policy=self.random_policy
            )

    def testCtorValueErrorEpsilonMorThanOne(self):
        # Epsilon above 1 is rejected.
        with self.assertRaises(ValueError):
            py_epsilon_greedy_policy.EpsilonGreedyPolicy(
                self.greedy_policy, 1.00001, random_policy=self.random_policy
            )

    def testCtorValueErrorMissingEpsilonEndValue(self):
        # A decay end count without an end value is rejected.
        with self.assertRaises(ValueError):
            py_epsilon_greedy_policy.EpsilonGreedyPolicy(
                self.greedy_policy,
                0.99,
                random_policy=self.random_policy,
                epsilon_decay_end_count=100,
            )

    def testZeroState(self):
        # get_initial_state must be delegated to both sub-policies.
        policy = py_epsilon_greedy_policy.EpsilonGreedyPolicy(
            self.greedy_policy, 0.5, random_policy=self.random_policy
        )
        policy.get_initial_state()
        self.greedy_policy.get_initial_state.assert_called_once_with(
            batch_size=None
        )
        self.random_policy.get_initial_state.assert_called_once_with(
            batch_size=None
        )

    def testActionAlwaysRandom(self):
        # epsilon == 1: every action comes from the random policy.
        policy = py_epsilon_greedy_policy.EpsilonGreedyPolicy(
            self.greedy_policy, 1, random_policy=self.random_policy
        )
        time_step = mock.MagicMock()
        for _ in range(5):
            policy.action(time_step)
        self.random_policy.action.assert_called_with(time_step)
        self.assertEqual(5, self.random_policy.action.call_count)
        self.assertEqual(0, self.greedy_policy.action.call_count)

    def testActionAlwaysGreedy(self):
        # epsilon == 0: every action comes from the greedy policy.
        policy = py_epsilon_greedy_policy.EpsilonGreedyPolicy(
            self.greedy_policy, 0, random_policy=self.random_policy
        )
        time_step = mock.MagicMock()
        for _ in range(5):
            policy.action(time_step)
        self.greedy_policy.action.assert_called_with(time_step, policy_state=())
        self.assertEqual(0, self.random_policy.action.call_count)
        self.assertEqual(5, self.greedy_policy.action.call_count)

    def testActionSelection(self):
        policy = py_epsilon_greedy_policy.EpsilonGreedyPolicy(
            self.greedy_policy, 0.9, random_policy=self.random_policy
        )
        time_step = mock.MagicMock()
        # Replace the random generator with fixed behaviour
        random = mock.MagicMock()
        policy._rng = random
        # 0.8 < 0.9, so random policy should be used.
        policy._rng.rand.return_value = 0.8
        policy.action(time_step)
        self.random_policy.action.assert_called_with(time_step)
        self.assertEqual(1, self.random_policy.action.call_count)
        self.assertEqual(0, self.greedy_policy.action.call_count)
        # 0.91 > 0.9, so greedy policy should be used.
        policy._rng.rand.return_value = 0.91
        policy.action(time_step)
        self.greedy_policy.action.assert_called_with(time_step, policy_state=())
        self.assertEqual(1, self.random_policy.action.call_count)
        self.assertEqual(1, self.greedy_policy.action.call_count)

    def METHOD_NAME(self):
        # NOTE(review): placeholder identifier — presumably
        # testActionSelectionWithEpsilonDecay; confirm against the test suite.
        policy = py_epsilon_greedy_policy.EpsilonGreedyPolicy(
            self.greedy_policy,
            0.9,
            random_policy=self.random_policy,
            epsilon_decay_end_count=10,
            epsilon_decay_end_value=0.4,
        )
        time_step = mock.MagicMock()
        # Replace the random generator with fixed behaviour
        random = mock.MagicMock()
        policy._rng = random
        # 0.8 < 0.9 and 0.8 < 0.85, so random policy should be used.
        policy._rng.rand.return_value = 0.8
        for _ in range(2):
            policy.action(time_step)
        self.random_policy.action.assert_called_with(time_step)
        self.assertEqual(2, self.random_policy.action.call_count)
        self.assertEqual(0, self.greedy_policy.action.call_count)
        # epsilon will change from [0.8 to 0.4], and greedy policy should be used
        for _ in range(8):
            policy.action(time_step)
        self.greedy_policy.action.assert_called_with(time_step, policy_state=())
        self.assertEqual(2, self.random_policy.action.call_count)
        self.assertEqual(8, self.greedy_policy.action.call_count)
        # 0.399 < 0.4, random policy should be used.
        policy._rng.rand.return_value = 0.399
        self.random_policy.reset_mock()
        for _ in range(5):
            policy.action(time_step)
        self.random_policy.action.assert_called_with(time_step)
        self.assertEqual(5, self.random_policy.action.call_count)
        # greedy policy should not be called any more
        self.assertEqual(8, self.greedy_policy.action.call_count)
# Allow running this test module directly.
if __name__ == '__main__':
    test_utils.main()
6,033 | data sources add file path for specified | #-------------------------------------------------------------------------------
# DataSources
#-------------------------------------------------------------------------------
class DataSourcesAbstractAPI:
    """Abstract API surface for DataSources.

    Every operation must be overridden by a concrete backend; the base
    implementations raise ``NotImplementedError``. The two environment
    hooks (init/finish) are intentionally no-ops.
    """

    @staticmethod
    def init_data_sources_environment(object):
        # Environment setup hook; no-op in the abstract base.
        pass

    @staticmethod
    def finish_data_sources_environment(object):
        # Environment teardown hook; no-op in the abstract base.
        pass

    @staticmethod
    def data_sources_new(operatorName):
        raise NotImplementedError

    @staticmethod
    def data_sources_delete(dataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_result_file_path(dataSources, name):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_result_file_path_with_key(dataSources, name, sKey):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_domain_result_file_path_with_key(dataSources, name, sKey, id):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_file_path(dataSources, name):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_file_path_with_key(dataSources, name, sKey):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_file_path_for_specified_result(dataSources, name, sKey, sResultKey):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_result_file_path_utf8(dataSources, name):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_result_file_path_with_key_utf8(dataSources, name, sKey):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_domain_result_file_path_utf8(dataSources, name, id):
        raise NotImplementedError

    @staticmethod
    def data_sources_set_domain_result_file_path_with_key_utf8(dataSources, name, sKey, id):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_file_path_utf8(dataSources, name):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_file_path_with_key_utf8(dataSources, name, sKey):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_domain_file_path_with_key_utf8(dataSources, name, sKey, id):
        raise NotImplementedError

    @staticmethod
    def METHOD_NAME(dataSources, name, sKey, sResultKey):
        # NOTE(review): placeholder identifier — signature matches the utf8
        # variant of data_sources_add_file_path_for_specified_result; confirm.
        raise NotImplementedError

    @staticmethod
    def data_sources_add_upstream_data_sources(dataSources, upstreamDataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_upstream_data_sources_for_specified_result(dataSources, upstreamDataSources, sResultKey):
        raise NotImplementedError

    @staticmethod
    def data_sources_add_upstream_domain_data_sources(dataSources, upstreamDataSources, id):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_result_key(dataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_result_key_by_index(dataSources, index):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_num_result_keys(dataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_num_keys(dataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_key(dataSources, index, num_path):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_path(dataSources, key, index):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_namespace(dataSources, key):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_new_path_collection_for_key(dataSources, key):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_new_collection_for_results_path(dataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_size(dataSources):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_path_by_path_index(dataSources, index):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_key_by_path_index(dataSources, index):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_label_space_by_path_index(dataSources, index):
        raise NotImplementedError

    @staticmethod
    def data_sources_register_namespace(dataSources, result_key, ns):
        raise NotImplementedError

    @staticmethod
    def data_sources_new_on_client(client):
        raise NotImplementedError

    @staticmethod
    def data_sources_get_copy(id, client):
        raise NotImplementedError
|
6,034 | type | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetTagByOperationResult',
'AwaitableGetTagByOperationResult',
'get_tag_by_operation',
'get_tag_by_operation_output',
]
@pulumi.output_type
class GetTagByOperationResult:
    """
    Tag Contract details.
    """
    # NOTE(review): METHOD_NAME below is a masked identifier — from the
    # "Expected argument 'type'" message it stands for the `type` field.
    def __init__(__self__, display_name=None, id=None, name=None, METHOD_NAME=None):
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if METHOD_NAME and not isinstance(METHOD_NAME, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", METHOD_NAME)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        Tag name.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def METHOD_NAME(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetTagByOperationResult(GetTagByOperationResult):
    """Awaitable wrapper: ``await`` yields a plain GetTagByOperationResult."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # Generator trick: the `if False: yield` makes this a generator that
        # immediately returns the resolved result.
        if False:
            yield self
        return GetTagByOperationResult(
            display_name=self.display_name,
            id=self.id,
            name=self.name,
            METHOD_NAME=self.METHOD_NAME)
def get_tag_by_operation(api_id: Optional[str] = None,
                         operation_id: Optional[str] = None,
                         resource_group_name: Optional[str] = None,
                         service_name: Optional[str] = None,
                         tag_id: Optional[str] = None,
                         opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetTagByOperationResult:
    """
    Get tag associated with the Operation.
    Azure REST API version: 2022-08-01.


    :param str api_id: API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
    :param str operation_id: Operation identifier within an API. Must be unique in the current API Management service instance.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str service_name: The name of the API Management service.
    :param str tag_id: Tag identifier. Must be unique in the current API Management service instance.
    """
    __args__ = dict()
    __args__['apiId'] = api_id
    __args__['operationId'] = operation_id
    __args__['resourceGroupName'] = resource_group_name
    __args__['serviceName'] = service_name
    __args__['tagId'] = tag_id
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Synchronous invoke against the provider; result fields are remapped below.
    __ret__ = pulumi.runtime.invoke('azure-native:apimanagement:getTagByOperation', __args__, opts=opts, typ=GetTagByOperationResult).value

    return AwaitableGetTagByOperationResult(
        display_name=pulumi.get(__ret__, 'display_name'),
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        METHOD_NAME=pulumi.get(__ret__, 'type'))
# Output-typed variant: accepts pulumi Inputs and returns an Output wrapping
# the same lookup performed by get_tag_by_operation.
@_utilities.lift_output_func(get_tag_by_operation)
def get_tag_by_operation_output(api_id: Optional[pulumi.Input[str]] = None,
                                operation_id: Optional[pulumi.Input[str]] = None,
                                resource_group_name: Optional[pulumi.Input[str]] = None,
                                service_name: Optional[pulumi.Input[str]] = None,
                                tag_id: Optional[pulumi.Input[str]] = None,
                                opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetTagByOperationResult]:
    """
    Get tag associated with the Operation.
    Azure REST API version: 2022-08-01.


    :param str api_id: API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
    :param str operation_id: Operation identifier within an API. Must be unique in the current API Management service instance.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str service_name: The name of the API Management service.
    :param str tag_id: Tag identifier. Must be unique in the current API Management service instance.
    """
    ...
6,035 | external url | from blinker import Signal
from mongoengine.signals import pre_save, post_save
from werkzeug.utils import cached_property
from udata.core.storages import images, default_image_basename
from udata.frontend.markdown import mdstrip
from udata.i18n import lazy_gettext as _
from udata.models import db, BadgeMixin, WithMetrics
from udata.utils import hash_url
from udata.uris import endpoint_for
__all__ = ('Reuse', 'REUSE_TYPES', 'REUSE_TOPICS')
# Allowed reuse types (stored value -> localized display label).
REUSE_TYPES = {
    'api': _('API'),
    'application': _('Application'),
    'idea': _('Idea'),
    'news_article': _('News Article'),
    'paper': _('Paper'),
    'post': _('Post'),
    'visualization': _('Visualization'),
    'hardware': _('Connected device'),
}

# Allowed reuse topics (stored value -> localized display label).
REUSE_TOPICS = {
    'health': _('Health'),
    'transport_and_mobility': _('Transport and mobility'),
    'housing_and_development': _('Housing and development'),
    'food_and_agriculture': _('Food and agriculture'),
    'culture_and_recreation': _('Culture and recreation'),
    'economy_and_business': _('Economy and business'),
    'environment_and_energy': _('Environment and energy'),
    'work_and_training': _('Work and training'),
    'politics_and_public_life': _('Politics and public life'),
    'safety_and_security': _('Safety and security'),
    'education_and_research': _('Education and research'),
    'society_and_demography': _('Society and demography'),
    'law_and_justice': _('Law and justice'),
    'open_data_tools': _('Open data tools'),
    'others': _('Others'),
}

# Thumbnail sizes (px) generated for the reuse image, and the max stored size.
IMAGE_SIZES = [500, 100, 50, 25]
IMAGE_MAX_SIZE = 800

# Maximum accepted lengths for title and description.
TITLE_SIZE_LIMIT = 350
DESCRIPTION_SIZE_LIMIT = 100000
class ReuseQuerySet(db.OwnedQuerySet):
    """Queryset adding visibility filters for reuses."""

    def visible(self):
        # Visible = public, not soft-deleted and linked to at least one dataset.
        return self(private__ne=True, datasets__0__exists=True, deleted=None)

    def hidden(self):
        # Complement of visible(): private, dataset-less or soft-deleted.
        return self(db.Q(private=True) |
                    db.Q(datasets__0__exists=False) |
                    db.Q(deleted__ne=None))
class Reuse(db.Datetimed, WithMetrics, BadgeMixin, db.Owned, db.Document):
    """A reuse (application, visualization, paper...) built on top of datasets."""

    title = db.StringField(required=True)
    slug = db.SlugField(max_length=255, required=True, populate_from='title',
                        update=True, follow=True)
    description = db.StringField(required=True)
    type = db.StringField(required=True, choices=list(REUSE_TYPES))
    url = db.StringField(required=True)
    # Hash of `url`, kept in sync by clean(); enforces URL uniqueness.
    urlhash = db.StringField(required=True, unique=True)
    image_url = db.StringField()
    image = db.ImageField(
        fs=images, basename=default_image_basename, max_size=IMAGE_MAX_SIZE,
        thumbnails=IMAGE_SIZES)
    datasets = db.ListField(
        db.ReferenceField('Dataset', reverse_delete_rule=db.PULL))
    tags = db.TagListField()
    topic = db.StringField(required=True, choices=list(REUSE_TOPICS))
    # badges = db.ListField(db.EmbeddedDocumentField(ReuseBadge))
    private = db.BooleanField()
    ext = db.MapField(db.GenericEmbeddedDocumentField())
    extras = db.ExtrasField()
    featured = db.BooleanField()
    # Soft-delete timestamp; None means the reuse is not deleted.
    deleted = db.DateTimeField()

    def __str__(self):
        return self.title or ''

    __badges__ = {}

    __metrics_keys__ = [
        'discussions',
        'datasets',
        'followers',
        'views',
    ]

    meta = {
        'indexes': ['$title',
                    'created_at',
                    'last_modified',
                    'metrics.datasets',
                    'metrics.followers',
                    'metrics.views',
                    'urlhash'] + db.Owned.meta['indexes'],
        'ordering': ['-created_at'],
        'queryset_class': ReuseQuerySet,
        'auto_create_index_on_save': True
    }

    # Lifecycle signals emitted by pre_save/post_save below.
    before_save = Signal()
    after_save = Signal()
    on_create = Signal()
    on_update = Signal()
    before_delete = Signal()
    after_delete = Signal()
    on_delete = Signal()

    verbose_name = _('reuse')

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        # Emit before_save
        cls.before_save.send(document)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        """Dispatch after_save/on_create/on_update/on_delete signals."""
        if 'post_save' in kwargs.get('ignores', []):
            # Caller explicitly opted out of post-save signal dispatch.
            return
        cls.after_save.send(document)
        if kwargs.get('created'):
            cls.on_create.send(document)
        else:
            cls.on_update.send(document)
        if document.deleted:
            cls.on_delete.send(document)

    def url_for(self, *args, **kwargs):
        return endpoint_for('reuses.show', 'api.reuse', reuse=self, *args, **kwargs)

    display_url = property(url_for)

    @property
    def is_visible(self):
        return not self.is_hidden

    @property
    def is_hidden(self):
        # Hidden when dataset-less, private or soft-deleted (cf. ReuseQuerySet).
        return len(self.datasets) == 0 or self.private or self.deleted

    @property
    def METHOD_NAME(self):
        # NOTE(review): placeholder identifier — presumably `external_url`.
        return self.url_for(_external=True)

    @property
    def type_label(self):
        return REUSE_TYPES[self.type]

    @property
    def topic_label(self):
        return REUSE_TOPICS[self.topic]

    def clean(self):
        '''Auto populate urlhash from url'''
        # Fixed: the docstring above was previously placed *after* the
        # super().clean() call, making it a dead string-literal statement.
        super(Reuse, self).clean()
        if not self.urlhash or 'url' in self._get_changed_fields():
            self.urlhash = hash_url(self.url)

    @classmethod
    def get(cls, id_or_slug):
        """Fetch a reuse by slug, falling back to its id (404 if missing)."""
        obj = cls.objects(slug=id_or_slug).first()
        return obj or cls.objects.get_or_404(id=id_or_slug)

    @classmethod
    def url_exists(cls, url):
        """Return True when a reuse already references *url* (compared by hash)."""
        urlhash = hash_url(url)
        return cls.objects(urlhash=urlhash).count() > 0

    @cached_property
    def json_ld(self):
        """Schema.org CreativeWork representation of this reuse."""
        result = {
            '@context': 'http://schema.org',
            '@type': 'CreativeWork',
            'alternateName': self.slug,
            'dateCreated': self.created_at.isoformat(),
            'dateModified': self.last_modified.isoformat(),
            'url': endpoint_for('reuses.show', 'api.reuse', reuse=self, _external=True),
            'name': self.title,
            'isBasedOnUrl': self.url,
        }

        if self.description:
            result['description'] = mdstrip(self.description)

        if self.organization:
            author = self.organization.json_ld
        elif self.owner:
            author = self.owner.json_ld
        else:
            author = None

        if author:
            result['author'] = author

        return result

    @property
    def views_count(self):
        return self.metrics.get('views', 0)

    def count_datasets(self):
        """Refresh the datasets metric; skip post_save signals to avoid loops."""
        self.metrics['datasets'] = len(self.datasets)
        self.save(signal_kwargs={'ignores': ['post_save']})

    def count_discussions(self):
        """Refresh the open-discussions metric."""
        from udata.models import Discussion
        self.metrics['discussions'] = Discussion.objects(subject=self, closed=None).count()
        self.save()

    def count_followers(self):
        """Refresh the active-followers metric."""
        from udata.models import Follow
        self.metrics['followers'] = Follow.objects(until=None).followers(self).count()
        self.save()
# Wire mongoengine lifecycle signals to the Reuse signal emitters.
pre_save.connect(Reuse.pre_save, sender=Reuse)
post_save.connect(Reuse.post_save, sender=Reuse)
6,036 | name | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = ['VendorSkuPreviewArgs', 'VendorSkuPreview']
@pulumi.input_type
class VendorSkuPreviewArgs:
    def __init__(__self__, *,
                 sku_name: pulumi.Input[str],
                 vendor_name: pulumi.Input[str],
                 preview_subscription: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a VendorSkuPreview resource.
        :param pulumi.Input[str] sku_name: The name of the vendor sku.
        :param pulumi.Input[str] vendor_name: The name of the vendor.
        :param pulumi.Input[str] preview_subscription: Preview subscription ID.
        """
        pulumi.set(__self__, "sku_name", sku_name)
        pulumi.set(__self__, "vendor_name", vendor_name)
        if preview_subscription is not None:
            pulumi.set(__self__, "preview_subscription", preview_subscription)

    # NOTE(review): METHOD_NAME below is a masked identifier — it is the
    # `name` keyword of pulumi.getter (serialized property name).
    @property
    @pulumi.getter(METHOD_NAME="skuName")
    def sku_name(self) -> pulumi.Input[str]:
        """
        The name of the vendor sku.
        """
        return pulumi.get(self, "sku_name")

    @sku_name.setter
    def sku_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "sku_name", value)

    @property
    @pulumi.getter(METHOD_NAME="vendorName")
    def vendor_name(self) -> pulumi.Input[str]:
        """
        The name of the vendor.
        """
        return pulumi.get(self, "vendor_name")

    @vendor_name.setter
    def vendor_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "vendor_name", value)

    @property
    @pulumi.getter(METHOD_NAME="previewSubscription")
    def preview_subscription(self) -> Optional[pulumi.Input[str]]:
        """
        Preview subscription ID.
        """
        return pulumi.get(self, "preview_subscription")

    @preview_subscription.setter
    def preview_subscription(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "preview_subscription", value)
class VendorSkuPreview(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
preview_subscription: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
vendor_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Customer subscription which can use a sku.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] preview_subscription: Preview subscription ID.
:param pulumi.Input[str] sku_name: The name of the vendor sku.
:param pulumi.Input[str] vendor_name: The name of the vendor.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: VendorSkuPreviewArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Customer subscription which can use a sku.
:param str resource_name: The name of the resource.
:param VendorSkuPreviewArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(VendorSkuPreviewArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
preview_subscription: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
vendor_name: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = VendorSkuPreviewArgs.__new__(VendorSkuPreviewArgs)
__props__.__dict__["preview_subscription"] = preview_subscription
if sku_name is None and not opts.urn:
raise TypeError("Missing required property 'sku_name'")
__props__.__dict__["sku_name"] = sku_name
if vendor_name is None and not opts.urn:
raise TypeError("Missing required property 'vendor_name'")
__props__.__dict__["vendor_name"] = vendor_name
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:hybridnetwork:VendorSkuPreview"), pulumi.Alias(type_="azure-native:hybridnetwork/v20200101preview:VendorSkuPreview"), pulumi.Alias(type_="azure-native:hybridnetwork/v20210501:VendorSkuPreview")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(VendorSkuPreview, __self__).__init__(
'azure-native:hybridnetwork/v20220101preview:VendorSkuPreview',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VendorSkuPreview':
"""
Get an existing VendorSkuPreview resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = VendorSkuPreviewArgs.__new__(VendorSkuPreviewArgs)
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
return VendorSkuPreview(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def METHOD_NAME(self) -> pulumi.Output[str]:
"""
The preview subscription ID.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(METHOD_NAME="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the PreviewSubscription resource.
"""
return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(METHOD_NAME="systemData")
    def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
        """
        The system meta data relating to this resource.
        """
        # Maps the snake_case Python name onto the camelCase wire name above.
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource.
        """
        # NOTE: intentionally shadows the builtin name "type", as is
        # conventional for generated Pulumi resource properties.
        return pulumi.get(self, "type")
|
6,037 | is valid image | from __future__ import annotations
import os
import re
from typing import Any
import sentry_sdk
from sentry.lang.java.utils import deobfuscation_template
from sentry.models import Project, ProjectDebugFile
from sentry.utils import json
from sentry.utils.safe import get_path
# Obfuscated type values are either in the form of "xyz" or "xyz<abc>" where
# both "xyz" or "abc" need to be deobfuscated. It may also be possible for
# the values to be more complicated such as "_xyz", so the regex should capture
# any values other than "<" and ">".
VIEW_HIERARCHY_TYPE_REGEX = re.compile(r"([^<>]+)(?:<([^<>]+)>)?")
def METHOD_NAME(image):
    """Return True when *image* is a truthy mapping describing a
    ``dart_symbols`` debug image that carries a UUID."""
    if not image:
        return False
    if image.get("type") != "dart_symbols":
        return False
    return image.get("uuid") is not None
def has_dart_symbols_file(data):
    """
    Checks whether an event contains a dart symbols file
    """
    images = get_path(data, "debug_meta", "images", filter=True)
    # Only the first image is inspected; assumes a dart_symbols image, when
    # present, is listed first -- TODO confirm this ordering with producers.
    return get_path(images, 0, "type") == "dart_symbols"
def get_dart_symbols_images(event: dict[str, Any]) -> set[str]:
    # Collect the lower-cased UUIDs of every valid dart_symbols debug image
    # attached to the event (METHOD_NAME filters out non-dart images).
    return {
        str(image["uuid"]).lower()
        for image in get_path(event, "debug_meta", "images", filter=METHOD_NAME, default=())
    }
def generate_dart_symbols_map(uuid: str, project: Project):
    """
    NOTE: This function makes assumptions about the structure of the debug file
    since we are not currently storing the file. This may need to be updated if we
    decide to do some pre-processing on the debug file before storing it.
    In its current state, the debug file is expected to be a json file with a list
    of strings. The strings alternate between deobfuscated and obfuscated names.
    If we preprocess it into a map, we can remove this code and just fetch the file.

    Returns a dict mapping obfuscated -> deobfuscated names, or None when the
    file is missing or malformed.
    """
    obfuscated_to_deobfuscated_name_map = {}
    with sentry_sdk.start_span(op="dart_symbols.generate_dart_symbols_map") as span:
        try:
            dif_paths = ProjectDebugFile.difcache.fetch_difs(project, [uuid], features=["mapping"])
            debug_file_path = dif_paths.get(uuid)
            if debug_file_path is None:
                # No mapping file cached for this uuid; implicitly returns None.
                return
            dart_symbols_file_size_in_mb = os.path.getsize(debug_file_path) / (1024 * 1024.0)
            span.set_tag("dart_symbols_file_size_in_mb", dart_symbols_file_size_in_mb)
            with open(debug_file_path) as f:
                debug_array = json.loads(f.read())
            if len(debug_array) % 2 != 0:
                raise Exception("Debug array contains an odd number of elements")
            # Obfuscated names are the odd indices and deobfuscated names are the even indices
            obfuscated_to_deobfuscated_name_map = dict(zip(debug_array[1::2], debug_array[::2]))
        except Exception as err:
            # Best-effort: report the failure and signal "no map" to callers.
            sentry_sdk.capture_exception(err)
            return
    return obfuscated_to_deobfuscated_name_map
def _deobfuscate_view_hierarchy(event_data: dict[str, Any], project: Project, view_hierarchy):
    """
    Deobfuscates a view hierarchy in-place.
    If we're unable to fetch a dart symbols uuid, then the view hierarchy remains unmodified.
    """
    dart_symbols_uuids = get_dart_symbols_images(event_data)
    if len(dart_symbols_uuids) == 0:
        return
    with sentry_sdk.start_span(op="dart_symbols.deobfuscate_view_hierarchy_data"):
        # Each uuid's map is applied over the entire window tree in turn.
        for dart_symbols_uuid in dart_symbols_uuids:
            # NOTE(review): "map" shadows the builtin; left as-is here.
            map = generate_dart_symbols_map(dart_symbols_uuid, project)
            if map is None:
                # If any map fails to load, abort the whole pass.
                return
            # Iterative depth-first walk; assumes "windows" is present in the
            # hierarchy -- TODO confirm with callers.
            windows_to_deobfuscate = [*view_hierarchy.get("windows")]
            while windows_to_deobfuscate:
                window = windows_to_deobfuscate.pop()
                if window.get("type") is None:
                    # If there is no type, then skip this window
                    continue
                matcher = re.match(VIEW_HIERARCHY_TYPE_REGEX, window.get("type"))
                if not matcher:
                    continue
                # Group 1 is the outer type, group 2 the optional generic part
                # (may be None when no "<...>" suffix is present).
                obfuscated_values = matcher.groups()
                for obfuscated_value in obfuscated_values:
                    if obfuscated_value is not None and obfuscated_value in map:
                        window["type"] = window["type"].replace(
                            obfuscated_value, map[obfuscated_value]
                        )
                if children := window.get("children"):
                    windows_to_deobfuscate.extend(children)
def deobfuscate_view_hierarchy(data):
    # Delegate to the shared template (sentry.lang.java.utils), which invokes
    # _deobfuscate_view_hierarchy for "dart_symbols" data.
    return deobfuscation_template(data, "dart_symbols", _deobfuscate_view_hierarchy)
6,038 | generate dict | from copy import deepcopy
from random import randint
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from moto.moto_api._internal import mock_random
from tests.test_eks.test_eks_constants import (
ClusterAttributes,
ClusterInputs,
FargateProfileAttributes,
FargateProfileInputs,
NodegroupAttributes,
NodegroupInputs,
ResponseAttributes,
STATUS,
)
generate_random_name = mock_random.get_random_string
def attributes_to_test(inputs, name):
    """
    Assembles the list of tuples which will be used to validate test results.
    """
    result = deepcopy(inputs.REQUIRED + inputs.OPTIONAL + [STATUS])
    # NOTE(review): other helpers in this module pass the Inputs *classes*
    # around as namespaces; isinstance() only matches instances, so confirm
    # that callers of this function pass instances here.
    if isinstance(inputs, ClusterInputs):
        result += [(ClusterAttributes.NAME, name)]
    elif isinstance(inputs, NodegroupInputs):
        result += [(NodegroupAttributes.NODEGROUP_NAME, name)]
    return result
def generate_clusters(client, num_clusters, minimal):
    """
    Generates 'num_clusters' number of clusters with randomized data and adds them to the mocked backend.
    If 'minimal' is True, only the required values are generated; if False all values are generated.
    Returns a list of the names of the generated clusters.
    """
    # Each create_cluster response echoes the generated name back.
    return [
        client.create_cluster(
            name=generate_random_name(), **_input_builder(ClusterInputs, minimal)
        )[ResponseAttributes.CLUSTER][ClusterAttributes.NAME]
        for _ in range(num_clusters)
    ]
def generate_fargate_profiles(client, cluster_name, num_profiles, minimal):
    """
    Generates 'num_profiles' number of fargate profiles with randomized data and adds them to
    the mocked backend.
    If 'minimal' is True, only the required values are generated; if False, all values are generated.
    Returns a list of the names of the generated Fargate profiles.
    """
    # Each create_fargate_profile response echoes the generated name back.
    return [
        client.create_fargate_profile(
            fargateProfileName=generate_random_name(),
            clusterName=cluster_name,
            **_input_builder(FargateProfileInputs, minimal),
        )[ResponseAttributes.FARGATE_PROFILE][
            FargateProfileAttributes.FARGATE_PROFILE_NAME
        ]
        for _ in range(num_profiles)
    ]
def generate_nodegroups(client, cluster_name, num_nodegroups, minimal):
    """
    Generates 'num_nodegroups' number of nodegroups with randomized data and adds them to the mocked backend.
    If 'minimal' is True, only the required values are generated; if False, all values are generated.
    Returns a list of the names of the generated nodegroups.
    """
    # Each create_nodegroup response echoes the generated name back.
    return [
        client.create_nodegroup(
            nodegroupName=generate_random_name(),
            clusterName=cluster_name,
            **_input_builder(NodegroupInputs, minimal),
        )[ResponseAttributes.NODEGROUP][NodegroupAttributes.NODEGROUP_NAME]
        for _ in range(num_nodegroups)
    ]
def METHOD_NAME(prefix, count):
    """Build a dict of ``count`` entries mapping ``<prefix>_<i>`` to ``str(i)``."""
    result = {}
    for index in range(count):
        result["{}_{}".format(prefix, index)] = str(index)
    return result
def is_valid_uri(value):
    """
    Returns true if a provided string has the form of a valid uri.

    A value only qualifies when it carries a scheme, a network location
    AND a non-empty path component.
    """
    parts = urlparse(value)
    required_components = (parts.scheme, parts.netloc, parts.path)
    return all(required_components)
def region_matches_partition(region, partition):
    """
    Returns True if the provided region and partition are a valid pair.
    """
    # Most specific prefix first: "us-gov-iso-b-" must be tested before
    # "us-gov-iso-", which must be tested before "us-gov-".  With the old
    # ordering the shorter "us-gov-" prefix always matched first, making the
    # two iso entries unreachable.
    valid_matches = [
        ("us-gov-iso-b-", "aws-iso-b"),
        ("us-gov-iso-", "aws-iso"),
        ("us-gov-", "aws-us-gov"),
        ("cn-", "aws-cn"),
    ]
    for prefix, expected_partition in valid_matches:
        if region.startswith(prefix):
            return partition == expected_partition
    # Regions without a special prefix belong to the default "aws" partition.
    return partition == "aws"
def _input_builder(options, minimal):
"""
Assembles the inputs which will be used to generate test object into a dictionary.
"""
values = deepcopy(options.REQUIRED)
if not minimal:
values.extend(deepcopy(options.OPTIONAL))
return dict(values)
def random_names(name_list):
    """
    Returns one value picked at random a list, and one value guaranteed not to be on the list.
    """
    chosen = name_list[randint(0, len(name_list) - 1)]
    # Keep drawing fresh names until one does not collide with the list.
    fresh = generate_random_name()
    while fresh in name_list:
        fresh = generate_random_name()
    return chosen, fresh
6,039 | close | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, Optional, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
from .. import models
from ._configuration import SourceControlConfigurationClientConfiguration
from .operations import ClusterExtensionTypeOperations, ClusterExtensionTypesOperations, ExtensionTypeVersionsOperations, ExtensionsOperations, FluxConfigOperationStatusOperations, FluxConfigurationsOperations, LocationExtensionTypesOperations, OperationStatusOperations, Operations, SourceControlConfigurationsOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class SourceControlConfigurationClient:
    """KubernetesConfiguration Client.
    :ivar cluster_extension_type: ClusterExtensionTypeOperations operations
    :vartype cluster_extension_type:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.ClusterExtensionTypeOperations
    :ivar cluster_extension_types: ClusterExtensionTypesOperations operations
    :vartype cluster_extension_types:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.ClusterExtensionTypesOperations
    :ivar extension_type_versions: ExtensionTypeVersionsOperations operations
    :vartype extension_type_versions:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.ExtensionTypeVersionsOperations
    :ivar location_extension_types: LocationExtensionTypesOperations operations
    :vartype location_extension_types:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.LocationExtensionTypesOperations
    :ivar extensions: ExtensionsOperations operations
    :vartype extensions:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.ExtensionsOperations
    :ivar operation_status: OperationStatusOperations operations
    :vartype operation_status:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.OperationStatusOperations
    :ivar flux_configurations: FluxConfigurationsOperations operations
    :vartype flux_configurations:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.FluxConfigurationsOperations
    :ivar flux_config_operation_status: FluxConfigOperationStatusOperations operations
    :vartype flux_config_operation_status:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.FluxConfigOperationStatusOperations
    :ivar source_control_configurations: SourceControlConfigurationsOperations operations
    :vartype source_control_configurations:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.SourceControlConfigurationsOperations
    :ivar operations: Operations operations
    :vartype operations:
     azure.mgmt.kubernetesconfiguration.v2022_01_01_preview.aio.operations.Operations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    :param base_url: Service URL. Default value is 'https://management.azure.com'.
    :type base_url: str
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    """
    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        self._config = SourceControlConfigurationClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
        # Build (de)serializers over every model class exported by the package.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        # Service-side validation is authoritative; skip client-side checks.
        self._serialize.client_side_validation = False
        # One operations group per service area, all sharing the same pipeline.
        self.cluster_extension_type = ClusterExtensionTypeOperations(self._client, self._config, self._serialize, self._deserialize)
        self.cluster_extension_types = ClusterExtensionTypesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.extension_type_versions = ExtensionTypeVersionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.location_extension_types = LocationExtensionTypesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.extensions = ExtensionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.operation_status = OperationStatusOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flux_configurations = FluxConfigurationsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.flux_config_operation_status = FluxConfigOperationStatusOperations(self._client, self._config, self._serialize, self._deserialize)
        self.source_control_configurations = SourceControlConfigurationsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
    def _send_request(
        self,
        request: HttpRequest,
        **kwargs: Any
    ) -> Awaitable[AsyncHttpResponse]:
        """Runs the network request through the client's chained policies.
        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = await client._send_request(request)
        <AsyncHttpResponse: 200 OK>
        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """
        # Copy first so the caller's request object is never mutated.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)
    async def METHOD_NAME(self) -> None:
        """Close the underlying pipeline client and release its transport."""
        await self._client.METHOD_NAME()
    async def __aenter__(self) -> "SourceControlConfigurationClient":
        # Entering the client enters the pipeline's async context as well.
        await self._client.__aenter__()
        return self
    async def __aexit__(self, *exc_details) -> None:
        await self._client.__aexit__(*exc_details)
6,040 | test taskstatus list logged in | import urllib.parse
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Taskstatus
class TaskstatusViewTestCase(TestCase):
    """taskstatus view tests"""
    @classmethod
    def setUpTestData(cls):
        # create object
        Taskstatus.objects.create(taskstatus_name='taskstatus_1')
        # create user
        User.objects.create_user(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
    def test_taskstatus_list_not_logged_in(self):
        """list view redirects anonymous users to the login page"""
        # create url
        destination = '/login/?next=' + urllib.parse.quote('/taskstatus/', safe='')
        # get response
        response = self.client.get('/taskstatus/', follow=True)
        # compare
        self.assertRedirects(
            response, destination, status_code=302, target_status_code=200
        )
    def METHOD_NAME(self):
        """list view returns 200 for an authenticated user"""
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # get response
        response = self.client.get('/taskstatus/')
        # compare
        self.assertEqual(response.status_code, 200)
    def test_taskstatus_list_template(self):
        """list view renders the taskstatus list template"""
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # get response
        response = self.client.get('/taskstatus/')
        # compare
        self.assertTemplateUsed(
            response, 'dfirtrack_main/taskstatus/taskstatus_list.html'
        )
    def test_taskstatus_list_get_user_context(self):
        """list view exposes the logged-in user in the template context"""
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # get response
        response = self.client.get('/taskstatus/')
        # compare
        self.assertEqual(str(response.context['user']), 'testuser_taskstatus')
    def test_taskstatus_list_redirect(self):
        """list view permanently redirects when the trailing slash is missing"""
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # create url
        destination = urllib.parse.quote('/taskstatus/', safe='/')
        # get response
        response = self.client.get('/taskstatus', follow=True)
        # compare
        self.assertRedirects(
            response, destination, status_code=301, target_status_code=200
        )
    def test_taskstatus_detail_not_logged_in(self):
        """detail view redirects anonymous users to the login page"""
        # get object
        taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
        # create url
        destination = '/login/?next=' + urllib.parse.quote(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id) + '/', safe=''
        )
        # get response
        response = self.client.get(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id) + '/', follow=True
        )
        # compare
        self.assertRedirects(
            response, destination, status_code=302, target_status_code=200
        )
    def test_taskstatus_detail_logged_in(self):
        """detail view returns 200 for an authenticated user"""
        # get object
        taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # get response
        response = self.client.get(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id) + '/'
        )
        # compare
        self.assertEqual(response.status_code, 200)
    def test_taskstatus_detail_template(self):
        """detail view renders the taskstatus detail template"""
        # get object
        taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # get response
        response = self.client.get(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id) + '/'
        )
        # compare
        self.assertTemplateUsed(
            response, 'dfirtrack_main/taskstatus/taskstatus_detail.html'
        )
    def test_taskstatus_detail_get_user_context(self):
        """detail view exposes the logged-in user in the template context"""
        # get object
        taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # get response
        response = self.client.get(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id) + '/'
        )
        # compare
        self.assertEqual(str(response.context['user']), 'testuser_taskstatus')
    def test_taskstatus_detail_redirect(self):
        """detail view permanently redirects when the trailing slash is missing"""
        # get object
        taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
        # login testuser
        self.client.login(
            username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9'
        )
        # create url
        destination = urllib.parse.quote(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id) + '/', safe='/'
        )
        # get response
        response = self.client.get(
            '/taskstatus/' + str(taskstatus_1.taskstatus_id), follow=True
        )
        # compare
        self.assertRedirects(
            response, destination, status_code=301, target_status_code=200
        )
6,041 | get bb ellipse | """
`regfilter` provides functions for filtering out regions outside the image space.
:Authors: Mihai Cara
:License: :doc:`LICENSE`
"""
__author__ = 'Mihai Cara'
def fast_filter_outer_regions(reglist, width, height, origin=1):
    """Remove, in place, regions whose bounding box lies entirely outside the
    ``width`` x ``height`` image rectangle anchored at ``origin``."""
    # fast_filter_outer_regions filters regions that are outside a rectangle
    # ('image's rectangle') of width 'width' and height 'height' that is has the
    # bottom-left corner at (origin,origin). This function is based on checking
    # for the intersection of the image's rectangle with the bounding box of the
    # regions and therefore it is approximate (some regions that in reality are
    # not within the image's rectangle will not be filtered out if their
    # bounding box still intersects the image rectangle even though the shape
    # itself does not intersect the image's rectangle.
    # Iterate backwards so deletions do not shift the indices left to visit.
    for k in range(len(reglist)-1,-1,-1):
        reg = reglist[k]
        regname = reg.name.lower()
        # Dispatch on the region type to compute its bounding box.
        if regname[:3] == 'cir' or regname == 'annulus':
            blc, trc = _get_bb_circle(reg)
        elif regname[-3:] == 'box':
            blc, trc = _get_bb_box(reg)
        elif regname == 'ellipse':
            blc, trc = METHOD_NAME(reg)
        elif regname == 'polygon':
            blc, trc = _get_bb_polygon(reg)
        elif regname == 'point':
            # Points are tested directly, without a bounding box.
            x = reg.coord_list[0]
            y = reg.coord_list[1]
            if not _is_point_inside(width, height, x, y, origin=origin):
                del reglist[k]
            continue
        elif regname[:4] == 'rect':
            blc, trc = _get_bb_rect(reg)
        elif regname == 'panda':
            blc, trc = _get_bb_circle(reg, True)
        elif regname == 'epanda':
            blc, trc = METHOD_NAME(reg, True)
        elif regname == 'bpanda':
            blc, trc = _get_bb_box(reg, True)
        else:
            # Unknown region types are always kept.
            continue
        if not _is_rect_inside(width, height, blc, trc, origin=origin):
            del reglist[k]
            continue
def _is_rect_inside(w1, h1, blc2, trc2, origin=1):
pad = 0.5
o = origin-pad
return ((o < trc2[0]) and (o + w1 > blc2[0]) \
and (o < trc2[1]) and (o + h1 > blc2[1]))
def _is_point_inside(w, h, x, y, origin=1):
pad = 0.5
o = origin-pad
return (o < x and (o + w > x) and (o < y) and (o + h > y))
def _get_bb_rect(shape):
# CIAO rectangles
return (shape.coord_list[0],shape.coord_list[2]), \
(shape.coord_list[1],shape.coord_list[3])
def _get_bb_box(shape, bpanda=False):
from math import sin, cos, radians
# check if angle is provided:
rem = len(shape.coord_list) % 2
# check if bpanda:
pnd = 1 if bpanda else 0
xc = shape.coord_list[0]
yc = shape.coord_list[1]
w = shape.coord_list[-2-rem-pnd] / 2.0
h = shape.coord_list[-1-rem-pnd] / 2.0
th = radians(shape.coord_list[-1]) if rem > 0 else 0.0
cs = cos(th)
sn = sin(th)
xm = max(abs(w*cs-h*sn),abs(w*cs+h*sn))
ym = max(abs(w*sn+h*cs),abs(w*sn-h*cs))
return (xc-xm,yc-ym),(xc+xm,yc+ym)
def _get_bb_circle(shape, panda=False):
# check if panda:
pnd = 1 if panda else 0
xc = shape.coord_list[0]
yc = shape.coord_list[1]
r = shape.coord_list[-1-pnd]
return (xc-r,yc-r),(xc+r,yc+r)
def METHOD_NAME(shape, epanda=False):
    """Bounding box of a (possibly rotated) ellipse region; handles epanda too."""
    from math import cos, radians, sin, sqrt
    coords = shape.coord_list
    # An odd-length coordinate list carries a trailing rotation angle.
    has_angle = len(coords) % 2
    # epanda regions carry one extra trailing value before the semi-axes.
    skip = 1 if epanda else 0
    center_x, center_y = coords[0], coords[1]
    semi_a = coords[-2 - has_angle - skip]
    semi_b = coords[-1 - has_angle - skip]
    angle = radians(coords[-1]) if has_angle > 0 else 0.0
    c = cos(angle)
    s = sin(angle)
    # Exact axis-aligned extents of a rotated ellipse.
    extent_x = sqrt((semi_a * c) ** 2 + (semi_b * s) ** 2)
    extent_y = sqrt((semi_a * s) ** 2 + (semi_b * c) ** 2)
    return (center_x - extent_x, center_y - extent_y), \
           (center_x + extent_x, center_y + extent_y)
def _get_bb_point(shape):
xc = shape.coord_list[0]
yc = shape.coord_list[1]
return (xc-0.5,yc-0.5),(xc+0.5,yc+0.5)
def _get_bb_polygon(shape):
xs = shape.coord_list[0::2]
ys = shape.coord_list[1::2]
minx = min(xs)
maxx = max(xs)
miny = min(ys)
maxy = max(ys)
return (minx,miny),(maxx,maxy) |
6,042 | setup test environment | from __future__ import print_function
import os
import re
import sys
import time
import signal
import traceback
import warnings
import __res
from __res import importer
def METHOD_NAME():
    """Best-effort initialisation of the yatest runtime config; silently a
    no-op when the yatest packages are unavailable."""
    try:
        from yatest_lib.ya import Ya
        import yatest.common as yc
        yc.runtime._set_ya_config(ya=Ya())
    except ImportError:
        pass
def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
    """
    tests all bundled modules are importable
    just add
    "PEERDIR(library/python/import_test)" to your CMakeLists.txt and
    "from import_test import test_imports" to your python test source file.
    """
    # Py2/py3 shim: decode resource values only when bytes is not str.
    if not isinstance(b'', str):
        def str_(s):
            return s.decode('UTF-8')
    else:
        def str_(s):
            return s
    exceptions = list(no_check)
    # Extra exclusion patterns can be baked into the binary's resources.
    for key, _ in __res.iter_keys(b'py/no_check_imports/'):
        exceptions += str_(__res.find(key)).split()
    if exceptions:
        exceptions.sort()
        print('NO_CHECK_IMPORTS', ' '.join(exceptions))
    # all test modules get imported when tests are run
    exceptions.append('__tests__.*')
    # Translate glob-style exclusions into a single alternation regex.
    patterns = [re.escape(s).replace(r'\*', r'.*') for s in exceptions]
    rx = re.compile('^({})$'.format('|'.join(patterns)))
    failed = []
    import_times = {}
    def norm(s):
        # Sort "pkg.__init__" next to "pkg".
        return (s[:-9] if s.endswith('.__init__') else s)
    modules = sys.extra_modules | set(extra)
    modules = sorted(modules, key=norm)
    if py_main:
        # The program entry module is imported first.
        modules = [py_main] + modules
    for module in modules:
        # Explicitly requested 'extra' modules are never skipped.
        if module not in extra and (rx.search(module) or skip_func and skip_func(module)):
            print('SKIP', module)
            continue
        name = module.rsplit('.', 1)[-1]
        # Importing a guard-less __main__ would execute the program.
        if name == '__main__' and 'if __name__ ==' not in importer.get_source(module):
            print('SKIP', module, '''without "if __name__ == '__main__'" check''')
            continue
        def print_backtrace_marked(e):
            # Prefix every traceback line so failures are easy to grep in logs.
            tb_exc = traceback.format_exception(*e)
            for item in tb_exc:
                for line in item.splitlines():
                    print('FAIL:', line, file=sys.stderr)
        try:
            print('TRY', module)
            # XXX waiting for py3 to use print(..., flush=True)
            sys.stdout.flush()
            s = time.time()
            with warnings.catch_warnings():
                warnings.filterwarnings(action="ignore", category=DeprecationWarning)
                if module == '__main__':
                    importer.load_module('__main__', '__main__py')
                elif module.endswith('.__init__'):
                    __import__(module[:-len('.__init__')])
                else:
                    __import__(module)
            delay = time.time() - s
            import_times[str(module)] = delay
            print('OK ', module, '{:.3f}s'.format(delay))
        except Exception as e:
            # Import failed: record it and continue with the next module.
            print('FAIL:', module, e, file=sys.stderr)
            print_backtrace_marked(sys.exc_info())
            failed.append('{}: {}'.format(module, e))
        except BaseException:
            # KeyboardInterrupt/SystemExit etc.: record, then re-raise.
            e = sys.exc_info()
            print('FAIL:', module, e, file=sys.stderr)
            print_backtrace_marked(e)
            failed.append('{}: {}'.format(module, e))
            raise
    print("Slowest imports:")
    for m, t in sorted(import_times.items(), key=lambda x: x[1], reverse=True)[:30]:
        print(' ', '{:.3f}s'.format(t), m)
    if failed:
        raise ImportError('modules not imported:\n' + '\n'.join(failed))
test_imports = check_imports
def main():
    """Entry point: import every bundled module and exit non-zero on failure."""
    METHOD_NAME()
    # Remaining CLI arguments are module patterns to skip.
    skip_names = sys.argv[1:]
    try:
        import faulthandler
    except ImportError:
        faulthandler = None
    if faulthandler:
        # Dump python backtrace in case of any errors
        faulthandler.enable()
        if hasattr(signal, "SIGUSR2"):
            # SIGUSR2 is used by test_tool to teardown tests
            faulthandler.register(signal.SIGUSR2, chain=True)
    # Marker lets imported modules detect they run under the import test.
    os.environ['Y_PYTHON_IMPORT_TEST'] = ''
    # We should initialize Django before importing any applications
    if os.getenv('DJANGO_SETTINGS_MODULE'):
        try:
            import django
        except ImportError:
            pass
        else:
            django.setup()
    py_main = __res.find('PY_MAIN')
    if py_main:
        # The PY_MAIN resource is "module:function"; only the module matters.
        py_main_module = py_main.split(b':', 1)[0].decode('UTF-8')
    else:
        py_main_module = None
    try:
        check_imports(no_check=skip_names, py_main=py_main_module)
    except Exception:
        sys.exit(1)
6,043 | test create metadata | """
:codeauthor: Ted Strzalkowski (tedski@gmail.com)
tests.unit.modules.mdadm_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import re
import pytest
import salt.modules.mdadm_raid as mdadm
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
    # No loader dunders are needed; an empty mapping keeps mdadm.__salt__
    # available for patch.dict() in the tests below.
    return {mdadm: {}}
def test_create():
    """mdadm.create builds the expected ``mdadm -C`` command line."""
    mock = MagicMock(return_value="salt")
    # Stub out the shell call and the binary lookup.
    with patch.dict(mdadm.__salt__, {"cmd.run": mock}), patch(
        "salt.utils.path.which", lambda exe: exe
    ):
        ret = mdadm.create(
            "/dev/md0",
            5,
            devices=["/dev/sdb1", "/dev/sdc1", "/dev/sdd1"],
            test_mode=False,
            force=True,
            chunk=256,
        )
        assert "salt" == ret
        mock.assert_called_with(
            [
                "mdadm",
                "-C",
                "/dev/md0",
                "-R",
                "-v",
                "-l",
                "5",
                "--force",
                "--chunk",
                "256",
                "-e",
                "default",
                "-n",
                "3",
                "/dev/sdb1",
                "/dev/sdc1",
                "/dev/sdd1",
            ],
            python_shell=False,
        )
def METHOD_NAME():
    """mdadm.create forwards an explicit metadata version via ``-e``."""
    mock = MagicMock(return_value="salt")
    # Stub out the shell call and the binary lookup.
    with patch.dict(mdadm.__salt__, {"cmd.run": mock}), patch(
        "salt.utils.path.which", lambda exe: exe
    ):
        ret = mdadm.create(
            "/dev/md0",
            5,
            devices=["/dev/sdb1", "/dev/sdc1", "/dev/sdd1"],
            metadata=0.9,
            test_mode=False,
            force=True,
            chunk=256,
        )
        assert "salt" == ret
        mock.assert_called_with(
            [
                "mdadm",
                "-C",
                "/dev/md0",
                "-R",
                "-v",
                "-l",
                "5",
                "--force",
                "--chunk",
                "256",
                "-e",
                "0.9",
                "-n",
                "3",
                "/dev/sdb1",
                "/dev/sdc1",
                "/dev/sdd1",
            ],
            python_shell=False,
        )
def test_create_test_mode():
    """In test mode mdadm.create returns the command string and runs nothing."""
    mock = MagicMock()
    with patch.dict(mdadm.__salt__, {"cmd.run": mock}):
        ret = mdadm.create(
            "/dev/md0",
            5,
            devices=["/dev/sdb1", "/dev/sdc1", "/dev/sdd1"],
            force=True,
            chunk=256,
            test_mode=True,
        )
        # Compare sorted tokens, since argument order is not guaranteed.
        assert sorted(
            "mdadm -C /dev/md0 -R -v --chunk 256 "
            "--force -l 5 -e default -n 3 "
            "/dev/sdb1 /dev/sdc1 /dev/sdd1".split()
        ) == sorted(ret.split())
        assert not mock.called, "test mode failed, cmd.run called"
def test_examine():
    """
    Test for mdadm_raid.examine
    """
    mock = MagicMock(
        return_value=(
            "ARRAY /dev/md/pool metadata=1.2"
            " UUID=567da122:fb8e445e:55b853e0:81bd0a3e name=positron:pool"
        )
    )
    with patch.dict(mdadm.__salt__, {"cmd.run_stdout": mock}):
        # examine() splits the output on the first "=" into a key/value pair.
        assert mdadm.examine("/dev/md0") == {
            "ARRAY /dev/md/pool metadata": (
                "1.2 UUID=567da122:fb8e445e:55b853e0:81bd0a3e" " name=positron:pool"
            )
        }
        mock.assert_called_with(
            "mdadm -Y -E /dev/md0", ignore_retcode=False, python_shell=False
        )
def test_examine_quiet():
    """
    Test for mdadm_raid.examine

    With quiet=True the return code is ignored and empty output yields {}.
    """
    mock = MagicMock(return_value="")
    with patch.dict(mdadm.__salt__, {"cmd.run_stdout": mock}):
        assert mdadm.examine("/dev/md0", quiet=True) == {}
        mock.assert_called_with(
            "mdadm -Y -E /dev/md0", ignore_retcode=True, python_shell=False
        )
def test_device_match_regex_pattern():
    """The volume regex matches the exact device path, not prefixes of it."""
    assert re.match(
        mdadm._VOL_REGEX_PATTERN_MATCH.format("/dev/md/1"),
        "ARRAY /dev/md/1 metadata=1.2 UUID=51f245bc:a1402c8a:2d598e79:589c07cf"
        " name=tst-ob-001:1",
    )
    # "/dev/md/10" must NOT be matched by the "/dev/md/1" pattern.
    assert not re.match(
        mdadm._VOL_REGEX_PATTERN_MATCH.format("/dev/md/1"),
        "ARRAY /dev/md/10 metadata=1.2 UUID=51f245bc:a1402c8a:2d598e79:589c07cf"
        " name=tst-ob-001:1",
    )
6,044 | id | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'ListStaticSiteConfiguredRolesResult',
'AwaitableListStaticSiteConfiguredRolesResult',
'list_static_site_configured_roles',
'list_static_site_configured_roles_output',
]
@pulumi.output_type
class ListStaticSiteConfiguredRolesResult:
    """
    String list resource.
    """
    def __init__(__self__, METHOD_NAME=None, kind=None, name=None, properties=None, type=None):
        # Validate and store each field; pulumi.set routes the values through
        # the @pulumi.output_type storage.
        if METHOD_NAME and not isinstance(METHOD_NAME, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", METHOD_NAME)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if properties and not isinstance(properties, list):
            raise TypeError("Expected argument 'properties' to be a list")
        pulumi.set(__self__, "properties", properties)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def METHOD_NAME(self) -> str:
        """
        Resource Id.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Kind of resource.
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> Sequence[str]:
        """
        List of string resources.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableListStaticSiteConfiguredRolesResult(ListStaticSiteConfiguredRolesResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # The dead "yield" makes this method a generator, so the already
        # resolved result can be awaited by async callers.
        if False:
            yield self
        return ListStaticSiteConfiguredRolesResult(
            METHOD_NAME=self.METHOD_NAME,
            kind=self.kind,
            name=self.name,
            properties=self.properties,
            type=self.type)
def list_static_site_configured_roles(name: Optional[str] = None,
                                      resource_group_name: Optional[str] = None,
                                      opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListStaticSiteConfiguredRolesResult:
    """
    Description for Lists the roles configured for the static site.


    :param str name: Name of the static site.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    """
    __args__ = dict()
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    # Layer the caller-supplied options over the module-wide invoke defaults.
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Invoke the provider function and deserialize straight into the typed result.
    __ret__ = pulumi.runtime.invoke('azure-native:web/v20220901:listStaticSiteConfiguredRoles', __args__, opts=opts, typ=ListStaticSiteConfiguredRolesResult).value
    return AwaitableListStaticSiteConfiguredRolesResult(
        METHOD_NAME=pulumi.get(__ret__, 'id'),
        kind=pulumi.get(__ret__, 'kind'),
        name=pulumi.get(__ret__, 'name'),
        properties=pulumi.get(__ret__, 'properties'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(list_static_site_configured_roles)
def list_static_site_configured_roles_output(name: Optional[pulumi.Input[str]] = None,
                                             resource_group_name: Optional[pulumi.Input[str]] = None,
                                             opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListStaticSiteConfiguredRolesResult]:
    """
    Description for Lists the roles configured for the static site.


    :param str name: Name of the static site.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    """
    # Body is intentionally empty: lift_output_func wraps the plain invoke
    # above so it accepts pulumi Inputs and returns an Output.
    ...
6,045 | get job group | #!/usr/bin/env python
"""
Retrieves job status according to given criteria.
Example:
$ dstat -a -u your.dirac.username
"""
import datetime
from DIRAC import exit as DIRACExit, S_OK, S_ERROR
from DIRAC import gLogger
from DIRAC.Interfaces.Utilities.DCommands import DSession
from DIRAC.Interfaces.Utilities.DConfigCache import ConfigCache
from DIRAC.Interfaces.Utilities.DCommands import ArrayFormatter
from DIRAC.Core.Base.Script import Script
from DIRAC.Core.Utilities.TimeUtilities import toString, day
from DIRAC.WorkloadManagementSystem.Client.JobStatus import JOB_STATES, JOB_FINAL_STATES
from DIRAC.WorkloadManagementSystem.Client.JobMonitoringClient import (
JobMonitoringClient,
)
def selectJobs(owner, date, jobGroup, jobName):
    """Query the job monitoring service for job IDs matching the criteria.

    Builds a condition dict from the non-empty selectors and returns the raw
    DIRAC result structure from ``JobMonitoringClient.getJobs``.
    """
    conditions = {"Owner": owner}
    # Only add the optional selectors that were actually supplied.
    for key, value in (("JobGroup", jobGroup), ("JobName", jobName)):
        if value:
            conditions[key] = value
    return JobMonitoringClient().getJobs(conditions, date)
def getJobSummary(jobs):
    """Fetch a per-job summary dict for *jobs* from the monitoring service.

    Returns S_OK({}) for an empty job list, the failed result unchanged on a
    service error, or S_OK(summary_dict) on success.
    """
    if not jobs:
        return S_OK({})
    monitoring = JobMonitoringClient()
    result = monitoring.getJobsSummary(jobs)
    if not result["OK"]:
        return result
    if isinstance(result["Value"], str):
        # The service may hand back the summary as a stringified dict.
        # Parse it with ast.literal_eval instead of eval(): literal_eval only
        # accepts Python literals, so a corrupt or malicious payload cannot
        # execute arbitrary code.  Catch only the parse errors it raises
        # rather than a bare except, which would swallow e.g. KeyboardInterrupt.
        import ast

        try:
            jobSummary = ast.literal_eval(result["Value"])
        except (ValueError, SyntaxError):
            return S_ERROR("Problem while converting result from job monitoring")
    else:
        jobSummary = result["Value"]
    return S_OK(jobSummary)
def chunks(l, n):
    """Split list *l* into consecutive pieces of at most *n* elements."""
    pieces = []
    for start in range(0, len(l), n):
        pieces.append(l[start : start + n])
    return pieces
# Optional job fields users may request via --Fields in addition to the defaults.
EXTRA_DISPLAY_COLUMNS = [
    "JobType",
    "ApplicationStatus",
    "StartExecTime",
    "EndExecTime",
    "CPUTime",
]
# Job fields shown when no --Fields switch (and no dstat_fields env) is given.
DEFAULT_DISPLAY_COLUMNS = [
    "Owner",
    "JobName",
    "OwnerGroup",
    "JobGroup",
    "Site",
    "Status",
    "MinorStatus",
    "SubmissionTime",
]
class Params:
    """Container for dstat's command-line options and their accessors.

    Each ``set*`` method doubles as a callback for ``Script.registerSwitch``
    and returns ``S_OK`` so DIRAC's option parser treats the switch as handled.
    """

    def __init__(self):
        self.__session = None
        self.user = None
        # Default: show only jobs that are not yet in a final state.
        self.status = [e.lower() for e in set(JOB_STATES) - set(JOB_FINAL_STATES)]
        self.fmt = "pretty"
        # Maximum age of jobs to display, in days.
        self.jobDate = 10
        self.fields = DEFAULT_DISPLAY_COLUMNS
        self.jobGroup = None
        self.jobName = None
        self.inputFile = None

    def setSession(self, session):
        # NOTE(review): the stored session is never read back within this
        # class -- confirm whether keeping the reference is still needed.
        self.__session = session
        # A user-configured field list (dstat_fields) overrides the defaults.
        customFields = session.getEnv("dstat_fields", "")["Value"]
        if customFields:
            self.fields = customFields.split(",")
        return S_OK()

    def setUser(self, arg=None):
        self.user = arg
        return S_OK()

    def getUser(self):
        return self.user

    def setStatus(self, arg=None):
        # Comma-separated, case-insensitive list of statuses.
        self.status = arg.lower().split(",")
        return S_OK()

    def setStatusAll(self, arg=None):
        # Select every known job state (used by the -a/--StatusAll switch).
        self.status = [e.lower() for e in JOB_STATES]
        return S_OK()

    def getStatus(self):
        return self.status

    def setFmt(self, arg=None):
        self.fmt = arg.lower()
        return S_OK()

    def getFmt(self):
        return self.fmt

    def setJobDate(self, arg=None):
        self.jobDate = int(arg)
        return S_OK()

    def getJobDate(self):
        return self.jobDate

    def setFields(self, arg=None):
        self.fields = arg.split(",")
        return S_OK()

    def getFields(self):
        return self.fields

    def setJobGroup(self, arg=None):
        self.jobGroup = arg
        return S_OK()

    def METHOD_NAME(self):
        return self.jobGroup

    def setJobName(self, arg=None):
        self.jobName = arg
        return S_OK()

    def getJobName(self):
        return self.jobName

    def setInputFile(self, arg=None):
        self.inputFile = arg
        return S_OK()

    def getInputFile(self):
        return self.inputFile
@Script()
def main():
    """Entry point for dstat: select jobs, fetch their summaries, print them.

    Job IDs may come from positional arguments, an input file (-i), or -- when
    neither is given -- a monitoring-service query filtered by owner, group,
    name and age.  Exits via DIRACExit with a non-zero code on failure.
    """
    params = Params()
    Script.registerArgument(["JobID: DIRAC Job ID"], mandatory=False)
    Script.registerSwitch("u:", "User=", "job owner", params.setUser)
    Script.registerSwitch(
        "S:",
        "Status=",
        f"select job by status (comma separated list of statuses in: {','.join(JOB_STATES)})",
        params.setStatus,
    )
    Script.registerSwitch("a", "StatusAll", "display jobs of any status", params.setStatusAll)
    Script.registerSwitch("g:", "JobGroup=", "select job by job group", params.setJobGroup)
    Script.registerSwitch("n:", "JobName=", "select job by job name", params.setJobName)
    Script.registerSwitch("f:", "Fmt=", "display format (pretty, csv, json)", params.setFmt)
    Script.registerSwitch("D:", "JobDate=", "age of jobs to display (in days)", params.setJobDate)
    Script.registerSwitch(
        "F:",
        "Fields=",
        "display list of job fields (comma separated list of fields. e.g. %s)"
        % ",".join(DEFAULT_DISPLAY_COLUMNS + EXTRA_DISPLAY_COLUMNS),
        params.setFields,
    )
    Script.registerSwitch("i:", "input-file=", "read JobIDs from file", params.setInputFile)
    configCache = ConfigCache()
    Script.parseCommandLine(ignoreErrors=True)
    configCache.cacheConfig()

    args = Script.getPositionalArgs()

    session = DSession()
    params.setSession(session)

    exitCode = 0

    if args:
        # handle comma separated list of JobIDs
        newargs = []
        for arg in args:
            newargs += arg.split(",")
        args = newargs

    jobs = args
    # PEP 8: test against the None sentinel with `is not None`, not `!=`.
    if params.getInputFile() is not None:
        with open(params.getInputFile()) as f:
            for line in f.readlines():
                jobs += line.split(",")

    if not jobs:
        # No explicit IDs: query the monitoring service by owner/group/name/age.
        # time interval
        jobDate = toString(datetime.datetime.utcnow().date() - params.getJobDate() * day)

        # job owner
        userName = params.getUser()
        if userName is None:
            result = session.getUserName()
            if result["OK"]:
                userName = result["Value"]
        elif userName == "*" or userName.lower() == "__all__":
            # jobs from all users
            userName = None

        result = selectJobs(
            owner=userName,
            date=jobDate,
            jobGroup=params.METHOD_NAME(),
            jobName=params.getJobName(),
        )

        if not result["OK"]:
            gLogger.error(result["Message"])
            DIRACExit(-1)
        jobs = result["Value"]

    try:
        jobs = [int(job) for job in jobs]
    except ValueError:
        # Only int() conversion failures are expected here; anything else
        # should propagate rather than be silently mapped to exit code 2.
        gLogger.error("Expected integer for jobID")
        exitCode = 2
        DIRACExit(exitCode)

    summaries = {}
    statuses = params.getStatus()

    # split summary requests in chunks of a reasonable size (saves memory)
    for chunk in chunks(jobs, 1000):
        result = getJobSummary(chunk)
        if not result["OK"]:
            gLogger.error(result["Message"])
            DIRACExit(2)

        # filter on job statuses
        if "all" in statuses:
            summaries = result["Value"]
        else:
            for j in result["Value"]:
                if result["Value"][j]["Status"].lower() in statuses:
                    summaries[j] = result["Value"][j]

    # JobID is kept as an int so the output sorts numerically.
    for s in summaries.values():
        s["JobID"] = int(s["JobID"])

    af = ArrayFormatter(params.getFmt())
    gLogger.notice(af.dictFormat(summaries, ["JobID"] + params.getFields(), sort="JobID"))

    DIRACExit(exitCode)


if __name__ == "__main__":
    main()
6,046 | f1 score | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Official evaluation script for v1.1 of the SQuAD dataset.
From https://github.com/allenai/bi-att-flow/blob/master/squad/evaluate-v1.1.py
"""
import re
import string
from collections import Counter, abc
from typing import Any, Callable, Dict, List, TypeVar
from neural_compressor.utils import logger
def normalize_answer(text: str) -> str:
    """Normalize an answer string for token-level comparison.

    Lowercases the text, strips all punctuation, drops the articles
    a/an/the, and collapses every run of whitespace (spaces, tabs,
    newlines) into a single space.

    Args:
        text: The text to be normalized.

    Returns:
        The normalized text.
    """
    lowered = text.lower()
    # Punctuation must go before article removal so e.g. "a," is handled.
    no_punct = "".join(ch for ch in lowered if ch not in string.punctuation)
    no_articles = re.sub(r"\b(a|an|the)\b", " ", no_punct)
    return " ".join(no_articles.split())
def METHOD_NAME(prediction: abc.Sequence, ground_truth: abc.Sequence):
    """Calculate the token-level F1 score of *prediction* vs *ground_truth*.

    F1 is the harmonic mean of precision and recall over the multiset of
    shared tokens.

    Args:
        prediction: the predicted answer tokens.
        ground_truth: the correct answer tokens.

    Returns:
        The F1 score (0 when there is no token overlap).
    """
    assert isinstance(prediction, abc.Sequence) and isinstance(
        ground_truth, abc.Sequence
    ), "prediction and ground_truth should be Sequence"
    overlap = Counter(prediction) & Counter(ground_truth)
    shared = sum(overlap.values())
    if shared == 0:
        return 0
    precision = shared / len(prediction)
    recall = shared / len(ground_truth)
    return (2 * precision * recall) / (precision + recall)
# Generic operand type for the metric function's two token sequences.
T = TypeVar("T")


def metric_max_over_ground_truths(
    metric_fn: Callable[[T, T], float], prediction: str, ground_truths: List[str]
) -> float:
    """Return the best metric value of *prediction* over all ground truths.

    Both the prediction and each candidate answer are normalized and
    tokenized before being handed to *metric_fn*.

    Args:
        metric_fn: the function computing the metric for one pair.
        prediction: the prediction result.
        ground_truths: the list of correct answers.

    Returns:
        The maximum metric value across all ground truths.
    """
    return max(
        metric_fn(
            normalize_answer(prediction).split(),
            normalize_answer(truth).split(),
        )
        for truth in ground_truths
    )
def evaluate(predictions: Dict[str, str], dataset: List[Dict[str, Any]]) -> float:
    """Evaluate the average F1 score of Question-Answering results.

    The F1 score is the harmonic mean of the precision and recall.  For every
    question-and-answer in *dataset*, the best F1 of the prediction against
    any of the reference answers is accumulated; unanswered questions score 0.

    Args:
        predictions: Maps a question id to the predicted answer string.
        dataset: A list of articles; each article holds ``paragraphs``, each
            paragraph holds ``qas``, and each qa has ``id``, ``question`` and
            a list of ``answers`` with ``text`` fields.

    Returns:
        The mean F1 as a percentage (0.0 for an empty dataset).
    """
    f1 = total = 0
    for article in dataset:
        for paragraph in article["paragraphs"]:
            for qa in paragraph["qas"]:
                total += 1
                if qa["id"] not in predictions:
                    message = "Unanswered question " + qa["id"] + " will receive score 0."
                    logger.warning(message)
                    continue
                ground_truths = [answer["text"] for answer in qa["answers"]]
                prediction = predictions[qa["id"]]
                f1 += metric_max_over_ground_truths(METHOD_NAME, prediction, ground_truths)
    if total == 0:
        # An empty dataset would otherwise raise ZeroDivisionError.
        return 0.0
    return 100.0 * f1 / total
6,047 | get init state | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Optional, Dict, Any, TYPE_CHECKING
from mephisto.abstractions.blueprint import AgentState
import os
import time
from uuid import uuid4
from dataclasses import dataclass, fields
@dataclass
class RemoteRequest:
    """One remote procedure exchange: either an incoming call or an
    outgoing response, with its JSON payloads and creation time."""

    uuid: str
    target: str
    args_json: Optional[str]
    response_json: Optional[str]
    timestamp: float

    def to_dict(self):
        """Return the request as a plain field-name -> value dict."""
        return {f.name: getattr(self, f.name) for f in fields(self)}
class RemoteProcedureAgentState(AgentState):
    """
    Holds information about tasks with live interactions in a remote query model.

    Tracks the agent's initial task data, every RemoteRequest exchanged with
    the frontend, and the final submission, persisting all of it through the
    agent's db under the agent's data directory.
    """

    def _set_init_state(self, data: Any):
        """Set the initial state for this agent"""
        self.init_data: Optional[Dict[str, Any]] = data

    def METHOD_NAME(self) -> Optional[Dict[str, Any]]:
        """
        Return the initial state for this agent,
        None if no such state exists
        """
        if self.init_data is None:
            return None
        prev_requests = []
        if len(self.requests) > 0:
            # Replay past exchanges in chronological order so a reconnecting
            # frontend can reconstruct the interaction history.
            requests = self.requests.values()
            sorted_requests = sorted(requests, key=lambda x: x.timestamp)
            prev_requests = [r.to_dict() for r in sorted_requests]
        return {
            "task_data": self.init_data,
            "previous_requests": prev_requests,
        }

    def _get_expected_data_file(self) -> str:
        """Return the place we would expect to find data for this agent state"""
        agent_dir = self.agent.get_data_dir()
        os.makedirs(agent_dir, exist_ok=True)
        return os.path.join(agent_dir, "state.json")

    def _load_data(self) -> None:
        """Load stored data from a file to this object"""
        # Start from an empty state; overwrite below if a saved file exists.
        self.requests: Dict[str, RemoteRequest] = {}
        self.init_data = None
        self.final_submission: Optional[Dict[str, Any]] = None
        agent_file = self._get_expected_data_file()
        if self.agent.db.key_exists(agent_file):
            state = self.agent.db.read_dict(agent_file)
            self.requests = {x["uuid"]: RemoteRequest(**x) for x in state["requests"]}
            self.init_data = state["init_data"]
            self.final_submission = state["final_submission"]
            # Backwards compatibility for times
            if "start_time" in state:
                self.metadata.task_start = state["start_time"]
                self.metadata.task_end = state["end_time"]

    def get_data(self) -> Dict[str, Any]:
        """Return dict with the messages of this agent"""
        return {
            "final_submission": self.final_submission,
            "init_data": self.init_data,
            "requests": [r.to_dict() for r in self.requests.values()],
            "start_time": self.metadata.task_start,
            "end_time": self.metadata.task_end,
        }

    def get_parsed_data(self) -> Dict[str, Any]:
        """Return the formatted content"""
        # TODO implement actually getting this data
        return self.get_data()

    def _save_data(self) -> None:
        """Save all messages from this agent to"""
        agent_file = self._get_expected_data_file()
        self.agent.db.write_dict(agent_file, self.get_data())

    def update_data(self, live_update: Dict[str, Any]) -> None:
        """
        Append the incoming packet as well as who it came from
        """
        # A "handles" key marks an outgoing response (it names the request
        # being answered); anything else is an incoming frontend request.
        if "handles" in live_update:
            # outgoing: record under a freshly generated uuid
            response_id = str(uuid4())
            response = RemoteRequest(
                uuid=response_id,
                target=live_update["handles"],
                args_json=None,
                response_json=live_update["response"],
                timestamp=time.time(),
            )
            self.requests[response_id] = response
        else:
            # incoming: keyed by the frontend-supplied request_id
            request = RemoteRequest(
                uuid=live_update["request_id"],
                target=live_update["target"],
                args_json=live_update["args"],
                response_json=None,
                timestamp=time.time(),
            )
            self.requests[live_update["request_id"]] = request

    def _update_submit(self, submitted_data: Dict[str, Any]) -> None:
        """Append any final submission to this state"""
        self.final_submission = submitted_data
6,048 | run subprocess | """Read and write the machine's pretty hostname.
See the `name_management` package docstring for background on the pretty hostname
and how it's distinct from other names on the machine.
"""
import asyncio
from logging import getLogger
from typing import List, Union
_log = getLogger(__name__)
async def get_pretty_hostname(default: str = "no name set") -> str:
    """Get the currently-configured pretty hostname.

    May raise an exception from the underlying ``hostnamectl`` process
    if this happens to run at the same time systemd-hostnamed is restarting.
    """
    # NOTE: The `api` package also retrieves the pretty hostname.
    # This logic must be kept in sync with the logic in `api`.
    result = (
        await METHOD_NAME(
            command="hostnamectl",
            args=["--pretty", "status"],
        )
    ).decode("utf-8")
    # `hostnamectl` terminates its output with a newline that is not part of
    # the name.  Validate with an explicit check instead of `assert`, which
    # would be stripped when running under `python -O`.
    # TODO(mm, 2022-07-18): When we upgrade to systemd 249, use `hostnamectl --json`
    # for CLI output that we can parse more robustly.
    if not result.endswith("\n"):
        raise RuntimeError(f"Unexpected output from hostnamectl: {result!r}")
    # NOTE(review): the *default* parameter is currently never used; confirm
    # whether an empty pretty hostname should fall back to it.
    return result[:-1]
async def persist_pretty_hostname(name: str) -> str:
    """Change the robot's pretty hostname.

    Writes the new name to /etc/machine-info so it persists across reboots.

    :param name: The name to set.
    :returns: The name that was set. This may be different from ``name``,
              if the pretty hostname could not be written.
    """
    try:
        # We can't run `hostnamectl --pretty <name>` to write this for us
        # because it fails with a read-only filesystem error, for unknown reasons.
        _rewrite_machine_info(new_pretty_hostname=name)
        checked_name = name
    except OSError:
        # The write failed; fall back to reporting whatever name is
        # currently configured instead of the one we were asked to set.
        _log.exception("Could not set pretty hostname")
        checked_name = await get_pretty_hostname()

    # Now that we've rewritten /etc/machine-info to contain the new pretty hostname,
    # restart systemd-hostnamed so that commands like `hostnamectl status --pretty`
    # pick it up immediately.  The reload must happen after the file write.
    await METHOD_NAME(
        command="systemctl", args=["reload-or-restart", "systemd-hostnamed"]
    )

    return checked_name
def _rewrite_machine_info(new_pretty_hostname: str) -> None:
    """Write a new value for the pretty hostname.

    Reads the current /etc/machine-info (treating a missing or unreadable
    file as empty), replaces its PRETTY_HOSTNAME line, and writes it back.

    :raises OSError: If the new value could not be written.
    """
    try:
        with open("/etc/machine-info") as emi:
            contents = emi.read()
    except OSError:
        # A missing file is expected on first boot; start from empty.
        _log.exception("Couldn't read /etc/machine-info")
        contents = ""
    new_contents = _rewrite_machine_info_str(
        current_machine_info_contents=contents, new_pretty_hostname=new_pretty_hostname
    )
    with open("/etc/machine-info", "w") as emi:
        emi.write(new_contents)
def _rewrite_machine_info_str(
current_machine_info_contents: str, new_pretty_hostname: str
) -> str:
"""
Return current_machine_info_contents - the full contents of
/etc/machine-info - with the PRETTY_HOSTNAME=... line rewritten to refer
to new_pretty_hostname.
"""
current_lines = current_machine_info_contents.splitlines()
preserved_lines = [
ln for ln in current_lines if not ln.startswith("PRETTY_HOSTNAME")
]
# FIXME(mm, 2022-04-27): This will not correctly store the pretty hostname
# if it contains newlines or certain other special characters.
# https://github.com/Opentrons/opentrons/issues/9960
new_lines = preserved_lines + [f"PRETTY_HOSTNAME={new_pretty_hostname}"]
new_contents = "\n".join(new_lines) + "\n"
return new_contents
# TODO(mm, 2022-07-18): Deduplicate with identical subprocess error-checking code
# in .avahi and .static_hostname modules.
async def METHOD_NAME(
    command: Union[str, bytes],
    args: List[Union[str, bytes]],
) -> bytes:
    """Run *command* with *args*, returning its stdout bytes.

    Logs and raises RuntimeError if the process exits non-zero.
    """
    proc = await asyncio.create_subprocess_exec(
        command,
        *args,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate()
    if proc.returncode != 0:
        _log.error(
            f"Error calling {command!r}: {proc.returncode} "
            f"stdout: {stdout!r} stderr: {stderr!r}"
        )
        # TODO(mm, 2022-07-18): Use a structured and specific exception type
        # once this function is deduplicated.
        raise RuntimeError(f"Error calling {command!r}")
    return stdout
6,049 | tear down | # Copyright (c) Facebook, Inc. and its affiliates.
import unittest
import tests.test_utils as test_utils
import torch
from mmf.common.sample import SampleList
from mmf.models.vinvl import VinVLBase, VinVLForClassification, VinVLForPretraining
from mmf.utils.build import build_model
from mmf.utils.configuration import Configuration
from mmf.utils.env import setup_imports, teardown_imports
from mmf.utils.general import get_current_device
from omegaconf import OmegaConf
try:
from transformers3.modeling_bert import BertConfig
except ImportError:
from transformers.modeling_bert import BertConfig
class TestVinVLBase(unittest.TestCase):
    """Smoke test for the bare VinVL encoder (BERT with image features)."""

    def test_forward(self):
        # Build a BertImgModel-style config by augmenting a stock hf
        # BertConfig with VinVL's image-feature fields.
        img_feature_dim = 2054
        bert_model_name = "bert-base-uncased"
        use_img_layernorm = True
        img_layer_norm_eps = 1e-12
        bert_config = BertConfig.from_pretrained(bert_model_name)
        # augment hf BertConfig for vinvl BertImgModel config
        bert_config.img_feature_dim = img_feature_dim
        bert_config.use_img_layernorm = use_img_layernorm
        bert_config.img_layer_norm_eps = img_layer_norm_eps
        model = VinVLBase(bert_config)
        model.eval()
        model = model.to(get_current_device())

        bs = 8
        num_feats = 70
        max_sentence_len = 25
        input_ids = torch.ones((bs, max_sentence_len), dtype=torch.long)
        img_feat = torch.rand((bs, num_feats, img_feature_dim))
        with torch.no_grad():
            model_output = model(input_ids, img_feat).last_hidden_state
        # Output sequence length is text tokens + image features (25 + 70).
        self.assertEqual(model_output.shape, torch.Size([8, 95, 768]))
def mock_vinvl_input_tensors(
    cls, bs=8, num_feats=70, max_sentence_len=25, img_feature_dim=2054
):
    """Attach dummy VinVL input tensors (ids, masks, features, labels) to *cls*."""
    text_shape = (bs, max_sentence_len)
    cls.input_ids = torch.ones(text_shape, dtype=torch.long)
    cls.token_type_ids = torch.zeros_like(cls.input_ids)
    # -1 marks "not a masked-LM target" for every position.
    cls.lm_label_ids = -torch.ones_like(cls.input_ids).long()
    cls.img_feats = torch.rand((bs, num_feats, img_feature_dim))
    # The attention mask covers text tokens followed by image features.
    cls.attention_mask = torch.ones(
        (bs, max_sentence_len + num_feats), dtype=torch.long
    )
    cls.labels = torch.ones((bs, 1)).long()
    cls.contrastive_labels = torch.zeros((bs, 1)).long()
class TestVinVLForClassificationAndPretraining(unittest.TestCase):
    """Forward-pass smoke tests for the VinVL task heads."""

    def setUp(self):
        # Populate self.input_ids, self.img_feats, masks and labels.
        mock_vinvl_input_tensors(self)

    def test_classification_forward(self):
        model = VinVLForClassification().to(get_current_device())
        model.eval()
        with torch.no_grad():
            model_output = model(
                input_ids=self.input_ids,
                img_feats=self.img_feats,
                attention_mask=self.attention_mask,
                token_type_ids=self.token_type_ids,
                labels=self.labels,
            )
        # Classification output must expose scores and a cross-entropy loss.
        self.assertTrue("losses" in model_output)
        self.assertTrue("scores" in model_output)
        self.assertTrue("ce" in model_output["losses"])

    def test_pretraining_forward(self):
        model = VinVLForPretraining().to(get_current_device())
        model.eval()
        with torch.no_grad():
            model_output = model(
                img_feats=self.img_feats,
                attention_mask=self.attention_mask,
                token_type_ids=self.token_type_ids,
                input_ids_masked=self.input_ids,
                lm_label_ids=self.lm_label_ids,
                contrastive_labels=self.contrastive_labels,
                input_ids_corrupt=self.input_ids,
                token_type_ids_corrupt=self.token_type_ids,
                attention_mask_corrupt=self.attention_mask,
            )
        # Pretraining combines masked-LM and three-way contrastive losses.
        self.assertTrue("losses" in model_output)
        self.assertTrue("masked_lm_loss" in model_output["losses"])
        self.assertTrue("three_way_contrastive_loss" in model_output["losses"])
class TestVinVLModel(unittest.TestCase):
    """End-to-end tests of the registered "vinvl" model via mmf's build_model."""

    def setUp(self):
        test_utils.setup_proxy()
        setup_imports()
        model_name = "vinvl"
        args = test_utils.dummy_args(model=model_name, dataset="test")
        configuration = Configuration(args)
        config = configuration.get_config()
        model_config = config.model_config[model_name]
        model_config.model = model_name
        model_config.do_pretraining = False
        # Two config variants: a classification head (mlp over 3129 answers)
        # and a pretraining head (mlm), layered over the base model config.
        classification_config_dict = {
            "do_pretraining": False,
            "heads": {"mlp": {"num_labels": 3129}},
            "ce_loss": {"ignore_index": -1},
        }
        self.classification_config = OmegaConf.create(
            {**model_config, **classification_config_dict}
        )
        pretraining_config_dict = {
            "do_pretraining": True,
            "heads": {"mlm": {"hidden_size": 768}},
        }
        self.pretraining_config = OmegaConf.create(
            {**model_config, **pretraining_config_dict}
        )
        self.sample_list = self._get_sample_list()

    def METHOD_NAME(self):
        # Undo setup_imports() so registry state does not leak across tests.
        teardown_imports()

    def _get_sample_list(self):
        """Build a SampleList with all fields the vinvl model reads."""
        bs = 8
        num_feats = 70

        class MockObj:
            pass

        mock_input = MockObj()
        mock_vinvl_input_tensors(mock_input, bs=bs, num_feats=num_feats)

        input_mask = torch.ones_like(mock_input.input_ids)
        max_features = torch.ones((bs, num_feats)) * num_feats
        bbox = torch.randint(50, 200, (bs, num_feats, 4)).float()
        image_height = torch.randint(100, 300, (bs,))
        image_width = torch.randint(100, 300, (bs,))
        image_info = {
            "max_features": max_features,
            "bbox": bbox,
            "image_height": image_height,
            "image_width": image_width,
        }

        sample_list = SampleList()
        sample_list.add_field("input_ids", mock_input.input_ids)
        # The "corrupt" and "masked" variants reuse the same tensors here;
        # real training would supply genuinely corrupted/masked copies.
        sample_list.add_field("input_ids_corrupt", mock_input.input_ids)
        sample_list.add_field("input_ids_masked", mock_input.input_ids)
        sample_list.add_field("image_feature_0", mock_input.img_feats)
        sample_list.add_field("image_info_0", image_info)
        sample_list.add_field("input_mask", input_mask)
        sample_list.add_field("input_mask_corrupt", input_mask)
        sample_list.add_field("segment_ids", mock_input.token_type_ids)
        sample_list.add_field("segment_ids_corrupt", mock_input.token_type_ids)
        sample_list.add_field("labels", mock_input.labels)
        sample_list.add_field("contrastive_labels", mock_input.contrastive_labels)
        sample_list.add_field("lm_label_ids", mock_input.lm_label_ids)
        sample_list = sample_list.to(get_current_device())
        sample_list.dataset_name = "test"
        sample_list.dataset_type = "test"
        return sample_list

    def test_vinvl_for_classification(self):
        model_for_classification = build_model(self.classification_config)
        model_for_classification.eval()
        model_for_classification = model_for_classification.to(get_current_device())
        with torch.no_grad():
            model_output = model_for_classification(self.sample_list)
        self.assertTrue("losses" in model_output)
        self.assertTrue("ce" in model_output["losses"])

    def test_vinvl_for_pretraining(self):
        model_for_pretraining = build_model(self.pretraining_config)
        model_for_pretraining.eval()
        model_for_pretraining = model_for_pretraining.to(get_current_device())
        with torch.no_grad():
            model_output = model_for_pretraining(self.sample_list)
        self.assertTrue("losses" in model_output)
        self.assertTrue("masked_lm_loss" in model_output["losses"])
        self.assertTrue("three_way_contrastive_loss" in model_output["losses"])
6,050 | get nodes | """Utility functions for tests."""
import os
import subprocess
import tarfile
from contextlib import contextmanager
from glob import glob
from gzip import GzipFile
from io import BytesIO
import nibabel as nb
import numpy as np
import requests
from bids.layout import BIDSLayout
from nipype import logging
LOGGER = logging.getLogger("nipype.utils")
def METHOD_NAME(wf_results):
    """Map each node in a Nipype workflow result graph by its full name."""
    nodes_by_name = {}
    for node in wf_results.nodes:
        nodes_by_name[node.fullname] = node
    return nodes_by_name
def download_test_data(dset, data_dir=None):
    """Download test data.

    Fetches the named dataset archive from Box and unpacks it under
    *data_dir* (defaulting to the ``test_data`` folder next to the package's
    data directory).  Passing ``"*"`` downloads every known dataset.
    Existing dataset folders are left untouched.
    """
    URLS = {
        "fmriprepwithoutfreesurfer": (
            "https://upenn.box.com/shared/static/seyp1cu9w5v3ds6iink37hlsa217yge1.tar.gz"
        ),
        "nibabies": "https://upenn.box.com/shared/static/rsd7vpny5imv3qkd7kpuvdy9scpnfpe2.tar.gz",
        "ds001419": "https://upenn.box.com/shared/static/yye7ljcdodj9gd6hm2r6yzach1o6xq1d.tar.gz",
        "pnc": "https://upenn.box.com/shared/static/ui2847ys49d82pgn5ewai1mowcmsv2br.tar.gz",
    }
    if dset == "*":
        # Recurse once per known dataset; the return value is unused here.
        for k in URLS:
            download_test_data(k, data_dir=data_dir)

        return

    if dset not in URLS:
        raise ValueError(f"dset ({dset}) must be one of: {', '.join(URLS.keys())}")

    if not data_dir:
        data_dir = os.path.join(os.path.dirname(get_test_data_path()), "test_data")

    dset_name = dset
    if dset == "ds001419":
        # The ds001419 archive unpacks into a differently-named folder.
        dset_name = "ds001419-fmriprep"

    out_dir = os.path.join(data_dir, dset_name)

    if os.path.isdir(out_dir):
        LOGGER.info(
            f"Dataset {dset} already exists. "
            "If you need to re-download the data, please delete the folder."
        )
        return out_dir
    else:
        LOGGER.info(f"Downloading {dset} to {out_dir}")

    os.makedirs(out_dir, exist_ok=True)
    # Download into memory, then decompress and extract in one pass.
    # NOTE(review): extractall trusts member paths inside the downloaded
    # archive; consider validating members against tar path traversal.
    with requests.get(URLS[dset], stream=True) as req:
        with tarfile.open(fileobj=GzipFile(fileobj=BytesIO(req.content))) as t:
            t.extractall(out_dir)

    return out_dir
def get_test_data_path():
    """Return the absolute path to the package's test "data" directory.

    Test-related data are kept in tests folder in "data".
    Based on function by Yaroslav Halchenko used in Neurosynth Python package.

    NOTE(review): ``os.path.sep`` is appended before normalization, but
    ``os.path.abspath`` (via normpath) strips a trailing separator, so the
    returned path does not actually end with one -- confirm intent.
    """
    here = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(here, "data") + os.path.sep)
def check_generated_files(out_dir, output_list_file):
    """Compare files generated by xcp_d with a list of expected files.

    Recursively lists everything under ``out_dir/xcp_d`` (paths recorded
    relative to *out_dir*, figures excluded) and compares the set against the
    newline-separated entries in *output_list_file*, raising ValueError with
    both difference directions on mismatch.
    """
    xcpd_dir = os.path.join(out_dir, "xcp_d")
    found_files = sorted(glob(os.path.join(xcpd_dir, "**/*"), recursive=True))
    found_files = [os.path.relpath(f, out_dir) for f in found_files]

    # Ignore figures
    found_files = [f for f in found_files if "figures" not in f]

    with open(output_list_file, "r") as fo:
        expected_files = fo.readlines()
        expected_files = [f.rstrip() for f in expected_files]

    if sorted(found_files) != sorted(expected_files):
        # Report both directions of the difference to ease debugging.
        expected_not_found = sorted(list(set(expected_files) - set(found_files)))
        found_not_expected = sorted(list(set(found_files) - set(expected_files)))

        msg = ""
        if expected_not_found:
            msg += "\nExpected but not found:\n\t"
            msg += "\n\t".join(expected_not_found)

        if found_not_expected:
            msg += "\nFound but not expected:\n\t"
            msg += "\n\t".join(found_not_expected)
        raise ValueError(msg)
def check_affines(data_dir, out_dir, input_type):
    """Confirm affines don't change across XCP-D runs.

    Picks one denoised output and its matching preprocessed input (selection
    depends on *input_type*: "cifti", "nifti", or anything else for nibabies)
    and asserts their headers/affines agree.
    """
    fmri_layout = BIDSLayout(str(data_dir), validate=False, derivatives=False)
    xcp_layout = BIDSLayout(str(out_dir), validate=False, derivatives=False)
    if input_type == "cifti":  # Get the .dtseries.nii
        denoised_files = xcp_layout.get(
            invalid_filters="allow",
            datatype="func",
            extension=".dtseries.nii",
        )
        # Match the preprocessed input in the same output space.
        space = denoised_files[0].get_entities()["space"]
        bold_files = fmri_layout.get(
            invalid_filters="allow",
            datatype="func",
            space=space,
            extension=".dtseries.nii",
        )

    elif input_type == "nifti":  # Get the .nii.gz
        # Problem: it's collecting native-space data
        denoised_files = xcp_layout.get(
            datatype="func",
            suffix="bold",
            extension=".nii.gz",
        )
        space = denoised_files[0].get_entities()["space"]
        bold_files = fmri_layout.get(
            invalid_filters="allow",
            datatype="func",
            space=space,
            suffix="bold",
            extension=".nii.gz",
        )

    else:  # Nibabies
        # Nibabies outputs are fixed to the MNIInfant space.
        denoised_files = xcp_layout.get(
            datatype="func",
            space="MNIInfant",
            suffix="bold",
            extension=".nii.gz",
        )
        bold_files = fmri_layout.get(
            invalid_filters="allow",
            datatype="func",
            space="MNIInfant",
            suffix="bold",
            extension=".nii.gz",
        )

    bold_file = bold_files[0].path
    denoised_file = denoised_files[0].path
    if input_type == "cifti":
        # CIFTI files carry no affine; compare the NIfTI header intent instead.
        assert (
            nb.load(bold_file)._nifti_header.get_intent()
            == nb.load(denoised_file)._nifti_header.get_intent()
        )
    else:
        if not np.array_equal(nb.load(bold_file).affine, nb.load(denoised_file).affine):
            raise AssertionError(f"Affines do not match:\n\t{bold_file}\n\t{denoised_file}")
    print("No affines changed.")
def run_command(command, env=None):
    """Run a given shell command with certain environment variables set.

    Streams the command's combined stdout/stderr to this process's stdout
    line by line, and raises on a non-zero exit status.

    :param command: the shell command string to run (executed with shell=True).
    :param env: optional mapping of extra environment variables for the child.
    """
    # Copy os.environ before merging: updating os.environ directly would
    # permanently leak the caller's env vars into this process's environment.
    merged_env = os.environ.copy()
    if env:
        merged_env.update(env)
    process = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        shell=True,
        env=merged_env,
    )
    while True:
        line = process.stdout.readline()
        line = str(line, "utf-8")[:-1]
        print(line)
        if line == "" and process.poll() is not None:
            break

    if process.returncode != 0:
        raise Exception(
            f"Non zero return code: {process.returncode}\n" f"{command}\n\n{process.stdout.read()}"
        )
@contextmanager
def chdir(path):
    """Temporarily change the working directory to *path*.

    Restores the previous working directory on exit, even if the body raises.
    Taken from https://stackoverflow.com/a/37996581/2589328.
    """
    previous_dir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous_dir)
def reorder_expected_outputs():
    """Sort each expected-output list file alphabetically, in place.

    This function is called manually by devs when they modify the test outputs.
    """
    test_data_path = get_test_data_path()
    for expected_output_file in sorted(
        glob(os.path.join(test_data_path, "test_*_outputs.txt"))
    ):
        LOGGER.info(f"Sorting {expected_output_file}")

        with open(expected_output_file, "r") as fo:
            sorted_contents = sorted(fo.readlines())

        with open(expected_output_file, "w") as fo:
            fo.writelines(sorted_contents)
6,051 | etag | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetMCASDataConnectorResult',
'AwaitableGetMCASDataConnectorResult',
'get_mcas_data_connector',
'get_mcas_data_connector_output',
]
@pulumi.output_type
class GetMCASDataConnectorResult:
    """
    Represents MCAS (Microsoft Cloud App Security) data connector.
    """
    # NOTE(review): ``METHOD_NAME`` looks like a masked placeholder for the
    # ``etag`` accessor (the stored key is always "etag") — confirm against
    # the code generator before renaming.
    def __init__(__self__, data_types=None, METHOD_NAME=None, id=None, kind=None, name=None, system_data=None, tenant_id=None, type=None):
        # Validate each argument's runtime type, then store it via pulumi.set.
        if data_types and not isinstance(data_types, dict):
            raise TypeError("Expected argument 'data_types' to be a dict")
        pulumi.set(__self__, "data_types", data_types)
        if METHOD_NAME and not isinstance(METHOD_NAME, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", METHOD_NAME)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if tenant_id and not isinstance(tenant_id, str):
            raise TypeError("Expected argument 'tenant_id' to be a str")
        pulumi.set(__self__, "tenant_id", tenant_id)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="dataTypes")
    def data_types(self) -> 'outputs.MCASDataConnectorDataTypesResponse':
        """
        The available data types for the connector.
        """
        return pulumi.get(self, "data_types")

    @property
    @pulumi.getter
    def METHOD_NAME(self) -> Optional[str]:
        """
        Etag of the azure resource
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Azure resource Id
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The kind of the data connector
        Expected value is 'MicrosoftCloudAppSecurity'.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Azure resource name
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Azure Resource Manager metadata containing createdBy and modifiedBy information.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """
        The tenant id to connect to, and get the data from.
        """
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Azure resource type
        """
        return pulumi.get(self, "type")
class AwaitableGetMCASDataConnectorResult(GetMCASDataConnectorResult):
    """Awaitable variant of the result; ``await`` resolves immediately."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The ``if False: yield`` makes this a generator so it satisfies the
        # awaitable protocol; the value is already resolved, so it returns at once.
        if False:
            yield self
        return GetMCASDataConnectorResult(
            data_types=self.data_types,
            METHOD_NAME=self.METHOD_NAME,
            id=self.id,
            kind=self.kind,
            name=self.name,
            system_data=self.system_data,
            tenant_id=self.tenant_id,
            type=self.type)
def get_mcas_data_connector(data_connector_id: Optional[str] = None,
                            operational_insights_resource_provider: Optional[str] = None,
                            resource_group_name: Optional[str] = None,
                            workspace_name: Optional[str] = None,
                            opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetMCASDataConnectorResult:
    """
    Gets a data connector.

    :param str data_connector_id: Connector ID
    :param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: The name of the workspace.
    :return: An awaitable description of the MCAS data connector.
    """
    __args__ = dict()
    __args__['dataConnectorId'] = data_connector_id
    __args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
    __args__['resourceGroupName'] = resource_group_name
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Invoke the provider function; the 'etag' value flows into METHOD_NAME.
    __ret__ = pulumi.runtime.invoke('azure-native:securityinsights/v20210301preview:getMCASDataConnector', __args__, opts=opts, typ=GetMCASDataConnectorResult).value

    return AwaitableGetMCASDataConnectorResult(
        data_types=pulumi.get(__ret__, 'data_types'),
        METHOD_NAME=pulumi.get(__ret__, 'etag'),
        id=pulumi.get(__ret__, 'id'),
        kind=pulumi.get(__ret__, 'kind'),
        name=pulumi.get(__ret__, 'name'),
        system_data=pulumi.get(__ret__, 'system_data'),
        tenant_id=pulumi.get(__ret__, 'tenant_id'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_mcas_data_connector)
def get_mcas_data_connector_output(data_connector_id: Optional[pulumi.Input[str]] = None,
                                   operational_insights_resource_provider: Optional[pulumi.Input[str]] = None,
                                   resource_group_name: Optional[pulumi.Input[str]] = None,
                                   workspace_name: Optional[pulumi.Input[str]] = None,
                                   opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetMCASDataConnectorResult]:
    """
    Gets a data connector.

    :param str data_connector_id: Connector ID
    :param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: The name of the workspace.
    """
    # Body is supplied by the lift_output_func decorator wrapping the
    # plain (non-Output) function above.
    ...
6,052 | test prealloc | #!/usr/bin/env python3
#
# Test for preallocate filter
#
# Copyright (c) 2020 Virtuozzo International GmbH.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import iotests
MiB = 1024 * 1024  # bytes per mebibyte

# Scratch image paths used by the tests below.
disk = os.path.join(iotests.test_dir, 'disk')
overlay = os.path.join(iotests.test_dir, 'overlay')
refdisk = os.path.join(iotests.test_dir, 'refdisk')

# Image-opts string: the test image wrapped in a 'preallocate' filter node.
drive_opts = f'node-name=disk,driver={iotests.imgfmt},' \
             f'file.node-name=filter,file.driver=preallocate,' \
             f'file.file.node-name=file,file.file.filename={disk}'
class TestPreallocateBase(iotests.QMPTestCase):
    """Common setup/teardown and size assertions for preallocate-filter tests."""

    def setUp(self):
        iotests.qemu_img_create('-f', iotests.imgfmt, disk, str(10 * MiB))

    def tearDown(self):
        try:
            # After every test the trailing preallocation must have been
            # dropped and the image must be consistent.
            self.check_small()
            check = iotests.qemu_img_check(disk)
            self.assertFalse('leaks' in check)
            self.assertFalse('corruptions' in check)
            self.assertEqual(check['check-errors'], 0)
        finally:
            os.remove(disk)

    def check_big(self):
        # Preallocation happened: file grew far beyond the 10 MiB virtual size.
        self.assertTrue(os.path.getsize(disk) > 100 * MiB)

    def check_small(self):
        # No (remaining) preallocation: file is below its virtual size.
        self.assertTrue(os.path.getsize(disk) < 10 * MiB)
class TestQemuImg(TestPreallocateBase):
    """Preallocation behaviour when driving the image through qemu-io."""

    def test_qemu_img(self):
        p = iotests.QemuIoInteractive('--image-opts', drive_opts)

        # A 1 MiB write through the filter should preallocate well past it.
        p.cmd('write 0 1M')
        p.cmd('flush')
        self.check_big()
        p.close()
class TestPreallocateFilter(TestPreallocateBase):
    """Preallocation behaviour with the image attached to a running VM."""

    def setUp(self):
        super().setUp()
        self.vm = iotests.VM().add_drive(path=None, opts=drive_opts)
        self.vm.launch()

    def tearDown(self):
        self.vm.shutdown()
        super().tearDown()

    # NOTE(review): METHOD_NAME appears to be a masked name for a basic
    # preallocation test (e.g. test_prealloc) — confirm before renaming.
    def METHOD_NAME(self):
        self.vm.hmp_qemu_io('drive0', 'write 0 1M')
        self.check_big()

    def test_external_snapshot(self):
        self.METHOD_NAME()

        result = self.vm.qmp('blockdev-snapshot-sync', node_name='disk',
                             snapshot_file=overlay,
                             snapshot_node_name='overlay')
        self.assert_qmp(result, 'return', {})

        # on reopen to r-o base preallocation should be dropped
        self.check_small()

        self.vm.hmp_qemu_io('drive0', 'write 1M 1M')

        result = self.vm.qmp('block-commit', device='overlay')
        self.assert_qmp(result, 'return', {})
        self.complete_and_wait()

        # commit of new megabyte should trigger preallocation
        self.check_big()

    def test_reopen_opts(self):
        # Reopen with explicit prealloc-size/align and verify the resulting
        # file size matches: 1M write -> aligned up to 5M, plus 20M ahead.
        result = self.vm.qmp('blockdev-reopen', options=[{
            'node-name': 'disk',
            'driver': iotests.imgfmt,
            'file': {
                'node-name': 'filter',
                'driver': 'preallocate',
                'prealloc-size': 20 * MiB,
                'prealloc-align': 5 * MiB,
                'file': {
                    'node-name': 'file',
                    'driver': 'file',
                    'filename': disk
                }
            }
        }])
        self.assert_qmp(result, 'return', {})

        self.vm.hmp_qemu_io('drive0', 'write 0 1M')
        self.assertTrue(os.path.getsize(disk) == 25 * MiB)
class TestTruncate(iotests.QMPTestCase):
    """Compare truncate behaviour with and without the preallocate filter."""

    def setUp(self):
        iotests.qemu_img_create('-f', iotests.imgfmt, disk, str(10 * MiB))
        iotests.qemu_img_create('-f', iotests.imgfmt, refdisk, str(10 * MiB))

    def tearDown(self):
        os.remove(disk)
        os.remove(refdisk)

    def do_test(self, prealloc_mode, new_size):
        # Same write+truncate sequence on the filtered image and on a plain
        # reference image; the results should (approximately) match.
        ret = iotests.qemu_io_silent('--image-opts', '-c', 'write 0 10M', '-c',
                                     f'truncate -m {prealloc_mode} {new_size}',
                                     drive_opts)
        self.assertEqual(ret, 0)

        ret = iotests.qemu_io_silent('-f', iotests.imgfmt, '-c', 'write 0 10M',
                                     '-c',
                                     f'truncate -m {prealloc_mode} {new_size}',
                                     refdisk)
        self.assertEqual(ret, 0)

        stat = os.stat(disk)
        refstat = os.stat(refdisk)

        # Probably we'll want preallocate filter to keep align to cluster when
        # shrink preallocation, so, ignore small difference
        self.assertLess(abs(stat.st_size - refstat.st_size), 64 * 1024)

        # Preallocate filter may leak some internal clusters (for example, if
        # guest writes far over EOF, skipping some clusters - they will remain
        # fallocated; the preallocate filter doesn't care about such leaks, it
        # drops only trailing preallocation).
        self.assertLess(abs(stat.st_blocks - refstat.st_blocks) * 512,
                        1024 * 1024)

    def test_real_shrink(self):
        self.do_test('off', '5M')

    def test_truncate_inside_preallocated_area__falloc(self):
        self.do_test('falloc', '50M')

    def test_truncate_inside_preallocated_area__metadata(self):
        self.do_test('metadata', '50M')

    def test_truncate_inside_preallocated_area__full(self):
        self.do_test('full', '50M')

    def test_truncate_inside_preallocated_area__off(self):
        self.do_test('off', '50M')

    def test_truncate_over_preallocated_area__falloc(self):
        self.do_test('falloc', '150M')

    def test_truncate_over_preallocated_area__metadata(self):
        self.do_test('metadata', '150M')

    def test_truncate_over_preallocated_area__full(self):
        self.do_test('full', '150M')

    def test_truncate_over_preallocated_area__off(self):
        self.do_test('off', '150M')
if __name__ == '__main__':
iotests.main(supported_fmts=['qcow2'], required_fmts=['preallocate']) |
6,053 | extract data | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._operations import build_list_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.subscription.aio.SubscriptionClient`'s
        :attr:`operations` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # The generated service client injects client, config, serializer and
        # deserializer either positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
        """Lists all of the available Microsoft.Subscription API operations.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either Operation or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.subscription.models.Operation]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.OperationListResult]

        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the templated operation URL; later pages issue a
            # plain GET against the service-provided next_link.
            if not next_link:
                request = build_list_request(
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
            else:
                request = HttpRequest("GET", next_link)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        # NOTE(review): METHOD_NAME looks like a masked name for the page
        # extractor (conventionally ``extract_data``) — confirm before renaming.
        async def METHOD_NAME(pipeline_response):
            deserialized = self._deserialize("OperationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponseBody, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, METHOD_NAME)

    list.metadata = {"url": "/providers/Microsoft.Subscription/operations"}  # type: ignore
6,054 | test collection delete | import unittest
from unittest import mock
from pyramid import httpexceptions
from kinto.core.resource import Resource, ShareableResource
from kinto.core.storage import exceptions as storage_exceptions
from kinto.core.testing import DummyRequest
from . import BaseTest
class ResourceTest(BaseTest):
    """Behaviour of Resource construction and timestamp error handling."""

    def test_raise_if_backend_fails_to_obtain_timestamp(self):
        request = self.get_request()
        with mock.patch.object(
            request.registry.storage,
            "resource_timestamp",
            side_effect=storage_exceptions.BackendError,
        ):
            with self.assertRaises(storage_exceptions.BackendError):
                self.resource_class(request)

    def test_raise_unavailable_if_fail_to_obtain_timestamp_with_readonly(self):
        # In readonly mode a ReadonlyError surfaces as 503 Service Unavailable.
        request = self.get_request()
        excepted_exc = httpexceptions.HTTPServiceUnavailable

        request.registry.settings = {"readonly": "true", "explicit_permissions": "true"}
        with mock.patch.object(
            request.registry.storage,
            "resource_timestamp",
            side_effect=storage_exceptions.ReadonlyError,
        ):
            with self.assertRaises(excepted_exc) as cm:
                self.resource_class(request)
                self.assertIn("writable", cm.exception.message)

    def test_resource_can_be_created_without_context(self):
        try:
            self.resource_class(self.get_request())
        except Exception as e:
            self.fail(e)

    def test_default_parent_id_is_empty(self):
        request = self.get_request()
        parent_id = self.resource.get_parent_id(request)
        self.assertEqual(parent_id, "")
class DeprecatedShareableResource(unittest.TestCase):
    """ShareableResource emits a DeprecationWarning on instantiation."""

    def test_deprecated_warning(self):
        with mock.patch("warnings.warn") as mocked_warnings:
            ShareableResource(context=mock.MagicMock(), request=mock.MagicMock())

        message = "`ShareableResource` is deprecated, use `Resource` instead."
        mocked_warnings.assert_called_with(message, DeprecationWarning)
class DeprecatedMethodsTest(unittest.TestCase):
    """Each deprecated accessor/method emits the matching DeprecationWarning."""

    def setUp(self):
        super().setUp()
        # Capture warnings.warn calls for the assertions below.
        patch = mock.patch("warnings.warn")
        self.mocked_warnings = patch.start()
        self.addCleanup(patch.stop)

        req = DummyRequest()
        req.validated = {"body": {}, "header": {}, "querystring": {}}
        req.registry.storage.list_all.return_value = []
        req.registry.storage.delete_all.return_value = []
        req.registry.storage.create.return_value = {"id": "abc", "last_modified": 123}
        self.resource = Resource(context=mock.MagicMock(), request=req)

    def test_record_id(self):
        self.resource.record_id
        message = "`record_id` is deprecated, use `object_id` instead."
        self.mocked_warnings.assert_called_with(message, DeprecationWarning)

    def test_process_record(self, *args, **kwargs):
        self.resource.process_record(new={}, old=None)
        message = "`process_record()` is deprecated, use `process_object()` instead."
        self.mocked_warnings.assert_called_with(message, DeprecationWarning)

    def test_collection_get(self, *args, **kwargs):
        self.resource.collection_get()
        message = "`collection_get()` is deprecated, use `plural_get()` instead."
        self.mocked_warnings.assert_called_with(message, DeprecationWarning)

    def test_collection_post(self, *args, **kwargs):
        self.resource.collection_post()
        message = "`collection_post()` is deprecated, use `plural_post()` instead."
        self.mocked_warnings.assert_called_with(message, DeprecationWarning)

    # NOTE(review): METHOD_NAME appears to be a masked name for
    # test_collection_delete — confirm before renaming.
    def METHOD_NAME(self, *args, **kwargs):
        self.resource.collection_delete()
        message = "`collection_delete()` is deprecated, use `plural_delete()` instead."
        self.mocked_warnings.assert_called_with(message, DeprecationWarning)
class NewResource(Resource):
    """Resource subclass used to test get_parent_id overriding."""

    def get_parent_id(self, request):
        return "overrided"
class ParentIdOverrideResourceTest(BaseTest):
    """An overridden get_parent_id is used both directly and by the model."""

    resource_class = NewResource

    def test_get_parent_can_be_overridded(self):
        request = self.get_request()
        parent_id = self.resource.get_parent_id(request)
        self.assertEqual(parent_id, "overrided")
        self.assertEqual(self.resource.model.parent_id, "overrided")
class CustomModelResource(Resource):
    """Resource that assigns its own model before calling the base __init__."""

    def __init__(self, *args, **kwargs):
        self.model = mock.MagicMock()
        self.model.name = mock.sentinel.model
        super().__init__(*args, **kwargs)
# NOTE(review): class name has a typo ("Tets" -> "Tests"); left unchanged
# because renaming could affect test selection by name.
class CustomModelResourceTets(unittest.TestCase):
    def test_custom_model_is_not_overriden(self):
        # The base Resource.__init__ must not replace a pre-assigned model.
        c = CustomModelResource(request=mock.MagicMock())
        self.assertEqual(c.model.name, mock.sentinel.model)
6,055 | test outstream | """Test IO capturing functionality"""
import io
import os
import subprocess
import sys
import threading
import time
import warnings
from concurrent.futures import Future, ThreadPoolExecutor
from unittest import mock
import pytest
import zmq
from jupyter_client.session import Session
from ipykernel.iostream import MASTER, BackgroundSocket, IOPubThread, OutStream
@pytest.fixture
def ctx():
    """Yield a fresh zmq Context; destroy it on teardown."""
    ctx = zmq.Context()
    yield ctx
    ctx.destroy()
@pytest.fixture
def iopub_thread(ctx):
    """Yield a started IOPubThread on a PUB socket; stop and close it after."""
    with ctx.socket(zmq.PUB) as pub:
        thread = IOPubThread(pub)
        thread.start()

        yield thread
        thread.stop()
        thread.close()
def test_io_api(iopub_thread):
    """Test that wrapped stdout has the same API as a normal TextIO object"""
    session = Session()
    stream = OutStream(session, iopub_thread, "stdout")

    assert stream.errors is None
    assert not stream.isatty()
    # All read/seek-style operations are unsupported on the write-only stream.
    with pytest.raises(io.UnsupportedOperation):
        stream.detach()
    with pytest.raises(io.UnsupportedOperation):
        next(stream)
    with pytest.raises(io.UnsupportedOperation):
        stream.read()
    with pytest.raises(io.UnsupportedOperation):
        stream.readline()
    with pytest.raises(io.UnsupportedOperation):
        stream.seek(0)
    with pytest.raises(io.UnsupportedOperation):
        stream.tell()
    # Only text may be written; bytes raise TypeError.
    with pytest.raises(TypeError):
        stream.write(b"")  # type:ignore
def test_io_isatty(iopub_thread):
    """isatty() reflects the isatty constructor flag."""
    session = Session()
    stream = OutStream(session, iopub_thread, "stdout", isatty=True)
    assert stream.isatty()
def test_io_thread(iopub_thread):
    """Exercise IOPubThread internal pipe plumbing and double-close safety."""
    thread = iopub_thread
    thread._setup_pipe_in()
    msg = [thread._pipe_uuid, b"a"]
    thread._handle_pipe_msg(msg)
    ctx1, pipe = thread._setup_pipe_out()
    pipe.close()
    thread._pipe_in.close()
    thread._check_mp_mode = lambda: MASTER
    thread._really_send([b"hi"])
    ctx1.destroy()
    thread.close()
    # Closing twice and sending after close must not raise.
    thread.close()
    thread._really_send(None)
def test_background_socket(iopub_thread):
    """BackgroundSocket proxies attribute access and send to the thread socket."""
    sock = BackgroundSocket(iopub_thread)
    assert sock.__class__ == BackgroundSocket
    with warnings.catch_warnings():
        # Direct attribute access through the proxy is deprecated.
        warnings.simplefilter("ignore", DeprecationWarning)
        sock.linger = 101
        assert iopub_thread.socket.linger == 101
    assert sock.io_thread == iopub_thread
    sock.send(b"hi")
# NOTE(review): METHOD_NAME looks like a masked name for test_outstream —
# confirm before renaming.
def METHOD_NAME(iopub_thread):
    """Construct OutStream with each supported argument combination."""
    session = Session()
    pub = iopub_thread.socket
    with warnings.catch_warnings():
        # Passing a raw socket instead of an IOPubThread is deprecated.
        warnings.simplefilter("ignore", DeprecationWarning)
        stream = OutStream(session, pub, "stdout")
    stream.close()

    stream = OutStream(session, iopub_thread, "stdout", pipe=object())
    stream.close()

    stream = OutStream(session, iopub_thread, "stdout", watchfd=False)
    stream.close()

    stream = OutStream(session, iopub_thread, "stdout", isatty=True, echo=io.StringIO())
    with stream:
        with pytest.raises(io.UnsupportedOperation):
            stream.fileno()
        stream._watch_pipe_fd()
        stream.flush()
        stream.write("hi")
        stream.writelines(["ab", "cd"])
        assert stream.writable()
async def test_event_pipe_gc(iopub_thread):
    """Event pipes for exited threads are garbage-collected in the iopub thread."""
    session = Session(key=b'abc')
    stream = OutStream(
        session,
        iopub_thread,
        "stdout",
        isatty=True,
        watchfd=False,
    )
    # (removed unused local ``save_stdout = sys.stdout`` left over from an
    # earlier revision of this test)
    assert iopub_thread._event_pipes == {}
    with stream, mock.patch.object(sys, "stdout", stream), ThreadPoolExecutor(1) as pool:
        # Writing from a worker thread registers an event pipe for it.
        pool.submit(print, "x").result()
        pool_thread = pool.submit(threading.current_thread).result()
        assert list(iopub_thread._event_pipes) == [pool_thread]

    # run gc once in the iopub thread
    f: Future = Future()

    async def run_gc():
        try:
            await iopub_thread._event_pipe_gc()
        except Exception as e:
            f.set_exception(e)
        else:
            f.set_result(None)

    iopub_thread.io_loop.add_callback(run_gc)
    # wait for call to finish in iopub thread
    f.result()
    # The worker thread has exited, so its pipe must have been collected.
    assert iopub_thread._event_pipes == {}
def subprocess_test_echo_watch():
    """Child-process body for test_echo_watch: write via every stdout path."""
    # handshake Pub subscription
    session = Session(key=b'abc')
    # use PUSH socket to avoid subscription issues
    with zmq.Context() as ctx, ctx.socket(zmq.PUSH) as pub:
        pub.connect(os.environ["IOPUB_URL"])
        iopub_thread = IOPubThread(pub)
        iopub_thread.start()
        stdout_fd = sys.stdout.fileno()
        sys.stdout.flush()
        stream = OutStream(
            session,
            iopub_thread,
            "stdout",
            isatty=True,
            echo=sys.stdout,
            watchfd="force",
        )
        save_stdout = sys.stdout
        with stream, mock.patch.object(sys, "stdout", stream):
            # write to low-level FD
            os.write(stdout_fd, b"fd\n")
            # print (writes to stream)
            print("print\n", end="")
            sys.stdout.flush()
            # write to unwrapped __stdout__ (should also go to original FD)
            sys.__stdout__.write("__stdout__\n")
            sys.__stdout__.flush()
            # write to original sys.stdout (should be the same as __stdout__)
            save_stdout.write("stdout\n")
            save_stdout.flush()
            # is there another way to flush on the FD?
            fd_file = os.fdopen(stdout_fd, "w")
            fd_file.flush()
            # we don't have a sync flush on _reading_ from the watched pipe
            time.sleep(1)
            stream.flush()
        iopub_thread.stop()
        iopub_thread.close()
@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows")
def test_echo_watch(ctx):
    """Test echo on underlying FD while capturing the same FD

    Test runs in a subprocess to avoid messing with pytest output capturing.
    """
    s = ctx.socket(zmq.PULL)
    port = s.bind_to_random_port("tcp://127.0.0.1")
    url = f"tcp://127.0.0.1:{port}"
    session = Session(key=b'abc')
    messages = []  # NOTE(review): appears unused — candidate for removal
    stdout_chunks = []
    with s:
        env = dict(os.environ)
        env["IOPUB_URL"] = url
        env["PYTHONUNBUFFERED"] = "1"
        env.pop("PYTEST_CURRENT_TEST", None)
        # Run the companion child entry point defined above in this module.
        p = subprocess.run(
            [
                sys.executable,
                "-c",
                f"import {__name__}; {__name__}.subprocess_test_echo_watch()",
            ],
            env=env,
            capture_output=True,
            check=True,
            text=True,
            timeout=10,
        )
        print(f"{p.stdout=}")
        print(f"{p.stderr}=", file=sys.stderr)
        assert p.returncode == 0
        # Drain all iopub stream messages published by the child.
        while s.poll(timeout=100):
            ident, msg = session.recv(s)
            assert msg is not None  # for type narrowing
            if msg["header"]["msg_type"] == "stream" and msg["content"]["name"] == "stdout":
                stdout_chunks.append(msg["content"]["text"])

    # check outputs
    # use sets of lines to ignore ordering issues with
    # async flush and watchfd thread

    # Check the stream output forwarded over zmq
    zmq_stdout = "".join(stdout_chunks)
    assert set(zmq_stdout.strip().splitlines()) == {
        "fd",
        "print",
        "stdout",
        "__stdout__",
    }

    # Check what was written to the process stdout (kernel terminal)
    # just check that each output source went to the terminal
    assert set(p.stdout.strip().splitlines()) == {
        "fd",
        "print",
        "stdout",
        "__stdout__",
    }
6,056 | test deleting theme invalidates themes cache | from pathlib import Path
import pytest
from django.core.files.base import ContentFile
from django.urls import reverse
from ....cache.test import assert_invalidates_cache
from ....test import assert_has_error_message
from ... import THEME_CACHE
from ...models import Theme, Css, Media
@pytest.fixture
def delete_link(theme):
    """URL of the admin delete view for the ``theme`` fixture."""
    return reverse("misago:admin:themes:delete", kwargs={"pk": theme.pk})
def test_theme_without_children_can_be_deleted(admin_client, delete_link, theme):
    """Posting to the delete view removes a theme that has no children."""
    admin_client.post(delete_link)
    with pytest.raises(Theme.DoesNotExist):
        theme.refresh_from_db()
def test_theme_css_are_deleted_together_with_theme(admin_client, delete_link, css):
    """Theme CSS database records are removed together with the theme."""
    admin_client.post(delete_link)
    with pytest.raises(Css.DoesNotExist):
        css.refresh_from_db()
def test_theme_source_css_files_are_deleted_together_with_theme(
    admin_client, delete_link, css
):
    """CSS source files are removed from disk when the theme is deleted."""
    admin_client.post(delete_link)
    assert not Path(css.source_file.path).exists()
def test_theme_build_css_files_are_deleted_together_with_theme(
    admin_client, delete_link, css
):
    """Compiled (build) CSS files are removed from disk with the theme."""
    css.build_file = ContentFile("body {}", name="test.css")
    css.build_hash = "abcdefgh"
    css.save()

    admin_client.post(delete_link)
    assert not Path(css.build_file.path).exists()
def test_theme_media_are_deleted_together_with_theme(admin_client, delete_link, media):
    """Theme media database records are removed together with the theme."""
    admin_client.post(delete_link)
    with pytest.raises(Media.DoesNotExist):
        media.refresh_from_db()
def test_theme_images_are_deleted_together_with_theme(admin_client, delete_link, image):
    """Theme image records are removed together with the theme."""
    admin_client.post(delete_link)
    with pytest.raises(Media.DoesNotExist):
        image.refresh_from_db()
def test_theme_media_files_are_deleted_together_with_theme(
    admin_client, delete_link, media
):
    """Media files are removed from disk when the theme is deleted."""
    admin_client.post(delete_link)
    assert not Path(media.file.path).exists()
def test_theme_image_files_are_deleted_together_with_theme(
    admin_client, delete_link, image
):
    """Image thumbnail files are removed from disk when the theme is deleted."""
    admin_client.post(delete_link)
    assert not Path(image.thumbnail.path).exists()
def test_theme_is_deleted_with_children(admin_client, delete_link, theme):
    """Deleting a parent theme also deletes its direct child."""
    Theme.objects.create(name="Child Theme", parent=theme)
    admin_client.post(delete_link)
    # Only the unrelated default theme remains.
    assert Theme.objects.count() == 1
def test_theme_children_are_deleted_recursively(admin_client, delete_link, theme):
    """Deleting a theme removes its entire descendants subtree."""
    child_theme = Theme.objects.create(name="Child Theme", parent=theme)
    Theme.objects.create(name="Descendant Theme", parent=child_theme)
    Theme.objects.create(name="Descendant Theme", parent=child_theme)
    admin_client.post(delete_link)
    assert Theme.objects.count() == 1
def test_children_theme_can_be_deleted(admin_client, delete_link, theme, other_theme):
    """A theme that is itself a child can be deleted."""
    theme.move_to(other_theme)
    theme.save()

    admin_client.post(delete_link)
    with pytest.raises(Theme.DoesNotExist):
        theme.refresh_from_db()
def test_deleting_children_theme_doesnt_delete_parent_themes(
    admin_client, delete_link, theme, other_theme
):
    """Deleting a child theme leaves its parent intact."""
    theme.move_to(other_theme)
    theme.save()

    admin_client.post(delete_link)
    other_theme.refresh_from_db()
# NOTE(review): METHOD_NAME appears to be a masked name for
# test_deleting_theme_invalidates_themes_cache — confirm before renaming.
def METHOD_NAME(admin_client, delete_link):
    """Deleting a theme invalidates the themes cache."""
    with assert_invalidates_cache(THEME_CACHE):
        admin_client.post(delete_link)
def test_deleting_default_theme_sets_error_message(admin_client, default_theme):
    """Attempting to delete the default theme produces an error message."""
    delete_link = reverse("misago:admin:themes:delete", kwargs={"pk": default_theme.pk})
    response = admin_client.post(delete_link)
    assert_has_error_message(response)
def test_default_theme_is_not_deleted(admin_client, default_theme):
    """The default theme survives a delete attempt."""
    delete_link = reverse("misago:admin:themes:delete", kwargs={"pk": default_theme.pk})
    admin_client.post(delete_link)
    default_theme.refresh_from_db()
def test_deleting_active_theme_sets_error_message(admin_client, theme):
    """Attempting to delete the active theme produces an error message."""
    theme.is_active = True
    theme.save()

    delete_link = reverse("misago:admin:themes:delete", kwargs={"pk": theme.pk})
    response = admin_client.post(delete_link)
    assert_has_error_message(response)
def test_active_theme_is_not_deleted(admin_client, theme):
    """The active theme survives a delete attempt."""
    theme.is_active = True
    theme.save()

    delete_link = reverse("misago:admin:themes:delete", kwargs={"pk": theme.pk})
    admin_client.post(delete_link)
    theme.refresh_from_db()
def test_deleting_theme_containing_active_child_theme_sets_error_message(
    admin_client, theme, other_theme
):
    """Deleting a theme whose child is active produces an error message."""
    other_theme.move_to(theme)
    other_theme.is_active = True
    other_theme.save()

    delete_link = reverse("misago:admin:themes:delete", kwargs={"pk": theme.pk})
    response = admin_client.post(delete_link)
    assert_has_error_message(response)
def test_theme_containing_active_child_theme_is_not_deleted(
    admin_client, theme, other_theme
):
    """A theme whose child is active survives a delete attempt."""
    other_theme.move_to(theme)
    other_theme.is_active = True
    other_theme.save()

    delete_link = reverse("misago:admin:themes:delete", kwargs={"pk": theme.pk})
    admin_client.post(delete_link)
    theme.refresh_from_db()
6,057 | test catches leaked mock when enabled | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests that leaked mock objects can be caught be Google Mock."""
from googlemock.test import gmock_test_utils
PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_leak_test_')

# Command lines for the child test binary, filtered to specific test groups.
TEST_WITH_EXPECT_CALL = [PROGRAM_PATH, '--gtest_filter=*ExpectCall*']
TEST_WITH_ON_CALL = [PROGRAM_PATH, '--gtest_filter=*OnCall*']
TEST_MULTIPLE_LEAKS = [PROGRAM_PATH, '--gtest_filter=*MultipleLeaked*']

environ = gmock_test_utils.environ
SetEnvVar = gmock_test_utils.SetEnvVar

# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely.  Therefore they are incompatible with
# the premature-exit-file protocol by design.  Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gmock_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
class GMockLeakTest(gmock_test_utils.TestCase):
    """Tests Google Mock's leaked-mock detection.

    Each test runs the gmock_leak_test_ binary as a subprocess with a
    particular --gmock_catch_leaked_mocks setting and inspects the exit
    code: non-zero means Google Mock reported the leaked mocks.
    """

    def testCatchesLeakedMockByDefault(self):
        # Leak detection is on by default, so the leaking programs must fail.
        self.assertNotEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL,
                                        env=environ).exit_code)
        self.assertNotEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_ON_CALL,
                                        env=environ).exit_code)

    def testDoesNotCatchLeakedMockWhenDisabled(self):
        # Fix: use assertEqual -- the assertEquals alias has been deprecated
        # since Python 3.2 and was removed in Python 3.12.
        self.assertEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                        ['--gmock_catch_leaked_mocks=0'],
                                        env=environ).exit_code)
        self.assertEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                        ['--gmock_catch_leaked_mocks=0'],
                                        env=environ).exit_code)

    def METHOD_NAME(self):
        # Explicitly enabling leak detection (flag without a value) must also
        # make the leaking programs fail.
        self.assertNotEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                        ['--gmock_catch_leaked_mocks'],
                                        env=environ).exit_code)
        self.assertNotEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                        ['--gmock_catch_leaked_mocks'],
                                        env=environ).exit_code)

    def testCatchesLeakedMockWhenEnabledWithExplictFlagValue(self):
        # Same as above, but with an explicit "=1" flag value.
        self.assertNotEqual(
            0,
            gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                        ['--gmock_catch_leaked_mocks=1'],
                                        env=environ).exit_code)

    def testCatchesMultipleLeakedMocks(self):
        # All leaked mocks must be reported, not just the first one.
        self.assertNotEqual(
            0,
            gmock_test_utils.Subprocess(TEST_MULTIPLE_LEAKS +
                                        ['--gmock_catch_leaked_mocks'],
                                        env=environ).exit_code)
if __name__ == '__main__':
gmock_test_utils.Main() |
6,058 | multi occurrence | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides base classes for XML->object I{unmarshalling}.
"""
from suds import *
from suds.umx import *
from suds.umx.attrlist import AttrList
from suds.sax.text import Text
from suds.sudsobject import Factory, merge
reserved = {'class':'cls', 'def':'dfn'}
class Core:
    """
    The abstract XML I{node} unmarshaller. This class provides the
    I{core} unmarshalling functionality.
    """
    def process(self, content):
        """
        Process an object graph representation of the xml I{node}.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: A suds object.
        @rtype: L{Object}
        """
        self.reset()
        return self.append(content)
    def append(self, content):
        """
        Process the specified node and convert the XML document into
        a I{suds} L{object}.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: A I{append-result} tuple as: (L{Object}, I{value})
        @rtype: I{append-result}
        @note: This is not the proper entry point.
        @see: L{process()}
        """
        # Template method: subclasses customize unmarshalling through the
        # start/append_*/end hooks invoked in this fixed order.
        self.start(content)
        self.append_attributes(content)
        self.append_children(content)
        self.append_text(content)
        self.end(content)
        return self.postprocess(content)
    def postprocess(self, content):
        """
        Perform final processing of the resulting data structure as follows:
        - Mixed values (children and text) will have a result of the I{content.node}.
        - Simi-simple values (attributes, no-children and text) will have a result of a
        property object.
        - Simple values (no-attributes, no-children with text nodes) will have a string
        result equal to the value of the content.node.getText().
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: The post-processed result.
        @rtype: I{any}
        """
        node = content.node
        # Mixed content (both child elements and text): return the raw node.
        if len(node.children) and node.hasText():
            return node
        attributes = AttrList(node.attributes)
        # Semi-simple content (real attributes + text, no children): merge a
        # property object holding the text into the accumulated data.
        if attributes.rlen() and \
            not len(node.children) and \
            node.hasText():
            p = Factory.property(node.name, node.getText())
            return merge(content.data, p)
        # Complex content: return the suds object populated by append().
        if len(content.data):
            return content.data
        lang = attributes.lang()
        # xsi:nil="true" explicitly marks the element as null.
        if content.node.isnil():
            return None
        if not len(node.children) and content.text is None:
            if self.nillable(content):
                return None
            else:
                # Empty but not nillable: yield an empty text value.
                return Text('', lang=lang)
        if isinstance(content.text, str):
            return Text(content.text, lang=lang)
        else:
            return content.text
    def append_attributes(self, content):
        """
        Append attribute nodes into L{Content.data}.
        Attributes in the I{schema} or I{xml} namespaces are skipped.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        attributes = AttrList(content.node.attributes)
        for attr in attributes.real():
            name = attr.name
            value = attr.value
            self.append_attribute(name, value, content)
    def append_attribute(self, name, value, content):
        """
        Append an attribute name/value into L{Content.data}.
        @param name: The attribute name
        @type name: basestring
        @param value: The attribute's value
        @type value: basestring
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        key = name
        # Prefix with '_' (renaming python keywords via `reserved`) so the
        # attribute can be stored as a valid python identifier.
        key = '_%s' % reserved.get(key, key)
        setattr(content.data, key, value)
    def append_children(self, content):
        """
        Append child nodes into L{Content.data}
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        for child in content.node:
            cont = Content(child)
            cval = self.append(cont)
            key = reserved.get(child.name, child.name)
            if key in content.data:
                # Repeated element: promote the existing value to a list
                # (or extend it) so every occurrence is kept.
                v = getattr(content.data, key)
                if isinstance(v, list):
                    v.append(cval)
                else:
                    setattr(content.data, key, [v, cval])
                continue
            if self.METHOD_NAME(cont):
                # Schema declares multi-occurrence: always store a list,
                # even for zero or one value.
                if cval is None:
                    setattr(content.data, key, [])
                else:
                    setattr(content.data, key, [cval,])
            else:
                setattr(content.data, key, cval)
    def append_text(self, content):
        """
        Append text nodes into L{Content.data}
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        if content.node.hasText():
            content.text = content.node.getText()
    def reset(self):
        # Hook: clear per-document state before process(); no-op in the base.
        pass
    def start(self, content):
        """
        Processing on I{node} has started. Build and return
        the proper object.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: A subclass of Object.
        @rtype: L{Object}
        """
        content.data = Factory.object(content.node.name)
    def end(self, content):
        """
        Processing on I{node} has ended.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        pass
    def single_occurrence(self, content):
        """
        Get whether the content has at most a single occurrence (not a list).
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: True if content has at most a single occurrence, else False.
        @rtype: boolean
        """
        return not self.METHOD_NAME(content)
    def METHOD_NAME(self, content):
        """
        Get whether the content has more than one occurrence (a list).
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: True if content has more than one occurrence, else False.
        @rtype: boolean
        """
        return False
    def nillable(self, content):
        """
        Get whether the object is nillable.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: True if nillable, else False
        @rtype: boolean
        """
        return False
6,059 | channel arn | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetStreamKeyResult',
'AwaitableGetStreamKeyResult',
'get_stream_key',
'get_stream_key_output',
]
@pulumi.output_type
class GetStreamKeyResult:
    """
    A collection of values returned by getStreamKey.
    """
    # NOTE: generated code -- the property/decorator layout is introspected
    # by pulumi's output_type machinery; METHOD_NAME is the channel_arn
    # accessor placeholder.
    def __init__(__self__, arn=None, METHOD_NAME=None, id=None, tags=None, value=None):
        # Validate each field is the expected primitive type before storing
        # it via pulumi.set (which backs the @pulumi.getter properties below).
        if arn and not isinstance(arn, str):
            raise TypeError("Expected argument 'arn' to be a str")
        pulumi.set(__self__, "arn", arn)
        if METHOD_NAME and not isinstance(METHOD_NAME, str):
            raise TypeError("Expected argument 'channel_arn' to be a str")
        pulumi.set(__self__, "channel_arn", METHOD_NAME)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if value and not isinstance(value, str):
            raise TypeError("Expected argument 'value' to be a str")
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        ARN of the Stream Key.
        """
        return pulumi.get(self, "arn")
    @property
    @pulumi.getter(name="channelArn")
    def METHOD_NAME(self) -> str:
        # ARN of the channel this stream key belongs to.
        return pulumi.get(self, "channel_arn")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def tags(self) -> Mapping[str, str]:
        """
        Map of tags assigned to the resource.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def value(self) -> str:
        """
        Stream Key value.
        """
        return pulumi.get(self, "value")
class AwaitableGetStreamKeyResult(GetStreamKeyResult):
    # Awaitable wrapper so the invoke result can be used with `await`;
    # __await__ never actually yields (the `if False` keeps this a
    # generator) and returns a plain GetStreamKeyResult copy.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetStreamKeyResult(
            arn=self.arn,
            METHOD_NAME=self.METHOD_NAME,
            id=self.id,
            tags=self.tags,
            value=self.value)
def get_stream_key(METHOD_NAME: Optional[str] = None,
                   tags: Optional[Mapping[str, str]] = None,
                   opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetStreamKeyResult:
    """
    Data source for managing an AWS IVS (Interactive Video) Stream Key.
    ## Example Usage
    ### Basic Usage
    ```python
    import pulumi
    import pulumi_aws as aws
    example = aws.ivs.get_stream_key(channel_arn="arn:aws:ivs:us-west-2:326937407773:channel/0Y1lcs4U7jk5")
    ```
    :param str channel_arn: ARN of the Channel.
    :param Mapping[str, str] tags: Map of tags assigned to the resource.
    """
    # NOTE: generated code -- METHOD_NAME is the channel_arn parameter.
    __args__ = dict()
    __args__['channelArn'] = METHOD_NAME
    __args__['tags'] = tags
    # Merge caller-supplied options with provider defaults, then perform the
    # synchronous data-source invoke and unwrap its typed result.
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('aws:ivs/getStreamKey:getStreamKey', __args__, opts=opts, typ=GetStreamKeyResult).value
    return AwaitableGetStreamKeyResult(
        arn=pulumi.get(__ret__, 'arn'),
        METHOD_NAME=pulumi.get(__ret__, 'channel_arn'),
        id=pulumi.get(__ret__, 'id'),
        tags=pulumi.get(__ret__, 'tags'),
        value=pulumi.get(__ret__, 'value'))
@_utilities.lift_output_func(get_stream_key)
def get_stream_key_output(METHOD_NAME: Optional[pulumi.Input[str]] = None,
                          tags: Optional[pulumi.Input[Optional[Mapping[str, str]]]] = None,
                          opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetStreamKeyResult]:
    """
    Data source for managing an AWS IVS (Interactive Video) Stream Key.
    ## Example Usage
    ### Basic Usage
    ```python
    import pulumi
    import pulumi_aws as aws
    example = aws.ivs.get_stream_key(channel_arn="arn:aws:ivs:us-west-2:326937407773:channel/0Y1lcs4U7jk5")
    ```
    :param str channel_arn: ARN of the Channel.
    :param Mapping[str, str] tags: Map of tags assigned to the resource.
    """
    # Body intentionally empty: lift_output_func wraps get_stream_key and
    # supplies the Output-aware implementation.
    ...
6,060 | test put mc | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import importlib
import unittest
from azure.cli.command_modules.acs._consts import DecoratorMode
from azure.cli.command_modules.acs.base_decorator import (
BaseAKSContext,
BaseAKSManagedClusterDecorator,
BaseAKSModels,
BaseAKSParamDict,
validate_decorator_mode,
)
from azure.cli.command_modules.acs.tests.latest.mocks import MockCLI, MockClient, MockCmd
from azure.cli.core.azclierror import CLIInternalError
from azure.cli.core.profiles import ResourceType
class BaseDecoratorHelperFunctionsTestCase(unittest.TestCase):
    """Tests for the module-level helper functions of base_decorator."""

    def test_validate_decorator_mode(self):
        # Genuine DecoratorMode members are accepted.
        for accepted in (DecoratorMode.CREATE, DecoratorMode.UPDATE):
            self.assertEqual(validate_decorator_mode(accepted), True)
        # Anything that is not a DecoratorMode member is rejected: the enum
        # class itself, ints, strings, bools and dicts.
        for rejected in (DecoratorMode, 1, "1", True, {}):
            self.assertEqual(validate_decorator_mode(rejected), False)
class BaseAKSModelsTestCase(unittest.TestCase):
    """Checks that BaseAKSModels resolves models from the correct SDK module."""

    def setUp(self):
        self.cli_ctx = MockCLI()
        self.cmd = MockCmd(self.cli_ctx)

    def test_models(self):
        # load models directly (instead of through the `get_sdk` method provided by the cli component)
        from azure.cli.core.profiles._shared import AZURE_API_PROFILES
        sdk_profile = AZURE_API_PROFILES["latest"][ResourceType.MGMT_CONTAINERSERVICE]
        api_version = sdk_profile.default_api_version
        module_name = "azure.mgmt.containerservice.v{}.models".format(api_version.replace("-", "_"))
        module = importlib.import_module(module_name)
        models = BaseAKSModels(self.cmd, ResourceType.MGMT_CONTAINERSERVICE)
        # BaseAKSModels must point at the same versioned models module and
        # expose its classes (AgentPool used as a representative sample).
        self.assertEqual(models.model_module, module)
        self.assertEqual(models.AgentPool, module.AgentPool)
class BaseAKSParamDictTestCase(unittest.TestCase):
    """Tests the dict-like parameter wrapper BaseAKSParamDict."""

    def test__init__(self):
        # fail on not passing dictionary-like parameters
        with self.assertRaises(CLIInternalError):
            BaseAKSParamDict([])

    def test_get(self):
        param_dict = BaseAKSParamDict({"abc": "xyz"})
        self.assertEqual(param_dict.get("abc"), "xyz")
        # Falsy-but-present values: a stored None falls back to the default,
        # while stored False and "" are returned as-is.
        param_dict_2 = BaseAKSParamDict({"a": None, "ab": False, "abc": ""})
        self.assertEqual(param_dict_2.get("a", True), True)
        self.assertEqual(param_dict_2.get("a", "xyz"), "xyz")
        self.assertEqual(param_dict_2.get("ab", True), False)
        self.assertEqual(param_dict_2.get("abc", True), "")
        self.assertEqual(param_dict_2.get("abcd", True), True)

    def test_keys(self):
        param_dict = BaseAKSParamDict({"abc": "xyz"})
        self.assertEqual(list(param_dict.keys()), ["abc"])

    def test_values(self):
        param_dict = BaseAKSParamDict({"abc": "xyz"})
        self.assertEqual(list(param_dict.values()), ["xyz"])

    def test_items(self):
        param_dict = BaseAKSParamDict({"abc": "xyz"})
        self.assertEqual(list(param_dict.items()), [("abc", "xyz")])

    def test_print_usage_statistics(self):
        # Smoke test only: ensure printing the statistics does not raise.
        param_dict = BaseAKSParamDict({"abc": "xyz", "def": 100})
        param_dict.print_usage_statistics()
class BaseAKSContextTestCase(unittest.TestCase):
    """Tests BaseAKSContext construction and its intermediate-value store."""

    def setUp(self):
        self.cli_ctx = MockCLI()
        self.cmd = MockCmd(self.cli_ctx)
        self.models = BaseAKSModels(self.cmd, ResourceType.MGMT_CONTAINERSERVICE)

    def test__init__(self):
        # fail on not passing dictionary-like parameters
        with self.assertRaises(CLIInternalError):
            BaseAKSContext(self.cmd, [], self.models, decorator_mode=DecoratorMode.CREATE)
        # fail on not passing decorator_mode with Enum type DecoratorMode
        with self.assertRaises(CLIInternalError):
            BaseAKSContext(self.cmd, BaseAKSParamDict({}), self.models, decorator_mode=1)

    def test_get_intermediate(self):
        ctx_1 = BaseAKSContext(self.cmd, BaseAKSParamDict({}), self.models, decorator_mode=DecoratorMode.CREATE)
        # Unknown keys fall back to the provided default.
        self.assertEqual(
            ctx_1.get_intermediate("fake-intermediate", "not found"),
            "not found",
        )

    def test_set_intermediate(self):
        ctx_1 = BaseAKSContext(self.cmd, BaseAKSParamDict({}), self.models, decorator_mode=DecoratorMode.CREATE)
        ctx_1.set_intermediate("test-intermediate", "test-intermediate-value")
        self.assertEqual(
            ctx_1.get_intermediate("test-intermediate"),
            "test-intermediate-value",
        )
        # Setting an existing key without overwrite_exists keeps the old value.
        ctx_1.set_intermediate("test-intermediate", "new-test-intermediate-value")
        self.assertEqual(
            ctx_1.get_intermediate("test-intermediate"),
            "test-intermediate-value",
        )
        # With overwrite_exists=True the stored value is replaced.
        ctx_1.set_intermediate(
            "test-intermediate",
            "new-test-intermediate-value",
            overwrite_exists=True,
        )
        self.assertEqual(
            ctx_1.get_intermediate("test-intermediate"),
            "new-test-intermediate-value",
        )

    def test_remove_intermediate(self):
        ctx_1 = BaseAKSContext(self.cmd, BaseAKSParamDict({}), self.models, decorator_mode=DecoratorMode.CREATE)
        ctx_1.set_intermediate("test-intermediate", "test-intermediate-value")
        self.assertEqual(
            ctx_1.get_intermediate("test-intermediate"),
            "test-intermediate-value",
        )
        # After removal the key resolves to None (the implicit default).
        ctx_1.remove_intermediate("test-intermediate")
        self.assertEqual(ctx_1.get_intermediate("test-intermediate"), None)
class BaseAKSManagedClusterDecoratorTestCase(unittest.TestCase):
    """Verifies the base decorator stores its inputs and that every hook
    method is abstract (raises NotImplementedError)."""

    def setUp(self):
        self.cli_ctx = MockCLI()
        self.cmd = MockCmd(self.cli_ctx)
        self.client = MockClient()
        self.test_models = BaseAKSModels(self.cmd, ResourceType.MGMT_CONTAINERSERVICE)

    def test_init(self):
        # The constructor simply records cmd and client.
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        self.assertEqual(dec_1.cmd, self.cmd)
        self.assertEqual(dec_1.client, self.client)

    def test_init_models(self):
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        with self.assertRaises(NotImplementedError):
            dec_1.init_models()

    def test_init_context(self):
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        with self.assertRaises(NotImplementedError):
            dec_1.init_context()

    def test_check_is_postprocessing_required(self):
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        mc_1 = self.test_models.ManagedCluster(location="test_location")
        with self.assertRaises(NotImplementedError):
            dec_1.check_is_postprocessing_required(mc_1)

    def test_immediate_processing_after_request(self):
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        mc_1 = self.test_models.ManagedCluster(location="test_location")
        with self.assertRaises(NotImplementedError):
            dec_1.immediate_processing_after_request(mc_1)

    def test_postprocessing_after_mc_created(self):
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        mc_1 = self.test_models.ManagedCluster(location="test_location")
        with self.assertRaises(NotImplementedError):
            dec_1.postprocessing_after_mc_created(mc_1)

    def METHOD_NAME(self):
        # put_mc is also abstract on the base decorator.
        dec_1 = BaseAKSManagedClusterDecorator(self.cmd, self.client)
        mc_1 = self.test_models.ManagedCluster(location="test_location")
        with self.assertRaises(NotImplementedError):
            dec_1.put_mc(mc_1)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
6,061 | to hex | #!/usr/bin/python
# Simple fuzzing tool by disassembling random code. By Nguyen Anh Quynh, 2014
# Syntax:
# ./suite/fuzz.py --> Fuzz all archs
# ./suite/fuzz.py x86 --> Fuzz all X86 (all 16bit, 32bit, 64bit)
# ./suite/fuzz.py x86-16 --> Fuzz X86-32 arch only
# ./suite/fuzz.py x86-32 --> Fuzz X86-32 arch only
# ./suite/fuzz.py x86-64 --> Fuzz X86-64 arch only
# ./suite/fuzz.py arm --> Fuzz all ARM (arm, thumb)
# ./suite/fuzz.py aarch64 --> Fuzz ARM-64
# ./suite/fuzz.py mips --> Fuzz all Mips (32bit, 64bit)
# ./suite/fuzz.py ppc --> Fuzz PPC
from capstone import *
from time import time
from random import randint
import sys
# file providing code to disassemble
FILE = '/usr/bin/python'
TIMES = 64
INTERVALS = (4, 5, 7, 9, 11, 13)
all_tests = (
(CS_ARCH_X86, CS_MODE_16, "X86-16bit (Intel syntax)", 0),
(CS_ARCH_X86, CS_MODE_16, "X86-16bit (ATT syntax)", CS_OPT_SYNTAX_ATT),
(CS_ARCH_X86, CS_MODE_32, "X86-32 (Intel syntax)", 0),
(CS_ARCH_X86, CS_MODE_32, "X86-32 (ATT syntax)", CS_OPT_SYNTAX_ATT),
(CS_ARCH_X86, CS_MODE_64, "X86-64 (Intel syntax)", 0),
(CS_ARCH_X86, CS_MODE_64, "X86-64 (ATT syntax)", CS_OPT_SYNTAX_ATT),
(CS_ARCH_ARM, CS_MODE_ARM, "ARM", 0),
(CS_ARCH_ARM, CS_MODE_THUMB, "THUMB (ARM)", 0),
(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_BIG_ENDIAN, "MIPS-32 (Big-endian)", 0),
(CS_ARCH_MIPS, CS_MODE_MIPS64 + CS_MODE_LITTLE_ENDIAN, "MIPS-64-EL (Little-endian)", 0),
(CS_ARCH_ARM64, CS_MODE_ARM, "ARM-64 (AArch64)", 0),
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, "PPC", 0),
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, "PPC, print register with number only", CS_OPT_SYNTAX_NOREGNAME),
(CS_ARCH_SPARC, CS_MODE_BIG_ENDIAN, "Sparc", 0),
(CS_ARCH_SYSZ, 0, "SystemZ", 0),
(CS_ARCH_XCORE, 0, "XCore", 0),
(CS_ARCH_M68K, 0, "M68K", 0),
(CS_ARCH_RISCV, CS_MODE_RISCV32, "riscv32", 0),
(CS_ARCH_RISCV, CS_MODE_RISCV64, "riscv64", 0),
)
# for debugging
def METHOD_NAME(s):
    """Render *s* as space-separated 0xNN byte values, e.g. "0x61 0x62".

    Fix: accepts both str and bytes. Under Python 3 iterating ``bytes``
    yields ints, so the old unconditional ``ord(c)`` raised TypeError for
    binary input (the fuzz corpus is read from a binary file).
    """
    return " ".join("0x%02x" % (c if isinstance(c, int) else ord(c)) for c in s)
# read @size bytes from @f & return data.
# return None when there is not enough data
def get_code(f, size):
    """Return exactly *size* bytes/chars from *f*, or None (after rewinding)
    when the file cannot supply that many."""
    chunk = f.read(size)
    if len(chunk) == size:
        return chunk
    # Short read means end-of-file: rewind so the fuzzer can wrap around.
    f.seek(0)
    return None
def cs(md, code):
    """Disassemble *code* (with detail enabled on *md*) and print any
    instruction decoded at address 0x100000.

    Fix: Python 2 ``print i`` statement replaced with the ``print()``
    function so the file parses under Python 3.  Since disassembly starts
    at address 0, the guard rarely fires for these short buffers --
    presumably the point is to force full decoding without flooding stdout.
    """
    insns = md.disasm(code, 0)
    for i in insns:
        if i.address == 0x100000:
            print(i)
def cs_lite(md, code):
    """Disassemble *code* in lite mode and print any tuple decoded at
    address 0x100000.

    Fixes two defects in the original body: the Python 2 ``print``
    statement, and a NameError -- it printed ``i``, which is never defined
    in this function (disasm_lite yields (address, size, mnemonic, op_str)
    tuples, not instruction objects).
    """
    insns = md.disasm_lite(code, 0)
    for (addr, size, mnem, ops) in insns:
        if addr == 0x100000:
            print(addr, size, mnem, ops)
# Open the corpus in binary mode: capstone expects bytes, and text mode
# would attempt (and possibly fail) UTF-8 decoding under Python 3.
cfile = open(FILE, 'rb')
for (arch, mode, comment, syntax) in all_tests:
    # Optional CLI filter: only fuzz platforms whose description contains
    # the first command-line argument.
    try:
        request = sys.argv[1]
        if not request in comment.lower():
            continue
    except IndexError:
        # No filter given: fuzz every platform.
        pass
    try:
        md = Cs(arch, mode)
        md.detail = True
        if syntax != 0:
            md.syntax = syntax
        # test disasm()
        print("\nFuzzing disasm() @platform: %s" % comment)
        for ii in INTERVALS:
            print("Interval: %u" % ii)
            # range(), not the removed Python 2 xrange().
            for j in range(1, TIMES):
                while True:
                    code = get_code(cfile, j * ii)
                    if code is None:
                        # EOF? break
                        break
                    #print(METHOD_NAME(code))
                    cs(md, code)
        # test disasm_lite()
        print("Fuzzing disasm_lite() @platform: %s" % comment)
        for ii in INTERVALS:
            print("Interval: %u" % ii)
            for j in range(1, TIMES):
                while True:
                    code = get_code(cfile, j * ii)
                    if code is None:
                        # EOF? break
                        break
                    #print(METHOD_NAME(code))
                    cs_lite(md, code)
    except CsError as e:
        print("ERROR: %s" % e)
6,062 | get export full path | # -*- coding: UTF-8 -*-
import datetime
import os
from urllib.parse import quote
import MySQLdb
import simplejson as json
from django.template import loader
from archery import settings
from sql.engines import get_engine
from django.contrib.auth.decorators import permission_required
from django.http import HttpResponse, JsonResponse, FileResponse
from common.utils.extend_json_encoder import ExtendJSONEncoder
from sql.utils.resource_group import user_instances
from .models import Instance
@permission_required("sql.menu_data_dictionary", raise_exception=True)
def table_list(request):
    """Data dictionary view: return the grouped table list of a database as JSON."""
    instance_name = request.GET.get("instance_name", "")
    db_name = request.GET.get("db_name", "")
    db_type = request.GET.get("db_type", "")
    if instance_name and db_name:
        try:
            instance = Instance.objects.get(
                instance_name=instance_name, db_type=db_type
            )
            query_engine = get_engine(instance=instance)
            # Escape the user-supplied database name before querying.
            db_name = query_engine.escape_string(db_name)
            data = query_engine.get_group_tables_by_db(db_name=db_name)
            res = {"status": 0, "data": data}
        except Instance.DoesNotExist:
            res = {"status": 1, "msg": "Instance.DoesNotExist"}
        except Exception as e:
            res = {"status": 1, "msg": str(e)}
    else:
        # Required parameters missing ("非法调用" = illegal call).
        res = {"status": 1, "msg": "非法调用!"}
    return HttpResponse(
        json.dumps(res, cls=ExtendJSONEncoder, bigint_as_string=True),
        content_type="application/json",
    )
@permission_required("sql.menu_data_dictionary", raise_exception=True)
def table_info(request):
    """Data dictionary view: return metadata, description and indexes of one table."""
    instance_name = request.GET.get("instance_name", "")
    db_name = request.GET.get("db_name", "")
    tb_name = request.GET.get("tb_name", "")
    db_type = request.GET.get("db_type", "")
    if instance_name and db_name and tb_name:
        data = {}
        try:
            instance = Instance.objects.get(
                instance_name=instance_name, db_type=db_type
            )
            query_engine = get_engine(instance=instance)
            # Escape user-supplied identifiers before querying.
            db_name = query_engine.escape_string(db_name)
            tb_name = query_engine.escape_string(tb_name)
            data["meta_data"] = query_engine.get_table_meta_data(
                db_name=db_name, tb_name=tb_name
            )
            data["desc"] = query_engine.get_table_desc_data(
                db_name=db_name, tb_name=tb_name
            )
            data["index"] = query_engine.get_table_index_data(
                db_name=db_name, tb_name=tb_name
            )
            # MySQL can return the CREATE TABLE statement; no equivalent
            # has been found for MSSQL, so this is MySQL-only.
            if instance.db_type == "mysql":
                _create_sql = query_engine.query(
                    db_name, "show create table `%s`;" % tb_name
                )
                data["create_sql"] = _create_sql.rows
            res = {"status": 0, "data": data}
        except Instance.DoesNotExist:
            res = {"status": 1, "msg": "Instance.DoesNotExist"}
        except Exception as e:
            res = {"status": 1, "msg": str(e)}
    else:
        # Required parameters missing ("非法调用" = illegal call).
        res = {"status": 1, "msg": "非法调用!"}
    return HttpResponse(
        json.dumps(res, cls=ExtendJSONEncoder, bigint_as_string=True),
        content_type="application/json",
    )
def METHOD_NAME(base_dir: str, instance_name: str, db_name: str) -> str:
    """Build the export file path under *base_dir*, refusing path traversal.

    Returns the normalized ``<base_dir>/<instance_name>_<db_name>.html``,
    or an empty string when the caller-supplied names would escape
    *base_dir* (e.g. via ``..`` segments or an absolute path).

    Fix: the original ``fullpath.startswith(base_dir)`` prefix test also
    accepted sibling directories such as ``/base_dir-evil/...``; compare
    directory components via os.path.commonpath instead.
    """
    fullpath = os.path.normpath(
        os.path.join(base_dir, f"{instance_name}_{db_name}.html")
    )
    base = os.path.normpath(base_dir)
    try:
        if os.path.commonpath([base, fullpath]) != base:
            return ""
    except ValueError:
        # Mixed absolute/relative paths (or different drives on Windows):
        # the target is definitely outside base_dir.
        return ""
    return fullpath
@permission_required("sql.data_dictionary_export", raise_exception=True)
def export(request):
    """Export the data dictionary as rendered HTML files.

    A named db_name exports that single database and streams the file back;
    superusers may omit db_name to export every database of the instance to
    the downloads directory.
    """
    instance_name = request.GET.get("instance_name", "")
    db_name = request.GET.get("db_name", "")
    try:
        instance = user_instances(
            request.user, db_type=["mysql", "mssql", "oracle"]
        ).get(instance_name=instance_name)
        query_engine = get_engine(instance=instance)
    except Instance.DoesNotExist:
        return JsonResponse({"status": 1, "msg": "你所在组未关联该实例!", "data": []})
    # Regular users may only export the dictionary of one named database.
    if db_name:
        dbs = [query_engine.escape_string(db_name)]
    # Administrators may export the dictionary of the whole instance.
    elif request.user.is_superuser:
        dbs = query_engine.get_all_databases().rows
    else:
        return JsonResponse({"status": 1, "msg": "仅管理员可以导出整个实例的字典信息!", "data": []})
    # Render one HTML file per database into the downloads directory.
    path = os.path.join(settings.BASE_DIR, "downloads", "dictionary")
    os.makedirs(path, exist_ok=True)
    for db in dbs:
        table_metas = query_engine.get_tables_metas_data(db_name=db)
        context = {
            "db_name": db_name,
            "tables": table_metas,
            "export_time": datetime.datetime.now(),
        }
        data = loader.render_to_string(
            template_name="dictionaryexport.html", context=context, request=request
        )
        fullpath = METHOD_NAME(path, instance_name, db)
        if not fullpath:
            return JsonResponse({"status": 1, "msg": "实例名或db名不合法", "data": []})
        with open(fullpath, "w", encoding="utf-8") as fp:
            fp.write(data)
    # Close the engine's database connection.
    query_engine.close()
    if db_name:
        # Single-database export: stream the generated file as a download.
        # NOTE(review): this reuses the loop variable `db` (the escaped
        # db_name) rather than db_name itself -- confirm that is intended.
        fullpath = METHOD_NAME(path, instance_name, db)
        if not fullpath:
            return JsonResponse({"status": 1, "msg": "实例名或db名不合法", "data": []})
        response = FileResponse(open(fullpath, "rb"))
        response["Content-Type"] = "application/octet-stream"
        response[
            "Content-Disposition"
        ] = f'attachment;filename="{quote(instance_name)}_{quote(db_name)}.html"'
        return response
    else:
        # Whole-instance export: files remain on disk for later download.
        return JsonResponse(
            {
                "status": 0,
                "msg": f"实例{instance_name}数据字典导出成功,请到downloads目录下载!",
                "data": [],
            }
        )
6,063 | test psi4 restarts | """
Tests the DQM compute dispatch module
"""
import msgpack
import numpy as np
import pytest
from qcelemental.models import AtomicInput, BasisSet
from qcelemental.tests.test_model_results import center_data
import qcengine as qcng
from qcengine.testing import has_program, using
qcsk_bs = BasisSet(name="custom_basis", center_data=center_data, atom_map=["bs_sto3g_h", "bs_sto3g_h"])
_canonical_methods = [
("dftd3", {"method": "b3lyp-d3"}, {}),
("qcore", {"method": "pbe", "basis": "6-31G"}, {}),
("molpro", {"method": "hf", "basis": "6-31G"}, {}),
("mopac", {"method": "PM6"}, {}),
("mp2d", {"method": "MP2-DMP2"}, {}),
("nwchem", {"method": "hf", "basis": "6-31G"}, {}),
("openmm", {"method": "openff-1.0.0", "basis": "smirnoff"}, {}),
("psi4", {"method": "hf", "basis": "6-31G"}, {}),
("qchem", {"method": "hf", "basis": "6-31G"}, {}),
("rdkit", {"method": "UFF"}, {}),
("terachem_pbs", {"method": "b3lyp", "basis": "6-31G"}, {}),
("torchani", {"method": "ANI1x"}, {}),
("turbomole", {"method": "pbe", "basis": "6-31G"}, {}),
("xtb", {"method": "GFN2-xTB"}, {}),
("adcc", {"method": "adc2", "basis": "6-31G"}, {"n_triplets": 3}),
("gcp", {"method": "hf3c"}, {}),
("mrchem", {"method": "blyp"}, {"world_prec": 1.0e-3}),
("cfour", {"method": "hf", "basis": "6-31G"}, {}),
("gamess", {"method": "hf", "basis": "n31"}, {"basis__NGAUSS": 6}),
("mctc-gcp", {"method": "dft/sv"}, {}),
# add as programs available
# ("terachem", {"method": "bad"}),
]
_canonical_methods_qcsk_basis = [
("adcc", {"method": "adc2", "basis": qcsk_bs}, {"n_triplets": 3}),
("cfour", {"method": "hf", "basis": qcsk_bs}, {}),
("gamess", {"method": "hf", "basis": qcsk_bs}, {}),
("molpro", {"method": "hf", "basis": qcsk_bs}, {}),
("nwchem", {"method": "hf", "basis": qcsk_bs}, {}),
("openmm", {"method": "openff-1.0.0", "basis": qcsk_bs}, {}),
pytest.param("psi4", {"method": "hf", "basis": qcsk_bs}, {}, marks=using("psi4_mp2qcsk")),
("qchem", {"method": "hf", "basis": qcsk_bs}, {}),
("qcore", {"method": "pbe", "basis": qcsk_bs}, {}),
("turbomole", {"method": "pbe", "basis": qcsk_bs}, {}),
]
def _get_molecule(program):
    """Pick a test molecule appropriate for *program*."""
    # openmm (forcefield assignment) and the TeraChem server get water;
    # every other harness is exercised with bare hydrogen.
    needs_water = program in ("openmm", "terachem_pbs")
    return qcng.get_molecule("water" if needs_water else "hydrogen")
@pytest.mark.parametrize("program, model, keywords", _canonical_methods)
def test_compute_energy(program, model, keywords):
    """Smoke-test an energy computation for every canonical program/model pair."""
    # Skip rather than fail when the QC program is not installed locally.
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")
    molecule = _get_molecule(program)
    inp = AtomicInput(molecule=molecule, driver="energy", model=model, keywords=keywords)
    ret = qcng.compute(inp, program, raise_error=True)
    # An energy driver must report success and return a scalar.
    assert ret.success is True
    assert isinstance(ret.return_result, float)
@pytest.mark.parametrize("program, model, keywords", _canonical_methods)
def test_compute_gradient(program, model, keywords):
    """Smoke-test a gradient computation for every canonical program/model pair."""
    if not has_program(program):
        pytest.skip("Program '{}' not found.".format(program))
    molecule = _get_molecule(program)
    inp = AtomicInput(
        molecule=molecule, driver="gradient", model=model, extras={"mytag": "something"}, keywords=keywords
    )
    # adcc does not implement gradients and must raise a clear InputError.
    if program in ["adcc"]:
        with pytest.raises(qcng.exceptions.InputError) as e:
            qcng.compute(inp, program, raise_error=True)
        assert "gradient not implemented" in str(e.value)
    else:
        ret = qcng.compute(inp, program, raise_error=True)
        assert ret.success is True
        # Gradient must be an (natom, 3) array, and extras must round-trip.
        assert isinstance(ret.return_result, np.ndarray)
        assert len(ret.return_result.shape) == 2
        assert ret.return_result.shape[1] == 3
        assert "mytag" in ret.extras, ret.extras
@pytest.mark.parametrize("program, model, keywords", _canonical_methods_qcsk_basis)
def test_compute_energy_qcsk_basis(program, model, keywords):
    """Programs given a QCSchema BasisSet object must raise a clear InputError."""
    if not has_program(program):
        pytest.skip("Program '{}' not found.".format(program))
    molecule = _get_molecule(program)
    inp = AtomicInput(molecule=molecule, driver="energy", model=model, keywords=keywords)
    # None of these harnesses support a structured BasisSet for model.basis.
    with pytest.raises(qcng.exceptions.InputError) as e:
        qcng.compute(inp, program, raise_error=True)
    assert "QCSchema BasisSet for model.basis not implemented" in str(e.value)
@pytest.mark.parametrize(
    "program, model",
    [
        ("cfour", {"method": "bad"}),
        ("dftd3", {"method": "bad"}),
        ("dftd3", {"method": "b3lyp-d3", "driver": "hessian"}),
        ("qcore", {"method": "bad"}),
        ("gamess", {"method": "bad"}),
        ("mopac", {"method": "bad"}),
        ("mp2d", {"method": "bad"}),
        ("nwchem", {"method": "bad"}),
        ("openmm", {"method": "bad"}),
        ("psi4", {"method": "bad"}),
        ("qchem", {"method": "bad"}),
        ("rdkit", {"method": "bad"}),
        ("terachem_pbs", {"method": "bad"}),
        ("torchani", {"method": "bad"}),
        ("turbomole", {"method": "bad"}),
        ("adcc", {"method": "bad"}),
        ("gcp", {"method": "bad"}),
        ("mrchem", {"method": "bad"}),
        ("mctc-gcp", {"method": "bad"}),
        # add as programs available
        # ("molpro", {"method": "bad"}),
        # ("terachem", {"method": "bad"}),
        # ("xtb", {"method": "bad"}),
    ],
)
def test_compute_bad_models(program, model):
    """Each harness must reject an unknown method (or unsupported driver)
    with InputError rather than crashing or silently succeeding."""
    if not has_program(program):
        pytest.skip("Program '{}' not found.".format(program))
    # An unsupported driver may be smuggled in via the model dict
    # (e.g. the dftd3 hessian case above); default to "energy".
    adriver = model.pop("driver", "energy")
    amodel = model
    inp = AtomicInput(molecule=qcng.get_molecule("hydrogen"), driver=adriver, model=amodel)
    with pytest.raises(qcng.exceptions.InputError) as exc:
        ret = qcng.compute(inp, program, raise_error=True)
def METHOD_NAME(monkeypatch):
    """
    Make sure that a random error is raised which can be restarted if psi4 fails with no error message
    """
    if not has_program("psi4"):
        pytest.skip("Program psi4 not found.")
    # create the psi4 task
    inp = AtomicInput(molecule=qcng.get_molecule("hydrogen"), driver="energy", model={"method": "hf", "basis": "6-31G"})
    def mock_execute(*args, **kwargs):
        """
        Mock the output of a failed psi4 task with missing error message.
        """
        # NOTE(review): the key is spelled "sucess"; since this mock
        # deliberately simulates malformed psi4 output, the misspelling may
        # be intentional -- confirm before "fixing" it.
        mock_output = {"sucess": False, "outfiles": {"data.msgpack": msgpack.dumps({"missing": "data"})}}
        return True, mock_output
    # Replace the real psi4 execute with the failing mock.
    monkeypatch.setattr("qcengine.programs.psi4.execute", mock_execute)
    # A failure without a proper error message must surface as a
    # retryable RandomError (retries disabled so it propagates immediately).
    with pytest.raises(qcng.exceptions.RandomError):
        _ = qcng.compute(input_data=inp, program="psi4", raise_error=True, task_config={"retries": 0})
6,064 | run chat pipeline with model id | # Copyright (c) Alibaba, Inc. and its affiliates.
import unittest
import torch
from transformers import BitsAndBytesConfig
from modelscope.pipelines import pipeline
from modelscope.utils.constant import Tasks
from modelscope.utils.test_utils import test_level
class QWenTextGenerationPipelineTest(unittest.TestCase):
    """Smoke tests for QWen-7B text-generation and chat pipelines.

    The checkpoints are local-only paths, and every test is gated behind
    ``test_level() >= 3``, so these are skipped in normal CI runs.
    """

    def setUp(self) -> None:
        self.qwen_base = '../qwen_7b_ckpt_modelscope/'  # local test only
        self.qwen_chat = '../qwen_7b_ckpt_chat_modelscope/'  # local test only
        self.qwen_base_input = '蒙古国的首都是乌兰巴托(Ulaanbaatar)\n冰岛的首都是雷克雅未克(Reykjavik)\n埃塞俄比亚的首都是'
        self.qwen_chat_input = [
            '今天天气真好,我', 'How do you do? ', "What's your", '今夜阳光明媚', '宫廷玉液酒,',
            '7 * 8 + 32 =? ', '请问把大象关冰箱总共要几步?', '1+3=?',
            '请将下面这句话翻译为英文:在哪里跌倒就在哪里趴着'
        ]

    def run_pipeline_with_model_id(self,
                                   model_id,
                                   input,
                                   init_kwargs=None,
                                   run_kwargs=None):
        """Run one text-generation call and print the generated text.

        Args:
            model_id: model id or local checkpoint path.
            input: a single prompt string.
            init_kwargs: optional kwargs for pipeline construction.
            run_kwargs: optional kwargs for the pipeline call.
        """
        # Fix: the defaults were mutable dicts ({}), which are shared across
        # calls; use None sentinels instead (behavior otherwise unchanged).
        init_kwargs = init_kwargs if init_kwargs is not None else {}
        run_kwargs = run_kwargs if run_kwargs is not None else {}
        pipeline_ins = pipeline(
            task=Tasks.text_generation, model=model_id, **init_kwargs)
        pipeline_ins._model_prepare = True
        result = pipeline_ins(input, **run_kwargs)
        print(result['text'])

    def METHOD_NAME(self,
                    model_id,
                    inputs,
                    init_kwargs=None,
                    run_kwargs=None):
        """Run a multi-turn chat, threading the returned history through turns.

        Args:
            model_id: model id or local checkpoint path.
            inputs: iterable of user queries, one per turn.
            init_kwargs: optional kwargs for pipeline construction.
            run_kwargs: optional kwargs (accepted for symmetry; unused here).
        """
        # Fix: mutable default arguments replaced with None sentinels.
        init_kwargs = init_kwargs if init_kwargs is not None else {}
        run_kwargs = run_kwargs if run_kwargs is not None else {}
        pipeline_ins = pipeline(task=Tasks.chat, model=model_id, **init_kwargs)
        pipeline_ins._model_prepare = True
        history = None
        for turn_idx, query in enumerate(inputs, start=1):
            results = pipeline_ins(
                query,
                history=history,
            )
            response, history = results['response'], results['history']
            print(f'===== Turn {turn_idx} ====')
            print('Query:', query, end='\n')
            print('Response:', response, end='\n')

    # 7B_ms_base
    @unittest.skipUnless(test_level() >= 3, 'skip test in current test level')
    def test_qwen_base_with_text_generation(self):
        self.run_pipeline_with_model_id(
            self.qwen_base,
            self.qwen_base_input,
            init_kwargs={
                'device_map': 'auto',
            })

    # 7B_ms_base
    @unittest.skipUnless(test_level() >= 3, 'skip test in current test level')
    def test_qwen_base_with_text_generation_quant_int8(self):
        quantization_config = BitsAndBytesConfig(load_in_8bit=True)
        self.run_pipeline_with_model_id(
            self.qwen_base,
            self.qwen_base_input,
            init_kwargs={
                'device_map': 'auto',
                'use_max_memory': True,
                'quantization_config': quantization_config,
            })

    # 7B_ms_base
    @unittest.skipUnless(test_level() >= 3, 'skip test in current test level')
    def test_qwen_base_with_text_generation_quant_int4(self):
        quantization_config = BitsAndBytesConfig(
            load_in_4bit=True,
            bnb_4bit_quant_type='nf4',
            bnb_4bit_compute_dtype=torch.bfloat16)
        self.run_pipeline_with_model_id(
            self.qwen_base,
            self.qwen_base_input,
            init_kwargs={
                'device_map': 'auto',
                'use_max_memory': True,
                'quantization_config': quantization_config,
            })

    # 7B_ms_chat
    @unittest.skipUnless(test_level() >= 3, 'skip test in current test level')
    def test_qwen_chat_with_chat(self):
        self.METHOD_NAME(
            self.qwen_chat,
            self.qwen_chat_input,
            init_kwargs={
                'device_map': 'auto',
            })

    # 7B_ms_chat
    @unittest.skipUnless(test_level() >= 3, 'skip test in current test level')
    def test_qwen_chat_with_chat_quant_int8(self):
        quantization_config = BitsAndBytesConfig(load_in_8bit=True)
        self.METHOD_NAME(
            self.qwen_chat,
            self.qwen_chat_input,
            init_kwargs={
                'device_map': 'auto',
                'use_max_memory': True,
                'quantization_config': quantization_config,
            })

    # 7B_ms_base
    @unittest.skipUnless(test_level() >= 3, 'skip test in current test level')
    def test_qwen_chat_with_chat_quant_int4(self):
        quantization_config = BitsAndBytesConfig(
            load_in_4bit=True,
            bnb_4bit_quant_type='nf4',
            bnb_4bit_compute_dtype=torch.bfloat16)
        self.METHOD_NAME(
            self.qwen_chat,
            self.qwen_chat_input,
            init_kwargs={
                'device_map': 'auto',
                'use_max_memory': True,
                'quantization_config': quantization_config,
            })
# Run the suite directly when executed as a script.
if __name__ == '__main__':
    unittest.main() |
6,065 | test is in code | """Unittest for idlelib.HyperParser"""
import unittest
from test.test_support import requires
from Tkinter import Tk, Text
from idlelib.EditorWindow import EditorWindow
from idlelib.HyperParser import HyperParser
class DummyEditwin:
    """Minimal stand-in for EditorWindow exposing only what HyperParser uses."""
    def __init__(self, text):
        self.text = text
        self.indentwidth = 8
        self.tabwidth = 8
        self.context_use_ps1 = True
        self.num_context_lines = 50, 500, 1000

    # Borrow the real implementations as plain functions; .im_func unwraps the
    # Python 2 unbound methods so they rebind against this dummy class.
    _build_char_in_string_func = EditorWindow._build_char_in_string_func.im_func
    is_char_in_string = EditorWindow.is_char_in_string.im_func
class HyperParserTest(unittest.TestCase):
    """Tests for idlelib.HyperParser over a fixed, deliberately tricky
    buffer (strings, comments, multi-line brackets, syntax errors)."""

    # Sample text inserted into the Text widget before every test; the
    # 'line.column' indices in the assertions below point into this buffer.
    # NOTE(review): leading spaces inside the line-7 literal may have been
    # collapsed in transit; the '7.9' assertion below assumes the original
    # indentation — confirm against upstream.
    code = (
        '"""This is a module docstring"""\n'
        '# this line is a comment\n'
        'x = "this is a string"\n'
        "y = 'this is also a string'\n"
        'l = [i for i in range(10)]\n'
        'm = [py*py for # comment\n'
        ' py in l]\n'
        'x.__len__\n'
        "z = ((r'asdf')+('a')))\n"
        '[x for x in\n'
        'for = False\n'
        )

    @classmethod
    def setUpClass(cls):
        requires('gui')
        cls.root = Tk()
        cls.root.withdraw()
        cls.text = Text(cls.root)
        cls.editwin = DummyEditwin(cls.text)

    @classmethod
    def tearDownClass(cls):
        del cls.text, cls.editwin
        cls.root.destroy()
        del cls.root

    def setUp(self):
        self.text.insert('insert', self.code)

    def tearDown(self):
        self.text.delete('1.0', 'end')
        self.editwin.context_use_ps1 = True

    def get_parser(self, index):
        """
        Return a parser object with index at 'index'
        """
        return HyperParser(self.editwin, index)

    def test_init(self):
        """
        test corner cases in the init method
        """
        # An index inside/behind a 'console' tag must be rejected.
        with self.assertRaises(ValueError) as ve:
            self.text.tag_add('console', '1.0', '1.end')
            p = self.get_parser('1.5')
        self.assertIn('precedes', str(ve.exception))
        # test without ps1
        self.editwin.context_use_ps1 = False
        # number of lines lesser than 50
        p = self.get_parser('end')
        self.assertEqual(p.rawtext, self.text.get('1.0', 'end'))
        # number of lines greater than 50
        self.text.insert('end', self.text.get('1.0', 'end')*4)
        p = self.get_parser('54.5')

    def test_is_in_string(self):
        get = self.get_parser
        # Positions inside the docstring / string literals report True.
        p = get('1.0')
        self.assertFalse(p.is_in_string())
        p = get('1.4')
        self.assertTrue(p.is_in_string())
        p = get('2.3')
        self.assertFalse(p.is_in_string())
        p = get('3.3')
        self.assertFalse(p.is_in_string())
        p = get('3.7')
        self.assertTrue(p.is_in_string())
        p = get('4.6')
        self.assertTrue(p.is_in_string())

    def METHOD_NAME(self):
        # "Code" is anything that is neither a comment nor a string literal.
        get = self.get_parser
        p = get('1.0')
        self.assertTrue(p.is_in_code())
        p = get('1.1')
        self.assertFalse(p.is_in_code())
        p = get('2.5')
        self.assertFalse(p.is_in_code())
        p = get('3.4')
        self.assertTrue(p.is_in_code())
        p = get('3.6')
        self.assertFalse(p.is_in_code())
        p = get('4.14')
        self.assertFalse(p.is_in_code())

    def test_get_surrounding_bracket(self):
        get = self.get_parser

        def without_mustclose(parser):
            # a utility function to get surrounding bracket
            # with mustclose=False
            return parser.get_surrounding_brackets(mustclose=False)

        def with_mustclose(parser):
            # a utility function to get surrounding bracket
            # with mustclose=True
            return parser.get_surrounding_brackets(mustclose=True)

        p = get('3.2')
        self.assertIsNone(with_mustclose(p))
        self.assertIsNone(without_mustclose(p))
        p = get('5.6')
        self.assertTupleEqual(without_mustclose(p), ('5.4', '5.25'))
        self.assertTupleEqual(without_mustclose(p), with_mustclose(p))
        p = get('5.23')
        self.assertTupleEqual(without_mustclose(p), ('5.21', '5.24'))
        self.assertTupleEqual(without_mustclose(p), with_mustclose(p))
        p = get('6.15')
        # The bracket opened on line 6 is never closed, so mustclose rejects it.
        self.assertTupleEqual(without_mustclose(p), ('6.4', '6.end'))
        self.assertIsNone(with_mustclose(p))
        p = get('9.end')
        self.assertIsNone(with_mustclose(p))
        self.assertIsNone(without_mustclose(p))

    def test_get_expression(self):
        get = self.get_parser
        p = get('4.2')
        self.assertEqual(p.get_expression(), 'y ')
        p = get('4.7')
        # Inside a string literal there is no expression to extract.
        with self.assertRaises(ValueError) as ve:
            p.get_expression()
        self.assertIn('is inside a code', str(ve.exception))
        p = get('5.25')
        self.assertEqual(p.get_expression(), 'range(10)')
        p = get('6.7')
        self.assertEqual(p.get_expression(), 'py')
        p = get('6.8')
        self.assertEqual(p.get_expression(), '')
        p = get('7.9')
        self.assertEqual(p.get_expression(), 'py')
        p = get('8.end')
        self.assertEqual(p.get_expression(), 'x.__len__')
        p = get('9.13')
        self.assertEqual(p.get_expression(), "r'asdf'")
        p = get('9.17')
        with self.assertRaises(ValueError) as ve:
            p.get_expression()
        self.assertIn('is inside a code', str(ve.exception))
        p = get('10.0')
        self.assertEqual(p.get_expression(), '')
        p = get('11.3')
        self.assertEqual(p.get_expression(), '')
        p = get('11.11')
        self.assertEqual(p.get_expression(), 'False')
# Run the suite directly; verbosity=2 prints each test name.
if __name__ == '__main__':
    unittest.main(verbosity=2) |
6,066 | as header | from _typeshed import Incomplete
from collections.abc import Iterable, Iterator
from typing import ClassVar
from typing_extensions import Self, TypedDict
from pkg_resources import Environment
from .. import Command, SetuptoolsDeprecationWarning
__all__ = ["easy_install", "PthDistributions", "extract_wininst_cfg", "get_exe_prefixes"]
class easy_install(Command):
    """Signature-only stub for setuptools' ``easy_install`` command."""
    description: str
    command_consumes_arguments: bool
    user_options: Incomplete
    boolean_options: Incomplete
    negative_opt: Incomplete
    create_index: Incomplete
    user: int
    zip_ok: Incomplete
    install_dir: Incomplete
    index_url: Incomplete
    find_links: Incomplete
    build_directory: Incomplete
    args: Incomplete
    optimize: Incomplete
    upgrade: Incomplete
    editable: Incomplete
    root: Incomplete
    version: Incomplete
    install_purelib: Incomplete
    install_platlib: Incomplete
    install_headers: Incomplete
    install_lib: Incomplete
    install_scripts: Incomplete
    install_data: Incomplete
    install_base: Incomplete
    install_platbase: Incomplete
    install_userbase: Incomplete
    install_usersite: Incomplete
    no_find_links: Incomplete
    package_index: Incomplete
    pth_file: Incomplete
    site_dirs: Incomplete
    installed_projects: Incomplete
    verbose: Incomplete
    def initialize_options(self) -> None: ...
    def delete_blockers(self, blockers) -> None: ...
    # Attributes below are declared between the option hooks in the stub,
    # mirroring where the implementation assigns them.
    config_vars: Incomplete
    script_dir: Incomplete
    all_site_dirs: Incomplete
    shadow_path: Incomplete
    local_index: Incomplete
    outputs: Incomplete
    def finalize_options(self) -> None: ...
    def expand_basedirs(self) -> None: ...
    def expand_dirs(self) -> None: ...
    def run(self, show_deprecation: bool = True) -> None: ...
    def pseudo_tempname(self): ...
    def warn_deprecated_options(self) -> None: ...
    def check_site_dir(self) -> None: ...
    def cant_write_to_target(self) -> None: ...
    def check_pth_processing(self): ...
    def install_egg_scripts(self, dist) -> None: ...
    def add_output(self, path) -> None: ...
    def not_editable(self, spec) -> None: ...
    def check_editable(self, spec) -> None: ...
    def easy_install(self, spec, deps: bool = False): ...
    def install_item(self, spec, download, tmpdir, deps, install_needed: bool = False): ...
    def select_scheme(self, name) -> None: ...
    def process_distribution(self, requirement, dist, deps: bool = True, *info) -> None: ...
    def should_unzip(self, dist): ...
    def maybe_move(self, spec, dist_filename, setup_base): ...
    def install_wrapper_scripts(self, dist) -> None: ...
    def install_script(self, dist, script_name, script_text, dev_path: Incomplete | None = None) -> None: ...
    def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None: ...
    def install_eggs(self, spec, dist_filename, tmpdir): ...
    def egg_distribution(self, egg_path): ...
    def install_egg(self, egg_path, tmpdir): ...
    def install_exe(self, dist_filename, tmpdir): ...
    def exe_to_egg(self, dist_filename, egg_tmp): ...
    def install_wheel(self, wheel_path, tmpdir): ...
    def installation_report(self, req, dist, what: str = "Installed"): ...
    def report_editable(self, spec, setup_script): ...
    def run_setup(self, setup_script, setup_base, args) -> None: ...
    def build_and_install(self, setup_script, setup_base): ...
    def update_pth(self, dist) -> None: ...
    def unpack_progress(self, src, dst): ...
    def unpack_and_compile(self, egg_path, destination): ...
    def byte_compile(self, to_compile) -> None: ...
    def create_home_path(self) -> None: ...
    INSTALL_SCHEMES: Incomplete
    DEFAULT_SCHEME: Incomplete
# Signature-only stub; implementation lives in setuptools.command.easy_install.
def extract_wininst_cfg(dist_filename): ...
# Signature-only stub; implementation lives in setuptools.command.easy_install.
def get_exe_prefixes(exe_filename): ...
class PthDistributions(Environment):
    """Signature-only stub for setuptools' PthDistributions."""
    dirty: bool
    filename: Incomplete
    sitedirs: Incomplete
    basedir: Incomplete
    paths: list[str]
    def __init__(self, filename, sitedirs=()) -> None: ...
    def save(self) -> None: ...
    def add(self, dist) -> None: ...
    def remove(self, dist) -> None: ...
    def make_relative(self, path): ...
# PthDistributions variant that carries prelude/postlude text (stub).
class RewritePthDistributions(PthDistributions):
    prelude: Incomplete
    postlude: Incomplete
class _SplitArgs(TypedDict, total=False):
    # Optional flags consumed via CommandSpec.split_args (see below).
    comments: bool
    posix: bool
class CommandSpec(list[str]):
    """Signature-only stub: a command line represented as a list of arguments."""
    options: list[Incomplete]
    split_args: ClassVar[_SplitArgs]
    @classmethod
    def best(cls) -> type[CommandSpec]: ...
    @classmethod
    def from_param(cls, param: str | Self | Iterable[str] | None) -> Self: ...
    @classmethod
    def from_environment(cls) -> CommandSpec: ...
    @classmethod
    def from_string(cls, string: str) -> CommandSpec: ...
    def install_options(self, script_text: str) -> None: ...
    def METHOD_NAME(self) -> str: ...
# Windows flavor of CommandSpec (stub; no extra members declared).
class WindowsCommandSpec(CommandSpec): ...
class ScriptWriter:
    """Signature-only stub for setuptools' console/GUI script writer."""
    template: ClassVar[str]
    command_spec_class: ClassVar[type[CommandSpec]]
    @classmethod
    def get_args(cls, dist, header: Incomplete | None = None) -> Iterator[tuple[str, str]]: ...
    @classmethod
    def best(cls) -> type[ScriptWriter]: ...
    @classmethod
    def get_header(cls, script_text: str = "", executable: str | CommandSpec | Iterable[str] | None = None) -> str: ...
class WindowsScriptWriter(ScriptWriter):
    # Uses the Windows command-spec class instead of the default.
    command_spec_class: ClassVar[type[WindowsCommandSpec]]
    @classmethod
    def best(cls) -> type[WindowsScriptWriter]: ...
# Writer variant for .exe launcher scripts on Windows (stub).
class WindowsExecutableLauncherWriter(WindowsScriptWriter): ...
# Deprecation warning category specific to easy_install (stub).
class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): ...
6,067 | search | # This file is Copyright 2019 Volatility Foundation and licensed under the Volatility Software License 1.0
# which is available at https://www.volatilityfoundation.org/license/vsl-v1.0
#
import re
from typing import Generator, List, Tuple, Dict, Optional
from volatility3.framework.interfaces import layers
from volatility3.framework.layers.scanners import multiregexp
class BytesScanner(layers.ScannerInterface):
    """Layer scanner reporting every occurrence of a fixed byte needle."""

    thread_safe = True
    _required_framework_version = (2, 0, 0)

    def __init__(self, needle: bytes) -> None:
        super().__init__()
        self.needle = needle

    def __call__(self, data: bytes, data_offset: int) -> Generator[int, None, None]:
        """Yield the absolute offset of each occurrence of the needle in data."""
        position = -1
        while True:
            position = data.find(self.needle, position + 1)
            if position < 0:
                break
            # Hits beginning in the trailing overlap are suppressed here; the
            # next chunk reports them instead, avoiding duplicate results.
            if position < self.chunk_size:
                yield position + data_offset
class RegExScanner(layers.ScannerInterface):
    """A scanner driven by a bytes regular-expression pattern.

    Every scanned block is searched with the compiled pattern and the absolute
    offset of each match is reported.  DOTALL is included by default because
    the data is binary and a newline byte carries no special meaning here.
    """

    thread_safe = True
    _required_framework_version = (2, 0, 0)

    def __init__(self, pattern: bytes, flags: int = re.DOTALL) -> None:
        super().__init__()
        self.regex = re.compile(pattern, flags)

    def __call__(self, data: bytes, data_offset: int) -> Generator[int, None, None]:
        """Yield the absolute start offset of every pattern match in data."""
        for hit in self.regex.finditer(data):
            start = hit.start()
            # Matches starting inside the overlap belong to the next chunk.
            if start < self.chunk_size:
                yield start + data_offset
class MultiStringScanner(layers.ScannerInterface):
    """Scan for any of a set of byte patterns in one pass by compiling the
    patterns into a single regular expression via a byte trie."""
    thread_safe = True
    _required_framework_version = (2, 0, 0)

    def __init__(self, patterns: List[bytes]) -> None:
        super().__init__()
        # Trie keyed on byte values (0-255); the sentinel key -1 marks the
        # end of a complete pattern.
        self._pattern_trie: Optional[Dict[int, Optional[Dict]]] = {}
        for pattern in patterns:
            self._process_pattern(pattern)
        self._regex = self._process_trie(self._pattern_trie)

    def _process_pattern(self, value: bytes) -> None:
        """Insert a single byte pattern into the trie."""
        trie = self._pattern_trie
        if trie is None:
            return None
        for char in value:
            trie[char] = trie.get(char, {})
            trie = trie[char]
        # Mark the end of a string
        trie[-1] = None

    def _process_trie(self, trie: Optional[Dict[int, Optional[Dict]]]) -> bytes:
        """Recursively flatten the trie into an equivalent bytes regex."""
        if trie is None or len(trie) == 1 and -1 in trie:
            # We've reached the end of this path, return the empty byte string
            return b""
        choices = []
        suffixes = []
        finished = False
        for entry in sorted(trie):
            # Clump together different paths
            if entry >= 0:
                remainder = self._process_trie(trie[entry])
                if remainder:
                    choices.append(re.escape(bytes([entry])) + remainder)
                else:
                    # A lone trailing byte: collect for a character class.
                    suffixes.append(re.escape(bytes([entry])))
            else:
                # If we've finished one of the strings at this point, remember it for later
                finished = True
        if len(suffixes) == 1:
            choices.append(suffixes[0])
        elif len(suffixes) > 1:
            choices.append(b"[" + b"".join(suffixes) + b"]")
        if len(choices) == 0:
            # If there's none, return the empty byte string
            response = b""
        elif len(choices) == 1:
            # If there's only one return it
            response = choices[0]
        else:
            response = b"(?:" + b"|".join(choices) + b")"
        if finished:
            # We finished one string, so everything after this is optional
            response = b"(?:" + response + b")?"
        return response

    def __call__(
        self, data: bytes, data_offset: int
    ) -> Generator[Tuple[int, bytes], None, None]:
        """Runs through the data looking for the needles."""
        for offset, pattern in self.METHOD_NAME(data):
            # Hits in the trailing overlap are reported by the next chunk.
            if offset < self.chunk_size:
                yield offset + data_offset, pattern

    def METHOD_NAME(self, haystack: bytes) -> Generator[Tuple[int, bytes], None, None]:
        """Yield (offset, matched bytes) for every hit of the combined regex."""
        if not isinstance(haystack, bytes):
            raise TypeError("Search haystack must be a byte string")
        if not self._regex:
            raise ValueError(
                "MultiRegexp cannot be used with an empty set of search strings"
            )
        for match in re.finditer(self._regex, haystack):
            yield match.start(0), match.group() |
6,068 | on event | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2023, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provides ``PropertyCallbackManager`` and ``EventCallbackManager``
mixin classes for adding ``on_change`` and ``on_event`` callback
interfaces to classes.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from collections import defaultdict
from inspect import signature
from typing import (
TYPE_CHECKING,
Any,
Callable,
Sequence,
Union,
cast,
)
# Bokeh imports
from ..events import Event, ModelEvent
from ..util.functions import get_param_info
if TYPE_CHECKING:
from typing_extensions import TypeAlias
from ..core.has_props import Setter
from ..core.types import ID
from ..document.document import Document
from ..document.events import DocumentPatchedEvent
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'EventCallbackManager',
'PropertyCallbackManager',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
# TODO (bev) the situation with no-argument Button callbacks is a mess. We
# should migrate to all callbacks receving the event as the param, even if that
# means auto-magically wrapping user-supplied callbacks for awhile.
# Accepted callback shapes: event callbacks may take the Event or nothing at
# all (see the TODO above); property callbacks take (attr, old, new).
EventCallbackWithEvent: TypeAlias = Callable[[Event], None]
EventCallbackWithoutEvent: TypeAlias = Callable[[], None]
EventCallback: TypeAlias = Union[EventCallbackWithEvent, EventCallbackWithoutEvent]
PropertyCallback: TypeAlias = Callable[[str, Any, Any], None]
class EventCallbackManager:
    ''' A mixin class to provide an interface for registering and
    triggering event callbacks on the Python side.

    '''

    document: Document | None
    id: ID
    subscribed_events: set[str]

    # event name -> registered callbacks, in registration order
    _event_callbacks: dict[str, list[EventCallback]]

    def __init__(self, *args: Any, **kw: Any) -> None:
        super().__init__(*args, **kw)
        self._event_callbacks = defaultdict(list)

    def METHOD_NAME(self, event: str | type[Event], *callbacks: EventCallback) -> None:
        ''' Run callbacks when the specified event occurs on this Model

        Not all Events are supported for all Models.
        See specific Events in :ref:`bokeh.events` for more information on
        which Models are able to trigger them.
        '''
        # Accept an Event subclass as well as its string event name.
        if not isinstance(event, str) and issubclass(event, Event):
            event = event.event_name
        for callback in callbacks:
            # Zero-argument callbacks are allowed; any other arity must
            # match the single-argument (event) signature.
            if _nargs(callback) != 0:
                _check_callback(callback, ('event',), what='Event callback')
            self._event_callbacks[event].append(callback)
        self.subscribed_events.add(event)

    def _trigger_event(self, event: ModelEvent) -> None:
        # Invoke the callbacks registered under this event's name, but only
        # when the event was raised by this exact model instance.
        def invoke() -> None:
            for callback in self._event_callbacks.get(event.event_name, []):
                if event.model is not None and self.id == event.model.id:
                    if _nargs(callback) == 0:
                        cast(EventCallbackWithoutEvent, callback)()
                    else:
                        cast(EventCallbackWithEvent, callback)(event)
        # When attached to a document, let the document dispatch the
        # invocation; otherwise call directly.
        if self.document is not None:
            from ..model import Model
            self.document.callbacks.notify_event(cast(Model, self), event, invoke)
        else:
            invoke()

    def _update_event_callbacks(self) -> None:
        # Re-subscribe all registered event names on the current document.
        if self.document is None:
            return
        for key in self._event_callbacks:
            from ..model import Model
            self.document.callbacks.subscribe(key, cast(Model, self))
class PropertyCallbackManager:
    ''' A mixin class to provide an interface for registering and
    triggering callbacks.

    '''

    document: Document | None

    # attribute name -> registered callbacks, in registration order
    _callbacks: dict[str, list[PropertyCallback]]

    def __init__(self, *args: Any, **kw: Any) -> None:
        super().__init__(*args, **kw)
        self._callbacks = {}

    def on_change(self, attr: str, *callbacks: PropertyCallback) -> None:
        ''' Add a callback on this object to trigger when ``attr`` changes.

        Args:
            attr (str) : an attribute name on this object
            callback (callable) : a callback function to register

        Returns:
            None
        '''
        if len(callbacks) == 0:
            raise ValueError("on_change takes an attribute name and one or more callbacks, got only one parameter")
        _callbacks = self._callbacks.setdefault(attr, [])
        for callback in callbacks:
            # Registering the same callback twice for one attr is a no-op.
            if callback in _callbacks:
                continue
            _check_callback(callback, ('attr', 'old', 'new'))
            _callbacks.append(callback)

    def remove_on_change(self, attr: str, *callbacks: PropertyCallback) -> None:
        ''' Remove a callback from this object '''
        if len(callbacks) == 0:
            raise ValueError("remove_on_change takes an attribute name and one or more callbacks, got only one parameter")
        _callbacks = self._callbacks.setdefault(attr, [])
        for callback in callbacks:
            # NOTE(review): list.remove raises ValueError for a callback that
            # was never registered — confirm that is the intended contract.
            _callbacks.remove(callback)

    def trigger(self, attr: str, old: Any, new: Any,
            hint: DocumentPatchedEvent | None = None, setter: Setter | None = None) -> None:
        ''' Trigger callbacks for ``attr`` on this object.

        Args:
            attr (str) :
            old (object) :
            new (object) :

        Returns:
            None
        '''
        def invoke() -> None:
            callbacks = self._callbacks.get(attr)
            if callbacks:
                for callback in callbacks:
                    callback(attr, old, new)
        # When attached to a document, let the document dispatch the
        # invocation; otherwise call directly.
        if self.document is not None:
            from ..model import Model
            self.document.callbacks.notify_change(cast(Model, self), attr, old, new, hint, setter, invoke)
        else:
            invoke()
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _nargs(fn: Callable[..., Any]) -> int:
    """Return how many parameters of *fn* lack a default value."""
    names, defaults = get_param_info(signature(fn))
    return len(names) - len(defaults)
def _check_callback(callback: Callable[..., Any], fargs: Sequence[str], what: str ="Callback functions") -> None:
    '''Bokeh-internal function to check callback signature'''
    sig = signature(callback)
    names, defaults = get_param_info(sig)
    required = len(names) - len(defaults)
    if required != len(fargs):
        message = what + " must have signature func(%s), got func%s"
        raise ValueError(message % (", ".join(fargs), str(sig)))
#-----------------------------------------------------------------------------
# Code
#----------------------------------------------------------------------------- |
6,069 | hessian | """ Utilities for Potential classes """
# Standard library
from functools import wraps
# Third-party
import numpy as np
# Project
from ..common import PotentialParameter
from .core import PotentialBase
__all__ = ['from_equation']
__doctest_requires__ = {('from_equation', ): ['sympy']}
def from_equation(expr, vars, pars, name=None, hessian=False):
    r"""
    Create a potential class from an expression for the potential.

    .. note::

        This utility requires having `Sympy <http://www.sympy.org/>`_ installed.

    .. warning::

        These potentials are *not* pickle-able and cannot be written
        out to YAML files (using `~gala.potential.PotentialBase.save()`)

    Parameters
    ----------
    expr : :class:`sympy.core.expr.Expr`, str
        Either a ``Sympy`` expression, or a string that can be converted to
        a ``Sympy`` expression.
    vars : iterable
        An iterable of variable names in the expression.
    pars : iterable
        An iterable of parameter names in the expression.
    name : str (optional)
        The name of the potential class returned.
    hessian : bool (optional)
        Generate a function to compute the Hessian.

    Returns
    -------
    CustomPotential : `~gala.potential.PotentialBase`
        A potential class that represents the input equation. To instantiate the
        potential, use just like a normal class with parameters.

    Examples
    --------
    Here we'll create a potential class for the harmonic oscillator
    potential, :math:`\Phi(x) = \frac{1}{2}\,k\,x^2`:

        >>> Potential = from_equation("1/2*k*x**2", vars="x", pars="k",
        ...                           name='HarmonicOscillator')
        >>> p1 = Potential(k=1.)
        >>> p1
        <HarmonicOscillatorPotential: k=1.00 (dimensionless)>

    The potential class (and object) is a fully-fledged subclass of
    `~gala.potential.PotentialBase` and therefore has many useful methods.
    For example, to integrate an orbit:

        >>> from gala.potential import Hamiltonian
        >>> H = Hamiltonian(p1)
        >>> orbit = H.integrate_orbit([1., 0], dt=0.01, n_steps=1000)
    """
    # sympy is an optional dependency; fail with a clear message if absent.
    try:
        import sympy
        from sympy.utilities.lambdify import lambdify
    except ImportError:
        raise ImportError("sympy is required to use 'from_equation()' "
                          "potential class creation.")

    # convert all input to Sympy objects
    expr = sympy.sympify(expr)
    vars = [sympy.sympify(v) for v in vars]
    var_names = [v.name for v in vars]
    pars = [sympy.sympify(p) for p in pars]
    par_names = [p.name for p in pars]
    ndim = len(vars)

    # Energy / value
    energyfunc = lambdify(vars + pars, expr, dummify=False,
                          modules=['numpy', 'sympy'])

    # Gradient: one lambdified partial derivative per coordinate variable.
    gradfuncs = []
    for var in vars:
        gradfuncs.append(lambdify(vars + pars, sympy.diff(expr, var),
                                  dummify=False,
                                  modules=['numpy', 'sympy']))

    # All parameters are registered as dimensionless PotentialParameters.
    parameters = {}
    for _name in par_names:
        parameters[_name] = PotentialParameter(_name,
                                               physical_type='dimensionless')

    class CustomPotential(PotentialBase, parameters=parameters):
        ndim = len(vars)

        def _energy(self, w, t=0.):
            # Keyword dict: parameter values plus one coordinate column each.
            kw = self.parameters.copy()
            for k, v in kw.items():
                kw[k] = v.value
            for i, name in enumerate(var_names):
                kw[name] = w[:, i]
            return np.array(energyfunc(**kw))

        def _gradient(self, w, t=0.):
            kw = self.parameters.copy()
            for k, v in kw.items():
                kw[k] = v.value
            for i, name in enumerate(var_names):
                kw[name] = w[:, i]
            # Stack the per-variable derivative arrays, then transpose so the
            # orbit axis comes first.
            grad = np.vstack([f(**kw)[np.newaxis] for f in gradfuncs])
            return grad.T

    if name is not None:
        # name = _classnamify(name)
        if "potential" not in name.lower():
            name = name + "Potential"
        CustomPotential.__name__ = str(name)

    # Hessian
    if hessian:
        # One lambdified second derivative per ordered variable pair.
        hessfuncs = []
        for var1 in vars:
            for var2 in vars:
                hessfuncs.append(lambdify(vars + pars,
                                          sympy.diff(expr, var1, var2),
                                          dummify=False,
                                          modules=['numpy', 'sympy']))

        def METHOD_NAME(self, w, t):
            kw = self.parameters.copy()
            for k, v in kw.items():
                kw[k] = v.value
            for i, name in enumerate(var_names):
                kw[name] = w[:, i]
            # expand = [np.newaxis] * w[i].ndim
            # This ain't pretty, bub
            # NOTE(review): `i` below is the leaked index of the *last*
            # coordinate column — presumably any column's shape works for the
            # broadcast check; confirm.
            arrs = []
            for f in hessfuncs:
                hess_arr = np.array(f(**kw))
                if hess_arr.shape != w[:, i].shape:
                    # Constant second derivatives come back scalar; tile them
                    # to match the orbit length.
                    hess_arr = np.tile(hess_arr, reps=w[:, i].shape)
                arrs.append(hess_arr)
            hess = np.vstack(arrs)
            return hess.reshape((ndim, ndim, len(w[:, i])))

        CustomPotential.METHOD_NAME = METHOD_NAME

    # Dynamically generated classes cannot be serialized (see docstring warning).
    CustomPotential.save = None
    return CustomPotential
def format_doc(*args, **kwargs):
    """
    Replaces the docstring of the decorated object and then formats it.
    Modeled after astropy.utils.decorators.format_doc
    """
    def set_docstring(obj):
        # The object's own docstring is the format template.
        template = obj.__doc__
        # Clear it first so a "{__doc__}" placeholder expands to the empty
        # string instead of awkwardly self-inserting the original text.
        obj.__doc__ = None
        kwargs['__doc__'] = obj.__doc__ or ''
        obj.__doc__ = template.format(*args, **kwargs)
        return obj
    return set_docstring
class SympyWrapper:
    """Decorator that supplies sympy symbol dicts for a potential's
    coordinates and parameters to the wrapped function."""

    @classmethod
    def as_decorator(cls, func=None, **kwargs):
        # Support both bare use (@sympy_wrap) and configured use
        # (@sympy_wrap(var=..., include_G=...)).
        self = cls(**kwargs)
        if func is not None and not kwargs:
            return self(func)
        else:
            return self

    def __init__(self, func=None, var=None, include_G=True):
        # Default coordinate names are Cartesian x, y, z.
        if var is None:
            _var = 'x, y, z'
        else:
            _var = var
        self.var = _var
        self.include_G = include_G

    def __call__(self, wrapped_function):
        @wraps(wrapped_function)
        def wrapper(cls, *func_args, **func_kwargs):
            # sympy is optional; import lazily and fail with a clear message.
            try:
                import sympy as sy  # noqa
            except ImportError:
                raise ImportError("Converting to a latex expression requires "
                                  "the sympy package to be installed")
            # Real-valued symbols for each coordinate name, keyed by name.
            _var = sy.symbols(self.var, seq=True, real=True)
            _var = {v.name: v for v in _var}
            if cls._parameters:
                par = sy.symbols(' '.join(cls._parameters.keys()),
                                 seq=True, real=True)
                par = {v.name: v for v in par}
            else:
                par = {}
            if self.include_G:
                # Add a symbol for the gravitational constant unless disabled.
                par['G'] = sy.symbols('G')
            return wrapped_function(cls, _var, par)
        return wrapper
# Convenience alias: usable as @sympy_wrap or @sympy_wrap(var=..., include_G=...).
sympy_wrap = SympyWrapper.as_decorator |
6,070 | get filename | """
Copyright 2007 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
from os import path
from gi.repository import Gtk
from . import Constants, Utils, Dialogs
class FileDialogHelper(Gtk.FileChooserDialog, object):
    """
    A wrapper class for the gtk file chooser dialog.
    Implement a file chooser dialog with only necessary parameters.

    Subclasses configure the dialog via the class attributes ``title``,
    ``action``, ``filter_label`` and ``filter_ext``.
    """
    title = ''
    action = Gtk.FileChooserAction.OPEN
    filter_label = ''
    filter_ext = ''

    def __init__(self, parent, current_file_path):
        """
        FileDialogHelper constructor.
        Create a save or open dialog with cancel and ok buttons.
        Use standard settings: no multiple selection, local files only, and the * filter.

        Args:
            parent: the transient parent window
            current_file_path: starting file path (may be empty/None)
        """
        ok_stock = {
            Gtk.FileChooserAction.OPEN: 'gtk-open',
            Gtk.FileChooserAction.SAVE: 'gtk-save'
        }[self.action]
        Gtk.FileChooserDialog.__init__(self, title=self.title, action=self.action,
                                       transient_for=parent)
        self.add_buttons('gtk-cancel', Gtk.ResponseType.CANCEL,
                         ok_stock, Gtk.ResponseType.OK)
        self.set_select_multiple(False)
        self.set_local_only(True)
        self.parent = parent
        self.current_file_path = current_file_path or path.join(
            Constants.DEFAULT_FILE_PATH, Constants.NEW_FLOGRAPH_TITLE + Constants.FILE_EXTENSION)
        # Fix: use the resolved path so an empty current_file_path still opens
        # in the default directory (dirname('') is '').
        self.set_current_folder(path.dirname(
            self.current_file_path))  # current directory
        self.setup_filters()

    def setup_filters(self, filters=None):
        """Install the file filters; the first named filter becomes active."""
        # Fix: start with False so the first filter is actually selected.
        # This was initialized to True, which made the set_filter() branch
        # below unreachable (dead code) and left no default filter applied.
        set_default = False
        filters = filters or (
            [(self.filter_label, self.filter_ext)] if self.filter_label else [])
        if ('All Files', '') not in filters:
            filters.append(('All Files', ''))
        for label, ext in filters:
            if not label:
                continue
            f = Gtk.FileFilter()
            f.set_name(label)
            f.add_pattern('*' + ext)
            self.add_filter(f)
            if not set_default:
                self.set_filter(f)
                set_default = True

    def run(self):
        """Run the dialog, return the chosen filename(s) or None on cancel."""
        response = Gtk.FileChooserDialog.run(self)
        filename = self.METHOD_NAME() if response == Gtk.ResponseType.OK else None
        self.destroy()
        return filename
class SaveFileDialog(FileDialogHelper):
    """A dialog box to save or open flow graph files. This is a base class, do not use."""
    action = Gtk.FileChooserAction.SAVE

    def __init__(self, parent, current_file_path):
        super(SaveFileDialog, self).__init__(parent, current_file_path)
        # Pre-fill the entry with the current basename plus this dialog's
        # filter extension.
        self.set_current_name(path.splitext(path.basename(
            self.current_file_path))[0] + self.filter_ext)
        self.set_create_folders(True)
        self.set_do_overwrite_confirmation(True)
class OpenFileDialog(FileDialogHelper):
    """A dialog box to save or open flow graph files. This is a base class, do not use."""
    action = Gtk.FileChooserAction.OPEN

    def show_missing_message(self, filename):
        """Pop up a warning dialog for a selected file that does not exist."""
        Dialogs.MessageDialogWrapper(
            self.parent,
            Gtk.MessageType.WARNING, Gtk.ButtonsType.CLOSE, 'Cannot Open!',
            # Fix: the format placeholder was mangled ('(unknown)'); restore
            # '{filename}' so the offending file name actually appears.
            'File <b>{filename}</b> Does not Exist!'.format(
                filename=Utils.encode(filename)),
        ).run_and_destroy()

    def METHOD_NAME(self):
        """
        Run the dialog and get the filename.
        If this is a save dialog and the file name is missing the extension, append the file extension.
        If the file name with the extension already exists, show a overwrite dialog.
        If this is an open dialog, return a list of filenames.

        Returns:
            the complete file path
        """
        filenames = Gtk.FileChooserDialog.get_filenames(self)
        for filename in filenames:
            if not path.exists(filename):
                self.show_missing_message(filename)
                return None  # rerun
        return filenames
class OpenFlowGraph(OpenFileDialog):
    """File chooser used to open one or more flow graph files."""
    title = 'Open a Flow Graph from a File...'
    filter_label = 'Flow Graph Files'
    filter_ext = Constants.FILE_EXTENSION

    def __init__(self, parent, current_file_path=''):
        super(OpenFlowGraph, self).__init__(parent, current_file_path)
        # Unlike the base dialog, several flow graphs may be opened at once.
        self.set_select_multiple(True)
class OpenQSS(OpenFileDialog):
    """File chooser for opening a QSS (Qt style sheet) theme file."""
    title = 'Open a QSS theme...'
    filter_label = 'QSS Themes'
    filter_ext = '.qss'
class SaveFlowGraph(SaveFileDialog):
    """File chooser for saving a flow graph file."""
    title = 'Save a Flow Graph to a File...'
    filter_label = 'Flow Graph Files'
    filter_ext = Constants.FILE_EXTENSION
class SaveConsole(SaveFileDialog):
    """File chooser for saving the console output as a text file."""
    title = 'Save Console to a File...'
    filter_label = 'Test Files'
    filter_ext = '.txt'
class SaveScreenShot(SaveFileDialog):
    """File chooser for saving a flow graph screen shot (pdf/png/svg)."""
    title = 'Save a Flow Graph Screen Shot...'
    filters = [('PDF Files', '.pdf'), ('PNG Files', '.png'),
               ('SVG Files', '.svg')]
    filter_ext = '.pdf'  # the default

    def __init__(self, parent, current_file_path=''):
        super(SaveScreenShot, self).__init__(parent, current_file_path)
        self.config = Gtk.Application.get_default().config
        self._button = button = Gtk.CheckButton(label='Background transparent')
        self._button.set_active(
            self.config.screen_shot_background_transparent())
        self.set_extra_widget(button)

    def setup_filters(self, filters=None):
        """Use the class-level multi-format filter list instead of the default."""
        super(SaveScreenShot, self).setup_filters(self.filters)

    def show_missing_message(self, filename):
        """Pop up an error dialog for a file name with an unsupported extension."""
        Dialogs.MessageDialogWrapper(
            self.parent,
            Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE, 'Can not Save!',
            # Fix: the format placeholder was mangled ('(unknown)'); restore
            # '{filename}' so the offending file name actually appears.
            'File Extension of <b>{filename}</b> not supported!'.format(
                filename=Utils.encode(filename)),
        ).run_and_destroy()

    def run(self):
        """Run until a supported extension is chosen or the dialog is cancelled.

        Returns:
            tuple: (filename or None, background-transparent flag)
        """
        valid_exts = {ext for label, ext in self.filters}
        filename = None
        while True:
            response = Gtk.FileChooserDialog.run(self)
            if response != Gtk.ResponseType.OK:
                filename = None
                break
            filename = self.METHOD_NAME()
            if path.splitext(filename)[1] in valid_exts:
                break
            self.show_missing_message(filename)
        bg_transparent = self._button.get_active()
        self.config.screen_shot_background_transparent(bg_transparent)
        self.destroy()
        return filename, bg_transparent
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
Unit tests for the :func:`iris.analysis.geometry.geometry_area_weights`
function.
"""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests # isort:skip
import warnings
import numpy as np
import shapely.geometry
from iris.analysis.geometry import geometry_area_weights
from iris.coords import DimCoord
from iris.cube import Cube
import iris.tests.stock as stock
class Test(tests.IrisTest):
    """Unit tests for :func:`iris.analysis.geometry.geometry_area_weights`."""

    def setUp(self):
        # 2x2 cube whose x/y cell bounds tile [0, 4] x [0, 4]; the leading
        # length-4 dimension is anonymous payload the weights must repeat
        # over.  self.geometry overlaps only the upper-right cell.
        x_coord = DimCoord([1.0, 3.0], "longitude", bounds=[[0, 2], [2, 4]])
        y_coord = DimCoord([1.0, 3.0], "latitude", bounds=[[0, 2], [2, 4]])
        self.data = np.empty((4, 2, 2))
        dim_coords_and_dims = [(y_coord, (1,)), (x_coord, (2,))]
        self.cube = Cube(self.data, dim_coords_and_dims=dim_coords_and_dims)
        self.geometry = shapely.geometry.Polygon(
            [(3, 3), (3, 50), (50, 50), (50, 3)]
        )

    def METHOD_NAME(self):
        # A geometry that meets the grid only at the single corner point
        # (4, 4) has zero overlap area -> every weight must be zero.
        geometry = shapely.geometry.Polygon([(4, 4), (4, 6), (6, 6), (6, 4)])
        weights = geometry_area_weights(self.cube, geometry)
        self.assertEqual(np.sum(weights), 0)

    def test_overlap(self):
        # self.geometry covers the [3, 4] x [3, 4] quarter of the upper-right
        # cell -> absolute overlap area 1, repeated over the payload dim.
        weights = geometry_area_weights(self.cube, self.geometry)
        expected = np.repeat(
            [[[0.0, 0.0], [0.0, 1.0]]], self.data.shape[0], axis=0
        )
        self.assertArrayEqual(weights, expected)

    def test_overlap_normalize(self):
        # With normalize=True the weight is the overlap as a fraction of the
        # cell area (1 of 4 -> 0.25).
        weights = geometry_area_weights(
            self.cube, self.geometry, normalize=True
        )
        expected = np.repeat(
            [[[0.0, 0.0], [0.0, 0.25]]], self.data.shape[0], axis=0
        )
        self.assertArrayEqual(weights, expected)

    @tests.skip_data
    def test_distinct_xy(self):
        # Weights on a real (pp) grid where x and y map to distinct dims.
        cube = stock.simple_pp()
        cube = cube[:4, :4]
        lon = cube.coord("longitude")
        lat = cube.coord("latitude")
        lon.guess_bounds()
        lat.guess_bounds()
        from iris.util import regular_step
        quarter = abs(regular_step(lon) * regular_step(lat) * 0.25)
        half = abs(regular_step(lon) * regular_step(lat) * 0.5)
        minx = 3.7499990463256836
        maxx = 7.499998092651367
        miny = 84.99998474121094
        maxy = 89.99998474121094
        geometry = shapely.geometry.box(minx, miny, maxx, maxy)
        weights = geometry_area_weights(cube, geometry)
        target = np.array(
            [
                [0, quarter, quarter, 0],
                [0, half, half, 0],
                [0, quarter, quarter, 0],
                [0, 0, 0, 0],
            ]
        )
        self.assertTrue(np.allclose(weights, target))

    @tests.skip_data
    def test_distinct_xy_bounds(self):
        # cases where geometry bnds are outside cube bnds correctly handled?
        cube = stock.simple_pp()
        cube = cube[:4, :4]
        lon = cube.coord("longitude")
        lat = cube.coord("latitude")
        lon.guess_bounds()
        lat.guess_bounds()
        from iris.util import regular_step
        quarter = abs(regular_step(lon) * regular_step(lat) * 0.25)
        half = abs(regular_step(lon) * regular_step(lat) * 0.5)
        full = abs(regular_step(lon) * regular_step(lat))
        minx = 3.7499990463256836
        maxx = 13.12499619
        maxx_overshoot = 15.0
        miny = 84.99998474121094
        maxy = 89.99998474121094
        geometry = shapely.geometry.box(minx, miny, maxx, maxy)
        geometry_overshoot = shapely.geometry.box(
            minx, miny, maxx_overshoot, maxy
        )
        weights = geometry_area_weights(cube, geometry)
        weights_overshoot = geometry_area_weights(cube, geometry_overshoot)
        # Overshooting the x extent must produce the same clamped weights.
        target = np.array(
            [
                [0, quarter, half, half],
                [0, half, full, full],
                [0, quarter, half, half],
                [0, 0, 0, 0],
            ]
        )
        self.assertTrue(np.allclose(weights, target))
        self.assertTrue(np.allclose(weights_overshoot, target))

    @tests.skip_data
    def test_distinct_xy_bounds_pole(self):
        # is UserWarning issued for out-of-bounds? results will be unexpected!
        cube = stock.simple_pp()
        cube = cube[:4, :4]
        lon = cube.coord("longitude")
        lat = cube.coord("latitude")
        lon.guess_bounds()
        lat.guess_bounds()
        from iris.util import regular_step
        quarter = abs(regular_step(lon) * regular_step(lat) * 0.25)
        half = abs(regular_step(lon) * regular_step(lat) * 0.5)
        top_cell_half = abs(regular_step(lon) * (90 - lat.bounds[0, 1]) * 0.5)
        minx = 3.7499990463256836
        maxx = 7.499998092651367
        miny = 84.99998474121094
        maxy = 99.99998474121094  # deliberately beyond the pole (> 90)
        geometry = shapely.geometry.box(minx, miny, maxx, maxy)
        # see http://stackoverflow.com/a/3892301 to assert warnings
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")  # always trigger all warnings
            weights = geometry_area_weights(cube, geometry)
            self.assertEqual(
                str(w[-1].message),
                "The geometry exceeds the "
                "cube's y dimension at the upper end.",
            )
            self.assertTrue(issubclass(w[-1].category, UserWarning))
        target = np.array(
            [
                [0, top_cell_half, top_cell_half, 0],
                [0, half, half, 0],
                [0, quarter, quarter, 0],
                [0, 0, 0, 0],
            ]
        )
        self.assertTrue(np.allclose(weights, target))

    def test_shared_xy(self):
        # Trajectory-style cube where x and y share a single dimension.
        cube = stock.track_1d()
        geometry = shapely.geometry.box(1, 4, 3.5, 7)
        weights = geometry_area_weights(cube, geometry)
        target = np.array([0, 0, 2, 0.5, 0, 0, 0, 0, 0, 0, 0])
        self.assertTrue(np.allclose(weights, target))
# Fix: removed dataset-mangling trailing ' |' token that broke parsing.
if __name__ == "__main__":
    tests.main()
"""Ensure OpenID Connect Authorization Request 'claims' are preserved across authorization.
The claims parameter is an optional query param for the Authorization Request endpoint
but if it is provided and is valid it needs to be deserialized (from urlencoded JSON)
and persisted with the authorization code itself, then in the subsequent Access Token
request the claims should be transferred (via the oauthlib request) to be persisted
with the Access Token when it is created.
"""
from unittest import mock
from oauthlib.openid import RequestValidator
from oauthlib.openid.connect.core.endpoints.pre_configured import Server
from tests.oauth2.rfc6749.endpoints.test_utils import get_query_credentials
from tests.unittest import TestCase
class TestClaimsHandling(TestCase):
    """Round-trip the OIDC 'claims' request parameter code -> token.

    Fix: removed the dataset-mangling trailing ' |' token on the final line
    that made the file unparseable.
    """

    DEFAULT_REDIRECT_URI = 'http://i.b./path'

    def set_scopes(self, scopes):
        """Return a validate_code-style side effect that forces *scopes*."""
        def set_request_scopes(client_id, code, client, request):
            request.scopes = scopes
            return True
        return set_request_scopes

    def METHOD_NAME(self, request):
        """Side effect: attach a dummy user and client to the request."""
        request.user = 'foo'
        request.client_id = 'bar'
        request.client = mock.MagicMock()
        request.client.client_id = 'mocked'
        return True

    def set_client(self, request):
        """Side effect: attach a mocked client to the request."""
        request.client = mock.MagicMock()
        request.client.client_id = 'mocked'
        return True

    def save_claims_with_code(self, client_id, code, request, *args, **kwargs):
        # a real validator would save the claims with the code during save_authorization_code()
        self.claims_from_auth_code_request = request.claims
        self.scopes = request.scopes.split()

    def retrieve_claims_saved_with_code(self, client_id, code, client, request, *args, **kwargs):
        """Restore the claims/scopes captured at authorization-code time."""
        request.claims = self.claims_from_auth_code_request
        request.scopes = self.scopes
        return True

    def save_claims_with_bearer_token(self, token, request, *args, **kwargs):
        # a real validator would save the claims with the access token during save_bearer_token()
        self.claims_saved_with_bearer_token = request.claims

    def setUp(self):
        self.validator = mock.MagicMock(spec=RequestValidator)
        self.validator.get_code_challenge.return_value = None
        self.validator.get_default_redirect_uri.return_value = TestClaimsHandling.DEFAULT_REDIRECT_URI
        self.validator.authenticate_client.side_effect = self.set_client
        self.validator.save_authorization_code.side_effect = self.save_claims_with_code
        self.validator.validate_code.side_effect = self.retrieve_claims_saved_with_code
        self.validator.save_token.side_effect = self.save_claims_with_bearer_token
        self.server = Server(self.validator)

    def test_claims_stored_on_code_creation(self):
        """Valid urlencoded claims survive from auth code to bearer token."""
        claims = {
            "id_token": {
                "claim_1": None,
                "claim_2": {
                    "essential": True
                }
            },
            "userinfo": {
                "claim_3": {
                    "essential": True
                },
                "claim_4": None
            }
        }
        claims_urlquoted = '%7B%22id_token%22%3A%20%7B%22claim_2%22%3A%20%7B%22essential%22%3A%20true%7D%2C%20%22claim_1%22%3A%20null%7D%2C%20%22userinfo%22%3A%20%7B%22claim_4%22%3A%20null%2C%20%22claim_3%22%3A%20%7B%22essential%22%3A%20true%7D%7D%7D'
        uri = 'http://example.com/path?client_id=abc&scope=openid+test_scope&response_type=code&claims=%s'
        h, b, s = self.server.create_authorization_response(uri % claims_urlquoted, scopes='openid test_scope')
        self.assertDictEqual(self.claims_from_auth_code_request, claims)
        code = get_query_credentials(h['Location'])['code'][0]
        token_uri = 'http://example.com/path'
        _, body, _ = self.server.create_token_response(
            token_uri,
            body='client_id=me&redirect_uri=http://back.to/me&grant_type=authorization_code&code=%s' % code
        )
        self.assertDictEqual(self.claims_saved_with_bearer_token, claims)

    def test_invalid_claims(self):
        """Non-JSON claims produce an invalid_request error redirect."""
        uri = 'http://example.com/path?client_id=abc&scope=openid+test_scope&response_type=code&claims=this-is-not-json'
        h, b, s = self.server.create_authorization_response(uri, scopes='openid test_scope')
        error = get_query_credentials(h['Location'])['error'][0]
        error_desc = get_query_credentials(h['Location'])['error_description'][0]
        self.assertEqual(error, 'invalid_request')
        self.assertEqual(error_desc, "Malformed claims parameter")
import sqlite3
from collections import namedtuple
from wazuh_testing.modules.aws import (
ALB_TYPE,
AWS_SERVICES_DB_PATH,
CISCO_UMBRELLA_TYPE,
CLB_TYPE,
CLOUD_TRAIL_TYPE,
CUSTOM_TYPE,
GUARD_DUTY_TYPE,
NLB_TYPE,
S3_CLOUDTRAIL_DB_PATH,
SERVER_ACCESS_TABLE_NAME,
VPC_FLOW_TYPE,
WAF_TYPE,
)
# Generic query used by every row-fetching helper below.
SELECT_QUERY_TEMPLATE = 'SELECT * FROM {table_name}'

# One namedtuple per table layout; fields mirror the table columns.
S3CloudTrailRow = namedtuple(
    'S3CloudTrailRow', 'bucket_path aws_account_id aws_region log_key processed_date created_date'
)
S3VPCFlowRow = namedtuple(
    'S3VPCFlowRow', 'bucket_path aws_account_id aws_region flowlog_id log_key processed_date created_date'
)
S3ALBRow = namedtuple(
    'S3ALBRow', 'bucket_path aws_account_id log_key processed_date created_date'
)
S3CustomRow = namedtuple(
    'S3CustomRow', 'bucket_path aws_account_id log_key processed_date created_date'
)
S3GuardDutyRow = namedtuple(
    'S3GuardDutyRow', 'bucket_path aws_account_id log_key processed_date created_date'
)
S3WAFRow = namedtuple(
    'S3WAFRow', 'bucket_path aws_account_id log_key processed_date created_date'
)
S3ServerAccessRow = namedtuple(
    'S3ServerAccessRow', 'bucket_path aws_account_id log_key processed_date created_date'
)
ServiceInspectorRow = namedtuple(
    'ServiceInspectorRow', 'service account_id region timestamp'
)
ServiceCloudWatchRow = namedtuple(
    'ServiceCloudWatchRow', 'aws_region aws_log_group aws_log_stream next_token start_time end_time'
)
S3UmbrellaRow = namedtuple(
    'S3UmbrellaRow', 'bucket_path aws_account_id log_key processed_date created_date'
)

# Dispatch tables used by _get_s3_row_type / _get_service_row_type.
s3_rows_map = {
    CLOUD_TRAIL_TYPE: S3CloudTrailRow,
    VPC_FLOW_TYPE: S3VPCFlowRow,
    ALB_TYPE: S3ALBRow,
    CLB_TYPE: S3ALBRow,
    NLB_TYPE: S3ALBRow,
    CUSTOM_TYPE: S3CustomRow,
    GUARD_DUTY_TYPE: S3GuardDutyRow,
    WAF_TYPE: S3WAFRow,
    SERVER_ACCESS_TABLE_NAME: S3ServerAccessRow,
    CISCO_UMBRELLA_TYPE: S3UmbrellaRow
}
service_rows_map = {
    'cloudwatch_logs': ServiceCloudWatchRow,
    'aws_services': ServiceInspectorRow
}
def _get_s3_row_type(bucket_type):
    """Resolve the namedtuple class used to wrap rows of a bucket table.

    Args:
        bucket_type (str): The name of the bucket.

    Returns:
        Type[S3CloudTrailRow]: The matching row class, or S3CloudTrailRow
        when the bucket type is unknown.
    """
    try:
        return s3_rows_map[bucket_type]
    except KeyError:
        return S3CloudTrailRow
def _get_service_row_type(table_name):
    """Resolve the namedtuple class used to wrap rows of a service table.

    Args:
        table_name (str): Table name to match.

    Returns:
        Type[ServiceCloudWatchRow]: The matching row class, or
        ServiceCloudWatchRow when the table name is unknown.
    """
    try:
        return service_rows_map[table_name]
    except KeyError:
        return ServiceCloudWatchRow
def get_db_connection(path):
    """Open a connection to the given sqlite database.

    Args:
        path (Path): The path of the sqlite file.

    Returns:
        sqlite3.Connection: A connection with the specified DB.
    """
    connection = sqlite3.connect(path)
    return connection
def table_exists(table_name, db_path=S3_CLOUDTRAIL_DB_PATH):
    """Check if the given table name exists.

    Args:
        table_name (str): Table name to search for.
        db_path (Path): sqlite database path (defaults to s3_cloudtrail.db).

    Returns:
        bool: True if exists else False.
    """
    connection = get_db_connection(db_path)
    cursor = connection.cursor()
    query = """
    SELECT
        name
    FROM
        sqlite_master
    WHERE
        type ='table' AND
        name NOT LIKE 'sqlite_%';
    """
    try:
        return table_name in [result[0] for result in cursor.execute(query).fetchall()]
    finally:
        # Fix: the connection was previously leaked on every call.
        connection.close()
# cloudtrail.db utils
def s3_db_exists():
    """Tell whether the `s3_cloudtrail.db` file is present on disk.

    Returns:
        bool: True if exists else False.
    """
    return S3_CLOUDTRAIL_DB_PATH.exists()
def delete_s3_db() -> None:
    """Delete `s3_cloudtrail.db` file, doing nothing when it is absent."""
    if S3_CLOUDTRAIL_DB_PATH.exists():
        S3_CLOUDTRAIL_DB_PATH.unlink()
def get_s3_db_row(table_name) -> S3CloudTrailRow:
    """Return one row from the given table name.

    Args:
        table_name (str): Table name to search into.

    Returns:
        S3CloudTrailRow: The first row of the table.
    """
    connection = get_db_connection(S3_CLOUDTRAIL_DB_PATH)
    try:
        cursor = connection.cursor()
        result = cursor.execute(SELECT_QUERY_TEMPLATE.format(table_name=table_name)).fetchone()
    finally:
        # Fix: close the connection instead of leaking it on every call.
        connection.close()
    row_type = _get_s3_row_type(table_name)
    return row_type(*result)
def get_multiple_s3_db_row(table_name):
    """Return all rows from the given table name.

    Args:
        table_name (str): Table name to search into.

    Yields:
        Iterator[S3CloudTrailRow]: All the rows in the table.
    """
    connection = get_db_connection(S3_CLOUDTRAIL_DB_PATH)
    cursor = connection.cursor()
    row_type = _get_s3_row_type(table_name)
    try:
        for row in cursor.execute(SELECT_QUERY_TEMPLATE.format(table_name=table_name)):
            yield row_type(*row)
    finally:
        # Fix: release the connection when iteration finishes or is
        # abandoned (it was previously leaked on every call).
        connection.close()
def table_exists_or_has_values(table_name, db_path=S3_CLOUDTRAIL_DB_PATH):
    """Check if the given table name exists. If exists check if has values.

    Args:
        table_name (str): Table name to search for.
        db_path (Path): sqlite database path (defaults to s3_cloudtrail.db).

    Returns:
        bool: True if exists or has values else False.
    """
    connection = get_db_connection(db_path)
    try:
        cursor = connection.cursor()
        return bool(cursor.execute(SELECT_QUERY_TEMPLATE.format(table_name=table_name)).fetchall())
    except sqlite3.OperationalError:
        # Missing table: sqlite raises instead of returning an empty result.
        return False
    finally:
        # Fix: close the connection instead of leaking it on every call.
        connection.close()
# aws_services.db utils
def services_db_exists():
    """Tell whether the `aws_services.db` file is present on disk.

    Returns:
        bool: True if exists else False.
    """
    return AWS_SERVICES_DB_PATH.exists()
def delete_services_db() -> None:
    """Delete `aws_services.db` file, doing nothing when it is absent."""
    if AWS_SERVICES_DB_PATH.exists():
        AWS_SERVICES_DB_PATH.unlink()
def METHOD_NAME(table_name):
    """Return one row from the given table name.

    Args:
        table_name (str): Table name to search into.

    Returns:
        ServiceInspectorRow: The first row of the table.
    """
    row_type = _get_service_row_type(table_name)
    connection = get_db_connection(AWS_SERVICES_DB_PATH)
    try:
        cursor = connection.cursor()
        result = cursor.execute(SELECT_QUERY_TEMPLATE.format(table_name=table_name)).fetchone()
    finally:
        # Fix: close the connection instead of leaking it on every call.
        connection.close()
    return row_type(*result)
def get_multiple_service_db_row(table_name):
    """Return all rows from the given table name.

    Args:
        table_name (str): Table name to search into.

    Yields:
        Iterator[ServiceInspectorRow]: All the rows in the table.
    """
    row_type = _get_service_row_type(table_name)
    connection = get_db_connection(AWS_SERVICES_DB_PATH)
    cursor = connection.cursor()
    try:
        for row in cursor.execute(SELECT_QUERY_TEMPLATE.format(table_name=table_name)):
            yield row_type(*row)
    finally:
        # Fix: release the connection when iteration finishes (previously
        # leaked); also removed the dataset-mangling trailing ' |' token.
        connection.close()
# (C) Copyright 2005-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
"""
Tests for the PrefixMap handler.
"""
import pickle
import unittest
from traits.api import HasTraits, Int, PrefixMap, TraitError
class Person(HasTraits):
    # Maps a (possibly abbreviated) answer string to 1 (married) or 0 (not);
    # shared fixture for the assignment tests below.
    married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
class TestPrefixMap(unittest.TestCase):
    """Tests for the PrefixMap trait handler (unique-prefix key matching).

    Fix: removed the dataset-mangling trailing ' |' token on the final line
    that made the file unparseable.
    """

    def test_assignment(self):
        person = Person()
        # Test prefix
        person.married = "yea"
        self.assertEqual("yeah", person.married)
        self.assertEqual(1, person.married_)
        person.married = "yes"
        self.assertEqual("yes", person.married)
        self.assertEqual(1, person.married_)
        person.married = "na"
        self.assertEqual("nah", person.married)
        self.assertEqual(0, person.married_)
        with self.assertRaises(TraitError):
            person.married = "unknown"
        # Test duplicate prefix
        with self.assertRaises(TraitError):
            person.married = "ye"

    def test_bad_types(self):
        """Non-string values are rejected with TraitError."""
        person = Person()
        wrong_type = [[], (1, 2, 3), 1j, 2.3, 23, b"not a string", None]
        for value in wrong_type:
            with self.subTest(value=value):
                with self.assertRaises(TraitError):
                    person.married = value

    def test_no_default(self):
        mapping = {"yes": 1, "yeah": 1, "no": 0, "nah": 0}

        class Person(HasTraits):
            married = PrefixMap(mapping)

        p = Person()
        # Since we're using Python >= 3.6, we can rely on dictionaries
        # being ordered, and then the default is predictable.
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)

    def test_default(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="nah")

        p = Person()
        self.assertEqual(p.married, "nah")
        self.assertEqual(p.married_, 0)

    def test_default_keyword_only(self):
        """default_value may only be supplied as a keyword argument."""
        with self.assertRaises(TypeError):
            PrefixMap({"yes": 1, "no": 0}, "yes")

    def test_default_method(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "nah"

        p = Person()
        self.assertEqual(p.married, "nah")
        self.assertEqual(p.married_, 0)
        self.assertEqual(p.default_calls, 1)
        # Check that the order doesn't matter
        p2 = Person()
        self.assertEqual(p2.married_, 0)
        self.assertEqual(p2.married, "nah")
        self.assertEqual(p2.default_calls, 1)

    def test_default_static_override_static(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="nah")

        class Person(BasePerson):
            married = "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)

    def test_default_static_override_method(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="nah")

        class Person(BasePerson):
            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 1)

    def test_default_method_override_static(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "nah"

        class Person(BasePerson):
            married = "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 0)

    def METHOD_NAME(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "nah"

        class Person(BasePerson):
            def _married_default(self):
                self.default_calls += 1
                return "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 1)

    def test_static_default_transformed(self):
        # Test the static default is transformed
        class Person(HasTraits):
            married = PrefixMap(
                {"yes": 1, "yeah": 1, "no": 0}, default_value="yea")

        p = Person()
        self.assertEqual(p.married, "yeah")
        self.assertEqual(p.married_, 1)
        # access mapped trait first is okay
        p = Person()
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.married, "yeah")

    def test_static_default_validation_error(self):
        """A default that matches no key fails at class-definition time."""
        with self.assertRaises(ValueError):
            class Person(HasTraits):
                married = PrefixMap(
                    {"yes": 1, "yeah": 1, "no": 0}, default_value="meh")

    def test_no_nested_exception(self):
        # Regression test for enthought/traits#1155
        class A(HasTraits):
            washable = PrefixMap({"yes": 1, "no": 0})

        a = A()
        try:
            a.washable = "affirmatron"
        except TraitError as exc:
            self.assertIsNone(exc.__context__)
            self.assertIsNone(exc.__cause__)

    def test_pickle_roundtrip(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="yea")

        p = Person()
        married_trait = p.traits()["married"]
        reconstituted = pickle.loads(pickle.dumps(married_trait))
        self.assertEqual(married_trait.validate(p, "married", "yea"), "yeah")
        self.assertEqual(reconstituted.validate(p, "married", "yea"), "yeah")
        with self.assertRaises(TraitError):
            reconstituted.validate(p, "married", "uknown")
        with self.assertRaises(TraitError):
            reconstituted.validate(p, "married", "ye")

    def test_empty_map(self):
        with self.assertRaises(ValueError):
            PrefixMap({})

    def test_pickle_shadow_trait(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="yeah")

        p = Person()
        married_shadow_trait = p.trait("married_")
        reconstituted = pickle.loads(pickle.dumps(married_shadow_trait))
        default_value_callable = reconstituted.default_value()[1]
        self.assertEqual(default_value_callable(p), 1)

    def test_existence_of__map(self):
        # This test can be removed once Mayavi no longer depends on the
        # existence of the _map attribute.
        # xref: enthought/traits#1577
        # xref: enthought/mayavi#1094
        prefix_map = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
        self.assertEqual(prefix_map._map["yes"], "yes")
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
from peacock.Input import OutputNames, InputTree, ExecutableInfo
from peacock.utils import Testing
import datetime
from PyQt5 import QtWidgets
class Tests(Testing.PeacockTester):
    """Tests for OutputNames: deriving output file names from an input tree."""

    # A QApplication must exist before any Qt widgets are instantiated.
    qapp = QtWidgets.QApplication([])

    def create_tree(self, input_file):
        """Build a valid InputTree for *input_file* using the moose test exe."""
        app_info = ExecutableInfo.ExecutableInfo()
        app_info.setPath(Testing.find_moose_test_exe())
        self.assertTrue(app_info.valid())
        input_tree = InputTree.InputTree(app_info)
        input_tree.setInputFile(input_file)
        return input_tree

    def testOutputFiles(self):
        """Exodus names honor file_base; removing it falls back to <input>_out.e."""
        input_file = "../../common/transient.i"
        input_tree = self.create_tree(input_file)
        output_names = OutputNames.getOutputFiles(input_tree, input_file)
        self.assertEqual(output_names, ["out_transient.e"])
        outputs = input_tree.getBlockInfo("/Outputs")
        file_base = outputs.getParamInfo("file_base")
        file_base.value = "new_file_base"
        # Removing file_base entirely switches to the default naming scheme.
        outputs.parameters_list.remove("file_base")
        del outputs.parameters["file_base"]
        output_names = OutputNames.getOutputFiles(input_tree, input_file)
        self.assertEqual(output_names, ["transient_out.e"])

    def METHOD_NAME(self):
        """Oversample sub-blocks contribute their own output file names."""
        input_file = "../../common/oversample.i"
        input_tree = self.create_tree(input_file)
        output_names = OutputNames.getOutputFiles(input_tree, input_file)
        self.assertEqual(output_names, ["out_transient.e", "oversample_2.e"])
        outputs = input_tree.getBlockInfo("/Outputs")
        outputs.parameters_list.remove("file_base")
        del outputs.parameters["file_base"]
        output_names = OutputNames.getOutputFiles(input_tree, input_file)
        self.assertEqual(output_names, ["oversample_out.e", "oversample_2.e"])
        # Dropping the sub-block's file_base falls back to "<base>_<block>.e".
        outputs = input_tree.getBlockInfo("/Outputs/refine_2")
        t = outputs.getTypeBlock()
        t.parameters_list.remove("file_base")
        del t.parameters["file_base"]
        output_names = OutputNames.getOutputFiles(input_tree, input_file)
        self.assertEqual(output_names, ["oversample_out.e", "oversample_refine_2.e"])

    def testDate(self):
        """Date-stamped outputs expand to the current UTC date."""
        input_file = "../../common/transient_with_date.i"
        input_tree = self.create_tree(input_file)
        output_names = OutputNames.getOutputFiles(input_tree, input_file)
        utc = datetime.datetime.utcnow()
        self.assertEqual(output_names, ["with_date.e", "with_date_%s.e" % utc.strftime("%Y-%m-%d")])

    def testPostprocessor(self):
        """Postprocessor CSV names mirror the exodus naming rules."""
        input_file = "../../common/transient.i"
        input_tree = self.create_tree(input_file)
        output_names = OutputNames.getPostprocessorFiles(input_tree, input_file)
        self.assertEqual(output_names, ["out_transient.csv"])
        outputs = input_tree.getBlockInfo("/Outputs")
        outputs.parameters_list.remove("file_base")
        del outputs.parameters["file_base"]
        output_names = OutputNames.getPostprocessorFiles(input_tree, input_file)
        self.assertEqual(output_names, ["transient_out.csv"])

    def testVectorPostprocessor(self):
        """Vector postprocessor names get a '*' wildcard for the time index."""
        input_file = "../../common/time_data.i"
        input_tree = self.create_tree(input_file)
        output_names = OutputNames.getVectorPostprocessorFiles(input_tree, input_file)
        self.assertEqual(output_names, ["time_data_line_sample_*.csv"])
        outputs = input_tree.getBlockInfo("/Outputs")
        p = outputs.getParamInfo("file_base")
        p.value = "foo"
        output_names = OutputNames.getVectorPostprocessorFiles(input_tree, input_file)
        self.assertEqual(output_names, ["foo_line_sample_*.csv"])
        outputs.parameters_list.remove("file_base")
        del outputs.parameters["file_base"]
        output_names = OutputNames.getVectorPostprocessorFiles(input_tree, input_file)
        self.assertEqual(output_names, ["time_data_out_line_sample_*.csv"])
# Fix: removed dataset-mangling trailing ' |' token that broke parsing.
if __name__ == '__main__':
    Testing.run_tests()
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
from typing import (
Dict, Optional, List
)
import logging
from kiwi.logger_socket import PlainTextSocketHandler
import sys
# project
from kiwi.logger_color_formatter import ColorFormatter
from kiwi.logger_filter import (
LoggerSchedulerFilter,
InfoFilter,
DebugFilter,
ErrorFilter,
WarningFilter
)
from kiwi.exceptions import (
KiwiLogFileSetupFailed,
KiwiLogSocketSetupFailed
)
class Logger(logging.Logger):
"""
**Extended logging facility based on Python logging**
:param str name: name of the logger
"""
def __init__(self, name: str):
logging.Logger.__init__(self, name)
self.log_handlers: Dict = {}
self.logfile: Optional[str] = None
# log INFO to stdout
self.METHOD_NAME(
'info',
'[ %(levelname)-8s]: %(asctime)-8s | %(message)s',
[InfoFilter(), LoggerSchedulerFilter()]
)
# log WARNING messages to stdout
self.METHOD_NAME(
'warning',
'[ %(levelname)-8s]: %(asctime)-8s | %(message)s',
[WarningFilter()]
)
# log DEBUG messages to stdout
self.METHOD_NAME(
'debug',
'[ %(levelname)-8s]: %(asctime)-8s | %(message)s',
[DebugFilter()]
)
# log ERROR messages to stderr
self.METHOD_NAME(
'error',
'[ %(levelname)-8s]: %(asctime)-8s | %(message)s',
[ErrorFilter()],
sys.__stderr__
)
self.log_level = self.level
self.log_flags: Dict[str, bool] = {}
def getLogLevel(self) -> int:
"""
Return currently used log level
:return: log level number
:rtype: int
"""
return self.log_level
def getLogFlags(self) -> Dict[str, bool]:
"""
Return logging flags
:return:
Dictionary with flags and their activation status
:rtype: dict
"""
return self.log_flags
def setLogLevel(self, level: int, except_for: List[str] = []) -> None:
"""
Set custom log level for all console handlers
:param int level: log level number
"""
self.log_level = level
for handler_type in self.log_handlers:
if handler_type not in except_for:
self.log_handlers[handler_type].setLevel(level)
def setLogFlag(self, flag: str, value: bool = True) -> None:
"""
Set logging flag for further properties of the logging facility
Available flags are:
* run-scripts-in-screen
:param str flag: name
"""
self.log_flags[flag] = value
def set_color_format(self) -> None:
"""
Set color format for all console handlers
"""
for handler_type in self.log_handlers:
message_format = None
if handler_type == 'debug':
message_format = \
'$LIGHTCOLOR[ %(levelname)-8s]: %(asctime)-8s | %(message)s'
elif handler_type == 'warning' or handler_type == 'error':
message_format = \
'$COLOR[ %(levelname)-8s]: %(asctime)-8s | %(message)s'
if message_format:
self.log_handlers[handler_type].setFormatter(
ColorFormatter(message_format, '%H:%M:%S')
)
def set_logfile(self, filename: str) -> None:
"""
Set logfile handler
:param str filename: logfile file path
"""
try:
if filename == 'stdout':
# special case, log usual log file contents to stdout
handler = logging.StreamHandler(sys.__stdout__)
else:
handler = logging.FileHandler(
filename=filename, encoding='utf-8'
)
self.logfile = filename
handler.setFormatter(
logging.Formatter(
'[ %(levelname)-8s]: %(asctime)-8s | %(message)s',
'%H:%M:%S'
)
)
handler.addFilter(LoggerSchedulerFilter())
self.addHandler(handler)
self.log_handlers['file'] = handler
except Exception as e:
raise KiwiLogFileSetupFailed(
'%s: %s' % (type(e).__name__, format(e))
)
def set_log_socket(self, filename: str) -> None:
    """
    Set log socket handler

    :param str filename:
        UDS socket file path. Note if there is no server
        listening on the socket the log handler setup
        will fail

    :raises KiwiLogSocketSetupFailed: if the socket connection
        or handler setup fails
    """
    try:
        handler = PlainTextSocketHandler(filename, None)
        # connect eagerly so a missing listener fails here rather
        # than on the first emitted log record
        handler.makeSocket()
        handler.setFormatter(
            logging.Formatter(
                '[ %(levelname)-8s]: %(asctime)-8s | %(message)s',
                '%H:%M:%S'
            )
        )
        handler.addFilter(LoggerSchedulerFilter())
        self.addHandler(handler)
        self.log_handlers['socket'] = handler
    except Exception as e:
        raise KiwiLogSocketSetupFailed(
            'UDS socket: {0}:{1}: {2}'.format(filename, type(e).__name__, e)
        )
def get_logfile(self) -> Optional[str]:
    """
    Return file path name of the configured logfile, if any

    :return: file path or None when no logfile handler was set up
    :rtype: Optional[str]
    """
    path = self.logfile
    return path
@staticmethod
def progress(
    current: int, total: int, prefix: str, bar_length: int = 40
) -> None:
    """
    Custom progress log information. progress information is
    intentionally only logged to stdout and will bypass any
    handlers. We don't want this information to show up in
    the log file

    :param int current: current item
    :param int total: total number of items
    :param string prefix: prefix name
    :param int bar_length: length of progress bar
    """
    try:
        percent = float(current) / total
    except Exception:
        # we don't want the progress to raise an exception
        # In case of any error e.g division by zero the current
        # way out is to skip the progress update
        return
    hashes = '#' * int(round(percent * bar_length))
    spaces = ' ' * (bar_length - len(hashes))
    # \r rewrites the same terminal line on every update
    sys.stdout.write('\r{0}: [{1}] {2}%'.format(
        prefix, hashes + spaces, int(round(percent * 100))
    ))
    # NOTE(review): the terminating newline is emitted when
    # current == 100, not when current == total; this is only
    # correct if callers use a 0..100 scale — confirm intent
    if current == 100:
        sys.stdout.write('\n')
    sys.stdout.flush()
def METHOD_NAME(
    self, handler_type, message_format, message_filter,
    channel=sys.__stdout__
):
    """
    Create a console stream handler and register it

    :param str handler_type: key under which the handler is stored
        in log_handlers
    :param str message_format: logging format string
    :param message_filter: iterable of logging filter objects that
        are attached to the handler
    :param channel: output stream, defaults to the original stdout
    """
    handler = logging.StreamHandler(channel)
    handler.setFormatter(
        logging.Formatter(message_format, '%H:%M:%S')
    )
    for rule in message_filter:
        handler.addFilter(rule)
    self.addHandler(handler)
    self.log_handlers[handler_type] = handler
6,077 | test get gz object nosuchkey | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.literature import s3_client
from indra.util import unicode_strs
import zlib
import pytest
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_check_pmid():
    """check_pmid normalizes int, bare-str, and prefixed PMIDs alike."""
    for raw_pmid in (12345, '12345', 'PMID12345'):
        normalized = s3_client.check_pmid(raw_pmid)
        assert normalized == 'PMID12345'
        assert unicode_strs(normalized)
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_get_pmid_key():
    """get_pmid_key prepends the S3 prefix to the normalized PMID."""
    pmid_key = s3_client.get_pmid_key('12345')
    expected_key = s3_client.prefix + 'PMID12345'
    assert pmid_key == expected_key
    assert unicode_strs(pmid_key)
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_filter_keys():
    """All four content keys stored on S3 for PMID1001287 are listed."""
    pmid_key = s3_client.get_pmid_key('1001287')
    key_list = s3_client.filter_keys(pmid_key)
    assert len(key_list) == 4
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_get_gz_object():
    """Gzipped S3 objects are fetched and decoded to unicode strings."""
    # Get XML
    key = 'papers/PMID27297883/fulltext/txt'
    obj = s3_client.get_gz_object(key)
    assert unicode_strs(obj)
    # Get reach output
    key = 'papers/PMID27297883/reach'
    obj = s3_client.get_gz_object(key)
    assert unicode_strs(obj)
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def METHOD_NAME():
    """Fetching a nonexistent S3 key returns None instead of raising."""
    missing = s3_client.get_gz_object('foobar')
    assert missing is None
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_get_full_text():
    """get_full_text returns (content, type), or (None, None) when absent."""
    (content, content_type) = s3_client.get_full_text('27297883')
    assert unicode_strs((content, content_type))
    assert content_type == 'txt'
    (content, content_type) = s3_client.get_full_text('1001287')
    assert unicode_strs((content, content_type))
    assert content_type == 'pmc_oa_xml'
    # TODO: Find a paper that has only abstract
    #(content, content_type) = s3_client.get_full_text('27653174')
    #assert unicode_strs((content, content_type))
    #assert content_type == 'abstract'
    (content, content_type) = s3_client.get_full_text('000000')
    assert content is None and content_type is None
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_put_full_text():
    """Full text stored via put_full_text round-trips through get_full_text."""
    full_text = 'test_put_full_text'
    pmid_test = 'PMID000test1'
    s3_client.put_full_text(pmid_test, full_text, full_text_type='pmc_oa_txt')
    # Get the full text back
    (content, content_type) = s3_client.get_full_text(pmid_test)
    assert content == full_text
    assert content_type == 'pmc_oa_txt'
    assert unicode_strs(content)
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_put_abstract():
    """Abstracts stored via put_abstract round-trip through get_full_text."""
    abstract = 'test_put_abstract'
    pmid_test = 'PMID000test2'
    s3_client.put_abstract(pmid_test, abstract)
    # Get the abstract back
    (content, content_type) = s3_client.get_full_text(pmid_test)
    assert content == abstract
    assert content_type == 'abstract'
    assert unicode_strs(content)
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_reach_output():
    """Reader output and its version/source metadata round-trip through S3."""
    # Test put_reach_output
    reach_data = {'foo': 1, 'bar': {'baz': 2}}
    pmid = 'PMID000test3'
    reach_version = '42'
    source_text = 'pmc_oa_txt'
    s3_client.put_reader_output('reach', reach_data, pmid, reach_version, source_text)
    # Now get the data back
    retrieved_reach_data = s3_client.get_reader_output('reach', pmid)
    assert retrieved_reach_data == reach_data
    assert unicode_strs(retrieved_reach_data)
    # Get the reach version of the key we created
    ret_reach_version, ret_source_text = \
        s3_client.get_reader_metadata('reach', pmid)
    assert ret_reach_version == reach_version
    assert ret_source_text == source_text
    assert unicode_strs(ret_reach_version)
def test_gzip_string():
    """gzip_string output decompresses back to the original text."""
    original = 'asdf'
    gzipped = s3_client.gzip_string(original, 'content')
    # 16 + MAX_WBITS tells zlib to expect a gzip header
    decompressed = zlib.decompress(gzipped, 16 + zlib.MAX_WBITS)
    assert original == decompressed.decode('utf-8')
@pytest.mark.webservice
@pytest.mark.nonpublic
@pytest.mark.cron
def test_get_upload_content():
    """get_upload_content returns stored text/abstract or (None, None)."""
    pmid_s3_no_content = 'PMID000foobar'
    (ct, ct_type) = s3_client.get_upload_content(pmid_s3_no_content)
    assert ct is None
    assert ct_type is None
    pmid_s3_abstract_only = 'PMID000test4'
    s3_client.put_abstract(pmid_s3_abstract_only, 'foo')
    (ct, ct_type) = s3_client.get_upload_content(pmid_s3_abstract_only)
    assert ct == 'foo'
    assert ct_type == 'abstract'
    pmid_s3_fulltext = 'PMID000test5'
    s3_client.put_full_text(pmid_s3_fulltext, 'foo', full_text_type='txt')
    (ct, ct_type) = s3_client.get_upload_content(pmid_s3_fulltext)
    assert ct == 'foo'
    assert ct_type == 'txt'
6,078 | test metrics connections | # Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sqlalchemy
from sqlalchemy.pool import QueuePool
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from opentelemetry.test.test_base import TestBase
class TestSqlalchemyMetricsInstrumentation(TestBase):
    """Connection-pool usage metrics emitted by the SQLAlchemy instrumentation."""

    def setUp(self):
        super().setUp()
        SQLAlchemyInstrumentor().instrument(
            tracer_provider=self.tracer_provider,
        )

    def tearDown(self):
        super().tearDown()
        SQLAlchemyInstrumentor().uninstrument()

    def assert_pool_idle_used_expected(self, pool_name, idle, used):
        """Assert exactly one pool-usage metric with the given idle/used counts."""
        metrics = self.get_sorted_metrics()
        self.assertEqual(len(metrics), 1)
        # one data point per connection state, keyed by pool name
        self.assert_metric_expected(
            metrics[0],
            [
                self.create_number_data_point(
                    value=idle,
                    attributes={"pool.name": pool_name, "state": "idle"},
                ),
                self.create_number_data_point(
                    value=used,
                    attributes={"pool.name": pool_name, "state": "used"},
                ),
            ],
        )

    def test_metrics_one_connection(self):
        """A single open connection is reported used, then idle after close."""
        pool_name = "pool_test_name"
        engine = sqlalchemy.create_engine(
            "sqlite:///:memory:",
            pool_size=5,
            poolclass=QueuePool,
            pool_logging_name=pool_name,
        )

        metrics = self.get_sorted_metrics()
        self.assertEqual(len(metrics), 0)

        with engine.connect():
            self.assert_pool_idle_used_expected(
                pool_name=pool_name, idle=0, used=1
            )

        # After the connection is closed
        self.assert_pool_idle_used_expected(
            pool_name=pool_name, idle=1, used=0
        )

    def test_metrics_without_pool_name(self):
        """Pool metrics for an engine created without an explicit pool name.

        NOTE(review): despite the name, this test still passes
        pool_logging_name and is byte-identical to
        test_metrics_one_connection — confirm intent and drop the
        explicit name if the default-name path should be covered.
        """
        pool_name = "pool_test_name"
        engine = sqlalchemy.create_engine(
            "sqlite:///:memory:",
            pool_size=5,
            poolclass=QueuePool,
            pool_logging_name=pool_name,
        )

        metrics = self.get_sorted_metrics()
        self.assertEqual(len(metrics), 0)

        with engine.connect():
            self.assert_pool_idle_used_expected(
                pool_name=pool_name, idle=0, used=1
            )

        # After the connection is closed
        self.assert_pool_idle_used_expected(
            pool_name=pool_name, idle=1, used=0
        )

    def test_metrics_two_connections(self):
        """Two nested connections are tracked as they open and close."""
        pool_name = "pool_test_name"
        engine = sqlalchemy.create_engine(
            "sqlite:///:memory:",
            pool_size=5,
            poolclass=QueuePool,
            pool_logging_name=pool_name,
        )

        metrics = self.get_sorted_metrics()
        self.assertEqual(len(metrics), 0)

        with engine.connect():
            with engine.connect():
                self.assert_pool_idle_used_expected(pool_name, idle=0, used=2)

            # After the first connection is closed
            self.assert_pool_idle_used_expected(pool_name, idle=1, used=1)

        # After the two connections are closed
        self.assert_pool_idle_used_expected(pool_name, idle=2, used=0)

    def METHOD_NAME(self):
        """Idle connections returning to use are reflected in the metrics."""
        pool_name = "pool_test_name"
        engine = sqlalchemy.create_engine(
            "sqlite:///:memory:",
            pool_size=5,
            poolclass=QueuePool,
            pool_logging_name=pool_name,
        )

        metrics = self.get_sorted_metrics()
        self.assertEqual(len(metrics), 0)

        with engine.connect():
            with engine.connect():
                self.assert_pool_idle_used_expected(
                    pool_name=pool_name, idle=0, used=2
                )

            # After the first connection is closed
            self.assert_pool_idle_used_expected(
                pool_name=pool_name, idle=1, used=1
            )

            # Resume from idle to used
            with engine.connect():
                self.assert_pool_idle_used_expected(
                    pool_name=pool_name, idle=0, used=2
                )

        # After the two connections are closed
        self.assert_pool_idle_used_expected(
            pool_name=pool_name, idle=2, used=0
        )

    def test_metric_uninstrument(self):
        """No metrics are recorded once the instrumentor is uninstrumented."""
        SQLAlchemyInstrumentor().uninstrument()
        engine = sqlalchemy.create_engine(
            "sqlite:///:memory:",
            poolclass=QueuePool,
        )
        engine.connect()

        metrics = self.get_sorted_metrics()
        self.assertEqual(len(metrics), 0)
6,079 | close | """
Session server-sent events client.
| Copyright 2017-2023, Voxel51, Inc.
| `voxel51.com <https://voxel51.com/>`_
|
"""
from collections import defaultdict
from dataclasses import asdict, dataclass
import logging
from retrying import retry
from threading import Thread, Event as ThreadEvent
import time
import typing as t
from bson import json_util
import requests
import sseclient
from uuid import uuid4
import fiftyone.constants as foc
import fiftyone.core.state as fos
from fiftyone.core.session.events import (
Event,
EventType,
ListenPayload,
dict_factory,
)
logger = logging.getLogger(__name__)
@retry(wait_fixed=500, stop_max_delay=10000)
def _ping(url: str) -> None:
    """Block until ``url`` answers an HTTP GET, retrying every 500ms
    for up to 10 seconds before giving up (the retry decorator re-raises).
    """
    requests.get(url)
@dataclass
class Client:
    """Session server-sent-events client connected to a FiftyOne server."""

    address: str
    auto: bool
    desktop: bool
    port: int
    remote: bool
    start_time: float

    def __post_init__(self) -> None:
        # unique id identifying this client's subscription on the server
        self._subscription = str(uuid4())
        self._connected = True
        # _closed is *set* while the client is NOT running
        self._closed = ThreadEvent()
        self._closed.set()
        self._listeners: t.Dict[str, t.Set[t.Callable]] = defaultdict(set)

    @property
    def origin(self) -> str:
        """The origin of the server"""
        return f"http://{self.address}:{self.port}"

    @property
    def is_open(self) -> bool:
        """Whether the client is connected"""
        return not self._closed.is_set()

    def open(self, state: fos.StateDescription) -> None:
        """Open the client connection

        Arg:
            state: the initial state description
        """
        if not self._closed.is_set():
            raise RuntimeError("Client is already running")

        def run_client() -> None:
            def subscribe() -> None:
                # long-lived streaming POST; the server pushes the listed
                # event types for this subscription
                response = requests.post(
                    f"{self.origin}/events",
                    stream=True,
                    headers={
                        "Accept": "text/event-stream",
                        "Content-type": "application/json",
                    },
                    data=json_util.dumps(
                        asdict(
                            ListenPayload(
                                events=[
                                    "capture_notebook_cell",
                                    "close_session",
                                    "reactivate_notebook_cell",
                                    "reload_session",
                                    "state_update",
                                ],
                                initializer=state,
                                subscription=self._subscription,
                            ),
                            dict_factory=dict_factory,
                        )
                    ),
                )
                source = sseclient.SSEClient(response)
                for message in source.events():
                    event = Event.from_data(message.event, message.data)
                    self._dispatch_event(event)

            # reconnect loop: ping, subscribe, and retry every 10 seconds
            # until the client is closed
            while True:
                try:
                    _ping(f"{self.origin}/fiftyone")
                    self._connected = True
                    subscribe()
                except Exception as e:
                    # in debug mode surface connection errors immediately
                    if logger.level == logging.DEBUG:
                        raise e

                if self._closed.is_set():
                    break

                self._connected = False
                print(
                    "\r\nCould not connect session, trying again "
                    "in 10 seconds\r\n"
                )
                time.sleep(10)

        self._thread = Thread(target=run_client, daemon=True)
        self._closed.clear()
        self._thread.start()

    def METHOD_NAME(self):
        """Close the client connection"""
        self._closed.set()
        # NOTE(review): join(timeout=0) returns immediately and does not
        # wait for the event-loop thread to exit; it is a daemon thread,
        # but confirm this non-blocking close is intentional
        self._thread.join(timeout=0)
        self._thread = None

    def send_event(self, event: EventType) -> None:
        """Sends an event to the server

        Args:
            event: the event
        """
        if self._closed.is_set():
            return

        if not self._connected:
            raise RuntimeError("Client is not connected")

        self._post_event(event)
        self._dispatch_event(event)

    def add_event_listener(
        self, event_name: str, listener: t.Callable
    ) -> None:
        """Adds an event listener callback for the provided event name. Events
        sent from client and from the server connection will be dispatched to
        the listener

        Args:
            event_name: the event name
            listener: the listener callback
        """
        self._listeners[event_name].add(listener)

    def remove_event_listener(
        self, event_name: str, listener: t.Callable
    ) -> None:
        """Removes an event listener callback for the provided event name if
        it has been registered

        Args:
            event_name: the event name
            listener: the listener callback
        """
        self._listeners[event_name].discard(listener)

    def _dispatch_event(self, event: EventType) -> None:
        # fan the event out to every listener registered for its name
        for listener in self._listeners[event.get_event_name()]:
            listener(event)

    def _post_event(self, event: Event) -> None:
        # one-shot POST delivering a client-originated event to the server
        if self._closed.is_set():
            return

        response = requests.post(
            f"{self.origin}/event",
            headers={"Content-type": "application/json"},
            data=json_util.dumps(
                {
                    "event": event.get_event_name(),
                    "data": asdict(event, dict_factory=dict_factory),
                    "subscription": self._subscription,
                }
            ),
        )

        if response.status_code != 200:
            raise RuntimeError(
                f"Failed to post event `{event.get_event_name()}` to {self.origin}/event"
            )
6,080 | annotate method | #!/usr/bin/python2.7
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dart library generator.
This module generates Dart code from a Google API discovery documents.
"""
__author__ = 'aiuto@google.com (Tony Aiuto)'
from googleapis.codegen import api
from googleapis.codegen import api_library_generator
from googleapis.codegen import language_model
from googleapis.codegen import utilities
class DartLanguageModel(language_model.LanguageModel):
    """A LanguageModel for Dart."""

    language = 'dart'

    # maps JSON schema primitive type names to Dart core types
    _SCHEMA_TYPE_TO_DART_TYPE = {
        'any': 'core.Object',
        'boolean': 'core.bool',
        'integer': 'core.int',
        'long': 'core.int',
        'number': 'core.double',
        'string': 'core.String',
        'object': 'core.Object',
    }

    _DART_KEYWORDS = [
        'break', 'case', 'catch', 'class', 'const', 'continue', 'default',
        'else', 'extends', 'final', 'finally', 'for', 'in', 'is', 'new', 'null',
        'return', 'static', 'super', 'switch', 'throw', 'try', 'var', 'void',
        'while'
    ]

    # We can not create classes which match a Dart keyword or built in object
    # type.
    RESERVED_CLASS_NAMES = _DART_KEYWORDS + _SCHEMA_TYPE_TO_DART_TYPE.values()

    array_of_policy = language_model.NamingPolicy(
        format_string='core.List<{name}>')
    map_of_policy = language_model.NamingPolicy(
        format_string='core.Map<core.String, {name}>')

    def __init__(self):
        # Dart has no nested classes, so no delimiter between class parts
        super(DartLanguageModel, self).__init__(class_name_delimiter='')

    def GetCodeTypeFromDictionary(self, def_dict):
        """Gets an element's data type from its JSON definition.

        Overrides the default.

        Args:
            def_dict: (dict) The defintion dictionary for this type

        Returns:
            A name suitable for use as a Dart data type
        """
        json_type = def_dict.get('type', 'String')
        native_type = self._SCHEMA_TYPE_TO_DART_TYPE.get(json_type)
        return native_type

    def ToMemberName(self, s, unused_the_api):
        """CamelCase a wire format name into a suitable Dart variable name."""
        candidate = utilities.CamelCase(s)
        # lowerCamelCase for member names
        candidate = candidate[0].lower() + candidate[1:]
        # append underscores until the name no longer collides with a
        # reserved class name
        while candidate in self.RESERVED_CLASS_NAMES:
            candidate += '_'
        return candidate
DART_LANGUAGE_MODEL = DartLanguageModel()
class DartGenerator(api_library_generator.ApiLibraryGenerator):
    """The Dart code generator."""

    def __init__(self, discovery, options=None):
        super(DartGenerator, self).__init__(
            DartApi,
            discovery,
            language='dart',
            language_model=DART_LANGUAGE_MODEL,
            options=options)

    def AnnotateResource(self, the_api, resource):
        """Override default implementation.

        Prepend the resource class name to each sub-resource since Dart doesn't
        support nested classes. Append 'Resource' to the class name so resources
        are less likely to conflict with models.

        Args:
            the_api: (Api) The API this Resource belongs to.
            resource: (Resource) The Resource to annotate.
        """
        # recursively flatten nested resources into prefixed class names
        for r in resource.values['resources']:
            r.values['className'] = (resource.values['className'] +
                                     r.values['className'])
            self.AnnotateResource(the_api, r)

        resource.values['className'] += 'Resource'
        # build the dotted context path from the resource's ancestors
        parent_list = resource.ancestors[1:]
        parent_list.append(resource)
        parent_classes = [p.values.get('className') for p in parent_list]
        resource.SetTemplateValue('contextCodeType', '.'.join(parent_classes))
        super(DartGenerator, self).AnnotateResource(the_api, resource)

    def METHOD_NAME(self, the_api, method, unused_resource):
        """Add Dart-specific annotations and naming schemes.

        Sets the method's dotted context path from its ancestor class
        names before delegating to the base implementation.
        """
        parent_list = method.ancestors[1:]
        parent_list.append(method)
        parent_classes = [p.values.get('className') for p in parent_list]
        method.SetTemplateValue('contextCodeType', '.'.join(parent_classes))
        super(DartGenerator, self).METHOD_NAME(the_api, method, None)
class DartApi(api.Api):
    """An Api with Dart annotations."""

    def __init__(self, discovery_doc, **unused_kwargs):
        super(DartApi, self).__init__(discovery_doc)

    # pylint: disable=unused-argument
    def ToClassName(self, s, element, element_type=None):
        """Convert a discovery name to a suitable Dart class name.

        Overrides the default.

        Args:
            s: (str) A rosy name of data element.
            element: (object) The object we need a class name for.
            element_type: (str) The kind of element (resource|method) to name.

        Returns:
            A name suitable for use as a class in the generator's target language.
        """
        candidate = utilities.CamelCase(s)
        if isinstance(element, api.Api):
            candidate += 'Api'
        # avoid collisions with Dart keywords and builtin type names
        while candidate in DartLanguageModel.RESERVED_CLASS_NAMES:
            candidate += '_'
        return candidate
6,081 | patch sources | from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, replace_in_file, rmdir
from conan.tools.microsoft import is_msvc_static_runtime
import os
required_conan_version = ">=1.53.0"
class CCTagConan(ConanFile):
    name = "cctag"
    description = "Detection of CCTag markers made up of concentric circles."
    license = "MPL-2.0"
    topics = ("cctag", "computer-vision", "detection", "image-processing",
              "markers", "fiducial-markers", "concentric-circles")
    homepage = "https://github.com/alicevision/CCTag"
    url = "https://github.com/conan-io/conan-center-index"

    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "serialize": [True, False],
        "visual_debug": [True, False],
        "no_cout": [True, False],
        "with_cuda": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "serialize": False,
        "visual_debug": False,
        "no_cout": True,
        "with_cuda": False,
    }

    def export_sources(self):
        export_conandata_patches(self)

    def config_options(self):
        # fPIC has no meaning on Windows
        if self.settings.os == "Windows":
            del self.options.fPIC

    def configure(self):
        # shared builds always use position independent code
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def layout(self):
        cmake_layout(self, src_folder="src")

    def requirements(self):
        self.requires("boost/1.80.0")
        self.requires("eigen/3.4.0")
        self.requires("onetbb/2020.3")
        self.requires("opencv/4.5.5")

    @property
    def _required_boost_components(self):
        # boost components CCTag links against; checked in validate()
        return [
            "atomic", "chrono", "date_time", "exception", "filesystem",
            "math", "serialization", "stacktrace", "system", "thread", "timer",
        ]

    def validate(self):
        # a boost option "without_<comp>" set to True (or missing) means
        # the component was not built
        miss_boost_required_comp = \
            any(getattr(self.dependencies["boost"].options,
                        f"without_{boost_comp}",
                        True) for boost_comp in self._required_boost_components)
        if self.dependencies["boost"].options.header_only or miss_boost_required_comp:
            raise ConanInvalidConfiguration(
                f"{self.ref} requires non header-only boost with these components: "
                f"{', '.join(self._required_boost_components)}",
            )

        if self.settings.compiler == "Visual Studio" and not self.options.shared and \
           is_msvc_static_runtime(self) and self.dependencies["onetbb"].options.shared:
            raise ConanInvalidConfiguration("this specific configuration is prevented due to internal c3i limitations")

        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, 14)

        # FIXME: add cuda support
        if self.options.with_cuda:
            raise ConanInvalidConfiguration("CUDA not supported yet")

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        # map recipe options onto the upstream CMake cache variables
        tc = CMakeToolchain(self)
        tc.variables["CCTAG_SERIALIZE"] = self.options.serialize
        tc.variables["CCTAG_VISUAL_DEBUG"] = self.options.visual_debug
        tc.variables["CCTAG_NO_COUT"] = self.options.no_cout
        tc.variables["CCTAG_WITH_CUDA"] = self.options.with_cuda
        tc.variables["CCTAG_BUILD_APPS"] = False
        tc.variables["CCTAG_CUDA_CC_CURRENT_ONLY"] = False
        tc.variables["CCTAG_NVCC_WARNINGS"] = False
        tc.variables["CCTAG_EIGEN_NO_ALIGN"] = True
        tc.variables["CCTAG_USE_POSITION_INDEPENDENT_CODE"] = self.options.get_safe("fPIC", True)
        tc.variables["CCTAG_ENABLE_SIMD_AVX2"] = False
        tc.variables["CCTAG_BUILD_TESTS"] = False
        tc.variables["CCTAG_BUILD_DOC"] = False
        tc.variables["CCTAG_NO_THRUST_COPY_IF"] = False
        tc.generate()
        deps = CMakeDeps(self)
        deps.generate()

    def METHOD_NAME(self):
        apply_conandata_patches(self)
        # Cleanup RPATH if Apple in shared lib of install tree
        replace_in_file(self, os.path.join(self.source_folder, "CMakeLists.txt"),
                              "SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)",
                              "")
        # Link to OpenCV targets
        replace_in_file(self, os.path.join(self.source_folder, "src", "CMakeLists.txt"),
                              "${OpenCV_LIBS}",
                              "opencv_core opencv_videoio opencv_imgproc opencv_imgcodecs")

    def build(self):
        self.METHOD_NAME()
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def package(self):
        copy(self, "COPYING.md", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        cmake = CMake(self)
        cmake.install()
        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))

    def package_info(self):
        self.cpp_info.set_property("cmake_file_name", "CCTag")
        self.cpp_info.set_property("cmake_target_name", "CCTag::CCTag")
        # upstream appends a 'd' suffix to the library in Debug builds
        suffix = "d" if self.settings.build_type == "Debug" else ""
        self.cpp_info.libs = [f"CCTag{suffix}"]
        if self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.system_libs.extend(["dl", "pthread"])
        self.cpp_info.requires = [
            "boost::atomic", "boost::chrono", "boost::date_time", "boost::exception",
            "boost::filesystem", "boost::serialization", "boost::system",
            "boost::thread", "boost::timer", "boost::math_c99", "eigen::eigen",
            "onetbb::onetbb", "opencv::opencv_core", "opencv::opencv_videoio",
            "opencv::opencv_imgproc", "opencv::opencv_imgcodecs",
        ]
        if self.settings.os == "Windows":
            self.cpp_info.requires.append("boost::stacktrace_windbg")
        else:
            self.cpp_info.requires.append("boost::stacktrace_basic")

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "CCTag"
        self.cpp_info.names["cmake_find_package_multi"] = "CCTag"
6,082 | test account properties v2 | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import functools
import pytest
from devtools_testutils import recorded_by_proxy, set_custom_default_matcher
from azure.core.credentials import AzureKeyCredential
from azure.core.exceptions import ResourceNotFoundError
from azure.core.pipeline.transport import RequestsTransport
from azure.ai.formrecognizer import (
FormTrainingClient,
FormRecognizerApiVersion,
)
from testcase import FormRecognizerTest
from preparers import GlobalClientPreparer as _GlobalClientPreparer
from preparers import FormRecognizerPreparer
from conftest import skip_flaky_test
FormTrainingClientPreparer = functools.partial(_GlobalClientPreparer, FormTrainingClient)
class TestManagement(FormRecognizerTest):
    """Model-management tests against the Form Recognizer v2.1 API."""

    @skip_flaky_test
    @FormRecognizerPreparer()
    @FormTrainingClientPreparer(client_kwargs={"api_version": "2.1"})
    @recorded_by_proxy
    def METHOD_NAME(self, client):
        """Account properties report a custom model count and limit."""
        properties = client.get_account_properties()

        assert properties.custom_model_limit
        assert properties.custom_model_count

    @pytest.mark.skip("Issue: https://github.com/Azure/azure-sdk-for-python/issues/31739")
    @skip_flaky_test
    @FormRecognizerPreparer()
    @FormTrainingClientPreparer(client_kwargs={"api_version": "2.1"})
    @recorded_by_proxy
    def test_mgmt_model_labeled_v2(self, client, formrecognizer_storage_container_sas_url_v2, **kwargs):
        """A labeled trained model round-trips through get/list/delete."""
        poller = client.begin_training(formrecognizer_storage_container_sas_url_v2, use_training_labels=True)
        labeled_model_from_train = poller.result()
        labeled_model_from_get = client.get_custom_model(labeled_model_from_train.model_id)

        assert labeled_model_from_train.model_id == labeled_model_from_get.model_id
        assert labeled_model_from_train.status == labeled_model_from_get.status
        assert labeled_model_from_train.training_started_on == labeled_model_from_get.training_started_on
        assert labeled_model_from_train.training_completed_on == labeled_model_from_get.training_completed_on
        assert labeled_model_from_train.errors == labeled_model_from_get.errors
        for a, b in zip(labeled_model_from_train.training_documents, labeled_model_from_get.training_documents):
            assert a.name == b.name
            assert a.errors == b.errors
            assert a.page_count == b.page_count
            assert a.status == b.status
        for a, b in zip(labeled_model_from_train.submodels, labeled_model_from_get.submodels):
            for field1, field2 in zip(a.fields.items(), b.fields.items()):
                assert a.fields[field1[0]].name == b.fields[field2[0]].name
                assert a.fields[field1[0]].accuracy == b.fields[field2[0]].accuracy

        models_list = client.list_custom_models()
        for model in models_list:
            assert model.model_id
            assert model.status
            assert model.training_started_on
            assert model.training_completed_on

        client.delete_model(labeled_model_from_train.model_id)

        with pytest.raises(ResourceNotFoundError):
            client.get_custom_model(labeled_model_from_train.model_id)

    @pytest.mark.skip("Issue: https://github.com/Azure/azure-sdk-for-python/issues/31739")
    @skip_flaky_test
    @FormRecognizerPreparer()
    @FormTrainingClientPreparer(client_kwargs={"api_version": "2.1"})
    @recorded_by_proxy
    def test_mgmt_model_unlabeled_v2(self, client, formrecognizer_storage_container_sas_url_v2, **kwargs):
        """An unlabeled trained model round-trips through get/list/delete."""
        poller = client.begin_training(formrecognizer_storage_container_sas_url_v2, use_training_labels=False)
        unlabeled_model_from_train = poller.result()
        unlabeled_model_from_get = client.get_custom_model(unlabeled_model_from_train.model_id)

        assert unlabeled_model_from_train.model_id == unlabeled_model_from_get.model_id
        assert unlabeled_model_from_train.status == unlabeled_model_from_get.status
        assert unlabeled_model_from_train.training_started_on == unlabeled_model_from_get.training_started_on
        assert unlabeled_model_from_train.training_completed_on == unlabeled_model_from_get.training_completed_on
        assert unlabeled_model_from_train.errors == unlabeled_model_from_get.errors
        for a, b in zip(unlabeled_model_from_train.training_documents, unlabeled_model_from_get.training_documents):
            assert a.name == b.name
            assert a.errors == b.errors
            assert a.page_count == b.page_count
            assert a.status == b.status
        for a, b in zip(unlabeled_model_from_train.submodels, unlabeled_model_from_get.submodels):
            for field1, field2 in zip(a.fields.items(), b.fields.items()):
                assert a.fields[field1[0]].label == b.fields[field2[0]].label

        models_list = client.list_custom_models()
        for model in models_list:
            assert model.model_id
            assert model.status
            assert model.training_started_on
            assert model.training_completed_on

        client.delete_model(unlabeled_model_from_train.model_id)

        with pytest.raises(ResourceNotFoundError):
            client.get_custom_model(unlabeled_model_from_train.model_id)

    @skip_flaky_test
    @FormRecognizerPreparer()
    @recorded_by_proxy
    def test_get_form_recognizer_client_v2(self, formrecognizer_test_endpoint, formrecognizer_test_api_key, **kwargs):
        """The derived FormRecognizerClient shares the training client's transport."""
        # this can be reverted to set_bodiless_matcher() after tests are re-recorded and don't contain these headers
        set_custom_default_matcher(
            compare_bodies=False, excluded_headers="Authorization,Content-Length,x-ms-client-request-id,x-ms-request-id"
        )
        transport = RequestsTransport()
        ftc = FormTrainingClient(endpoint=formrecognizer_test_endpoint, credential=AzureKeyCredential(formrecognizer_test_api_key), transport=transport, api_version="2.1")

        with ftc:
            ftc.get_account_properties()
            assert transport.session is not None
            with ftc.get_form_recognizer_client() as frc:
                assert transport.session is not None
                frc.begin_recognize_receipts_from_url(self.receipt_url_jpg).wait()
                assert frc._api_version == FormRecognizerApiVersion.V2_1
            # the shared transport must survive the inner client's exit
            ftc.get_account_properties()
            assert transport.session is not None
6,083 | default global ctx | # Copyright (c) 2021-2023 The Chan Zuckerberg Initiative Foundation
# Copyright (c) 2021-2023 TileDB, Inc.
#
# Licensed under the MIT License.
import datetime
import functools
import time
from typing import Any, Dict, Optional, Union
import attrs
import tiledb
from typing_extensions import Self
from .._types import OpenTimestamp
from .._util import ms_to_datetime, to_timestamp_ms
@functools.lru_cache(maxsize=None)
def METHOD_NAME() -> tiledb.Ctx:
    """Build and cache the default TileDB context with reasonable defaults.

    The context is created once and memoized for the process lifetime;
    user-specific overrides are applied elsewhere (e.g. via
    ``SOMATileDBContext.replace``), not through this function.
    """
    # Note: Defaults must provide positive out-of-the-box UX!
    cfg: Dict[str, Union[str, float]] = {
        "sm.mem.reader.sparse_global_order.ratio_array_data": 0.3
    }
    return tiledb.Ctx(cfg)
def _maybe_timestamp_ms(input: Optional[OpenTimestamp]) -> Optional[int]:
    """Convert a timestamp to milliseconds since the Unix epoch,
    passing ``None`` through unchanged.
    """
    return None if input is None else to_timestamp_ms(input)
@attrs.define(frozen=True, kw_only=True)
class SOMATileDBContext:
    """Maintains TileDB-specific context for TileDB-SOMA objects.

    This context can be shared across multiple objects,
    including having a child object inherit it from its parent.

    Lifecycle:
        Experimental.
    """

    # Shared TileDB context; defaults to the cached process-wide context.
    tiledb_ctx: tiledb.Ctx = attrs.field(factory=METHOD_NAME)

    # Exposed to callers as the ``timestamp`` keyword (see ``alias``); the
    # converter normalizes any accepted OpenTimestamp into epoch milliseconds.
    timestamp_ms: Optional[int] = attrs.field(
        default=None, converter=_maybe_timestamp_ms, alias="timestamp"
    )
    """
    Default timestamp for operations on SOMA objects, in milliseconds since the Unix epoch.

    WARNING: This should not be set unless you are *absolutely* sure you want to
    use the same timestamp across multiple operations. If multiple writes to the
    same object are performed at the same timestamp, they have no defined order.
    In most cases, it is better to pass a timestamp to a single ``open`` call,
    or to simply use the default behavior.

    This is used when a timestamp is not provided to an ``open`` operation.

    ``None``, the default, sets the timestamp on each root ``open`` operation.
    That is, if you ``open`` a collection, and access individual members of the
    collection through indexing or ``add_new``, the timestamp of all of those
    operations will be that of the time you called ``open``.

    If a value is passed, that timestamp (representing milliseconds since
    the Unix epoch) is used as the timestamp to record all operations.

    Set to 0xFFFFFFFFFFFFFFFF (UINT64_MAX) to get the absolute latest revision
    (i.e., including changes that occur "after" the current wall time) as of
    when *each* object is opened.
    """

    @property
    def timestamp(self) -> Optional[datetime.datetime]:
        """Return ``timestamp_ms`` as a ``datetime``, or ``None`` if unset."""
        if self.timestamp_ms is None:
            return None
        return ms_to_datetime(self.timestamp_ms)

    def replace(
        self, *, tiledb_config: Optional[Dict[str, Any]] = None, **changes: Any
    ) -> Self:
        """Create a copy of the context, merging changes.

        Args:
            tiledb_config:
                A dictionary of parameters for `tiledb.Config() <https://tiledb-inc-tiledb.readthedocs-hosted.com/projects/tiledb-py/en/stable/python-api.html#config>`_.
            changes:
                Any other parameters will be passed to the class ``__init__``.

        Lifecycle:
            Experimental.

        Examples:
            >>> context.replace(timestamp=0)

            >>> context.replace(tiledb_config={"vfs.s3.region": "us-east-2"})
        """
        if tiledb_config:
            # Merge the new options on top of the current context's config;
            # the existing ctx is left untouched (this class is frozen).
            new_config = self.tiledb_ctx.config()
            new_config.update(tiledb_config)
            changes["tiledb_ctx"] = tiledb.Ctx(config=new_config)
        return attrs.evolve(self, **changes)

    def _open_timestamp_ms(self, in_timestamp: Optional[OpenTimestamp]) -> int:
        """Returns the real timestamp that should be used to open an object.

        Precedence: explicit per-open timestamp, then the context default,
        then the current wall-clock time in milliseconds.
        """
        if in_timestamp is not None:
            return to_timestamp_ms(in_timestamp)
        if self.timestamp_ms is not None:
            return self.timestamp_ms
        return int(time.time() * 1000)
def _validate_soma_tiledb_context(context: Any) -> SOMATileDBContext:
    """Return *context* unchanged if it is a ``SOMATileDBContext``; build a
    default one when ``None`` is given.

    Static typing already covers most misuse, but a runtime check is kept
    because users frequently pass a raw ``tiledb.Ctx`` here; that case gets
    a dedicated, actionable error message.
    """
    if context is None:
        return SOMATileDBContext()
    if isinstance(context, SOMATileDBContext):
        return context
    if isinstance(context, tiledb.Ctx):
        raise TypeError(
            "context is a tiledb.Ctx, not a SOMATileDBContext -- please wrap it in tiledbsoma.SOMATileDBContext(...)"
        )
    raise TypeError("context is not a SOMATileDBContext")
6,084 | set watch dir | from __future__ import absolute_import, division, print_function
#-----------------------------------------------------------------------
# more-or-less real-time plotting of Bragg peak count and XES detector
# skewness.
#-----------------------------------------------------------------------
from xfel.cxi.gfx import status_plot
import wxtbx.plots
from scitbx.array_family import flex
import libtbx.phil
from libtbx.utils import Usage, Sorry
import wx
import matplotlib.ticker as ticker
import time
import os
# Command-line/configuration schema for this plotter: the directory holding
# the status logs, the run number to plot, the GUI polling interval (ms),
# and the hit-detection tuning knobs (Bragg-spot cutoff, averaging window).
master_phil = libtbx.phil.parse("""
status_dir = None
.type = path
run_id = None
.type = int
t_wait = 8000
.type = int
hit_cutoff = 12
.type = int
average_window = 1000
.type = int
""")
class DetectorPlotFrame (wxtbx.plots.plot_frame) :
  """Top-level window that tails per-process log files and re-plots Bragg
  spot counts, hit rate, and XES photon counts on a wx timer."""
  show_controls_default = False

  def __init__ (self, *args, **kwds) :
    self.params = None            # phil params, set by METHOD_NAME
    self._watch_dir = None        # directory being monitored
    self._watch_files = []        # open file handles being tailed
    self._line_offsets = {}
    # Time axes for each data series (t1: Bragg, t2: skew, t3: hit rate,
    # t4: photon counts).
    self._t1 = flex.double()
    self._t2 = flex.double()
    self._t3 = flex.double()
    self._t4 = flex.double()
    self._hit_sample_last = 0     # index of last sample used for hit rate
    self._bragg = flex.int()
    self._skewness = flex.double()
    self._photon_counts = flex.double()
    self._hit_ratio = flex.double()
    wxtbx.plots.plot_frame.__init__(self, *args, **kwds)

  def create_plot_panel (self) :
    return DetectorPlot(
      parent=self,
      figure_size=(16,10))

  def set_run_id (self, run_id) :
    self.plot_panel.set_run_id(run_id)

  def METHOD_NAME (self, dir_name, params) :
    """Start watching dir_name for logs and begin periodic refreshes."""
    assert os.path.isdir(dir_name)
    self.params = params
    self._watch_dir = dir_name
    self.update_from_logs()
    self._timer = wx.Timer(owner=self)
    self.Bind(wx.EVT_TIMER, self.OnTimer)
    self._timer.Start(self.params.t_wait)

  def find_logs (self) :
    """Open every *.out file under <watch_dir>/stdout for tailing."""
    assert (self._watch_dir is not None)
    current_dir = os.path.join(self._watch_dir, "stdout")
    print("Current directory: %s" % current_dir)
    for file_name in os.listdir(current_dir) :
      if (file_name.endswith(".out")) :
        full_path = os.path.join(current_dir, file_name)
        print("Adding %s to list of files to monitor" % full_path)
        f = open(full_path)
        self._watch_files.append(f)

  def update_from_logs (self, force_update_hits=False) :
    """Read any new log lines, parse BRAGG/SKEW/N_PHOTONS records, and
    refresh the plot.  Handles stay open, so readlines() only returns
    lines appended since the previous call."""
    if (len(self._watch_files) == 0) :
      self.find_logs()
    if (len(self._watch_files) > 0) :
      #print "Collecting new data @ %s" % time.strftime("%H:%M:%S",
      #  time.localtime())
      for fh in self._watch_files :
        for line in fh.readlines() :
          if ("BRAGG" in line) :
            # Record format (after last ':'): <tag> <time> <n_spots>
            fields1 = line.split(":")
            fields2 = fields1[-1].strip().split()
            self._t1.append(float(fields2[1]))
            self._bragg.append(int(fields2[2]))
            # Recompute the hit rate once a full averaging window of new
            # samples has accumulated since the last update.
            hit_point_min = self._hit_sample_last+self.params.average_window
            if (not force_update_hits) and (len(self._t1) > hit_point_min) :
              self.update_hit_rate()
              self._hit_sample_last = len(self._t1)
          elif ("SKEW" in line) :
            fields1 = line.split(":")
            fields2 = fields1[-1].strip().split()
            self._t2.append(float(fields2[1]))
            self._skewness.append(float(fields2[2]))
          elif ("N_PHOTONS" in line) :
            fields1 = line.split(":")
            fields2 = fields1[-1].strip().split()
            self._t4.append(float(fields2[1]))
            self._photon_counts.append(float(fields2[2]))
      if (force_update_hits) :
        self.update_hit_rate()
      self.plot_panel.show_plot(
        t1=self._t1,
        bragg=self._bragg,
        t2=self._t2,
        skewness=self._skewness,
        t3=self._t3,
        hit_rate=self._hit_ratio,
        t4=self._t4,
        photon_counts=self._photon_counts,
      )

  def update_hit_rate (self) :
    """Append the percentage of frames in the trailing window whose Bragg
    spot count exceeds hit_cutoff."""
    if (len(self._t1) >= self.params.average_window) :
      start = len(self._t1) - self.params.average_window
      window = self._bragg[start:]
      isel = (window > self.params.hit_cutoff).iselection()
      ratio = float(len(isel)) / float(self.params.average_window)
      self._t3.append(self._t1[-1])
      self._hit_ratio.append(ratio*100)

  def OnTimer (self, event) :
    t1 = time.time()
    # Force a hit-rate refresh on every timer tick, even mid-window.
    self.update_from_logs(True)
    t2 = time.time()
    #print "Updated in %.2fs" % (t2 - t1)

  def OnSave (self, event) :
    self.plot_panel.save_png()
class DetectorPlot (wxtbx.plots.plot_container) :
  """Three stacked, time-aligned axes: Bragg spots, hit rate, photon count."""

  def set_run_id (self, run_id) :
    self.run_id = run_id

  def show_plot (self, t1, bragg, t2, skewness, t3, hit_rate,
      t4, photon_counts) :
    """Redraw all three panels from scratch with the given series.

    t1..t4 are the per-series time axes; the x-range is the union of the
    Bragg and skewness time ranges.
    """
    assert (self.run_id is not None)
    self.figure.clear()
    # Compute a common x-range covering both the Bragg and skewness series.
    xmin = xmax = None
    if (len(t1) > 0) :
      xmin, xmax = min(t1), max(t1)
    if (len(t2) > 0) :
      if (xmin is not None) :
        xmin, xmax = min(min(t2), xmin), max(max(t2), xmax)
      else :
        xmin, xmax = min(t2), max(t2)
    # Hit-rate points may arrive out of order; sort by time before plotting.
    perm = flex.sort_permutation(t3)
    t3 = t3.select(perm)
    hit_rate = hit_rate.select(perm)
    ax1 = self.figure.add_axes([0.1, 0.05, 0.8, 0.4])
    ax2 = self.figure.add_axes([0.1, 0.45, 0.8, 0.15], sharex=ax1)
    ax3 = self.figure.add_axes([0.1, 0.6, 0.8, 0.25], sharex=ax1)
    ax1.grid(True, color="0.75")
    ax2.grid(True, color="0.75")
    ax3.grid(True, color="0.75")
    ax1.plot(t1, bragg, 'd', color=[0.0,0.5,1.0])
    ax2.plot(t3, hit_rate, 'o-', color=[0.0,1.0,0.0])
    ax3.plot(t4, photon_counts, '^', color=[0.8,0.0,0.2])
    ax1.set_ylabel("# of Bragg spots")
    ax2.set_ylabel("Hit rate (%)")
    ax3.set_ylabel("XES photon count")
    if (len(photon_counts) > 0) :
      ax3.set_ylim(-1, max(photon_counts))
    ax1.set_xlim(xmin, xmax)
    ax1.set_xlabel("Time")
    # Only the bottom axis keeps x tick labels; hide the lowest y label on
    # each panel so adjacent panels don't overlap.
    for ax in ax1, ax2, ax3:
      if (ax is not ax1) :
        for label in ax.get_xticklabels():
          label.set_visible(False)
      ax.get_yticklabels()[0].set_visible(False)
    ax1.xaxis.set_major_formatter(ticker.FuncFormatter(status_plot.format_time))
    ax3.set_title("Detector analysis for run %d" % self.run_id)
    self.figure.autofmt_xdate()
    self.canvas.draw()
    self.parent.Refresh()

  def save_png (self) :
    """Save the current figure as run<ID>_detector_status.png in the CWD."""
    if (getattr(self, "run_id", None) is not None) :
      file_name = "run%d_detector_status.png" % self.run_id
      self.figure.savefig(file_name, format="png")
      print("Saved image to %s" % os.path.abspath(file_name))
    else :
      print("Can't save an image until run ID is set")
def run (args) :
  """Parse command-line arguments (phil syntax, a bare run number, or a
  status directory path), validate them, and launch the plotting GUI."""
  user_phil = []
  # TODO: replace this stuff with iotbx.phil.process_command_line_with_files
  # as soon as I can safely modify it
  for arg in args :
    if (os.path.isdir(arg)) :
      # A bare directory argument is taken as status_dir.
      user_phil.append(libtbx.phil.parse("""status_dir=\"%s\"""" % arg))
    elif (not "=" in arg) :
      # A bare integer argument is taken as run_id.
      try :
        user_phil.append(libtbx.phil.parse("""run_id=%d""" % int(arg)))
      except ValueError as e :
        raise Sorry("Unrecognized argument '%s'" % arg)
    else :
      # Anything else must be a valid key=value phil assignment.
      try :
        user_phil.append(libtbx.phil.parse(arg))
      except RuntimeError as e :
        raise Sorry("Unrecognized argument '%s' (error: %s)" % (arg, str(e)))
  params = master_phil.fetch(sources=user_phil).extract()
  if (params.run_id is None) :
    master_phil.show()
    raise Usage("run_id must be defined (either run_id=XXX, or the integer "+
      "ID alone).")
  if (params.status_dir is None) :
    master_phil.show()
    raise Usage("status_dir must be defined!")
  elif (not os.path.isdir(params.status_dir)) :
    raise Sorry("%s does not exist or is not a directory!" % params.status_dir)
  assert (params.t_wait is not None) and (params.t_wait > 0)
  assert (params.hit_cutoff is not None) and (params.hit_cutoff > 0)
  assert (params.average_window is not None) and (params.average_window > 0)
  app = wx.App(0)
  frame = DetectorPlotFrame(None, -1, "Detector status for run %d" %
    params.run_id)
  frame.set_run_id(params.run_id)
  frame.METHOD_NAME(params.status_dir, params)
  frame.Show()
  app.MainLoop()
6,085 | make json request | import json
import pytest
from lms.validation import ValidationError
from lms.validation._api import (
APIReadResultSchema,
APIRecordResultSchema,
APIRecordSpeedgraderSchema,
)
class TestAPIRecordSpeedgraderSchema:
    """Tests for APIRecordSpeedgraderSchema (JSON-body request parsing)."""

    def test_it_parses_request(self, json_request, all_fields):
        request = json_request(all_fields)
        parsed_params = APIRecordSpeedgraderSchema(request).parse()
        assert parsed_params == all_fields

    @pytest.mark.parametrize(
        "field",
        [
            "h_username",
            "lis_outcome_service_url",
            "lis_result_sourcedid",
            "learner_canvas_user_id",
        ],
    )
    def test_it_raises_if_required_fields_missing(
        self, json_request, all_fields, field
    ):
        request = json_request(all_fields, exclude=[field])
        schema = APIRecordSpeedgraderSchema(request)
        with pytest.raises(ValidationError):
            schema.parse()

    @pytest.mark.parametrize("field", ["document_url", "canvas_file_id"])
    def test_it_doesnt_raise_if_optional_fields_missing(
        self, json_request, all_fields, field
    ):
        request = json_request(all_fields, exclude=[field])
        APIRecordSpeedgraderSchema(request).parse()

    def test_it_doesnt_raise_null_group_set(self, json_request, all_fields):
        all_fields["group_set"] = None  # Present, but set to None
        request = json_request(all_fields)
        APIRecordSpeedgraderSchema(request).parse()

    @pytest.fixture
    def all_fields(self):
        # A complete, valid payload; individual tests remove keys as needed.
        return {
            "document_url": "https://example.com",
            "canvas_file_id": "file123",
            "h_username": "user123",
            "learner_canvas_user_id": "canvas_user_123",
            "lis_outcome_service_url": "https://hypothesis.shinylms.com/outcomes",
            "lis_result_sourcedid": "modelstudent-assignment1",
            "group_set": 1,
        }
class TestAPIReadResultSchema:
    """Tests for APIReadResultSchema (query-string request parsing)."""

    def test_it_parses_fields_from_query_params(self, pyramid_request, all_fields):
        for key in all_fields:
            pyramid_request.GET[key] = all_fields[key]
        schema = APIReadResultSchema(pyramid_request)
        parsed_params = schema.parse()
        assert parsed_params == all_fields

    def test_it_ignores_fields_in_json_body(self, pyramid_request, all_fields):
        # Fields supplied only in the JSON body are not read, so the
        # required query params are effectively missing and parsing fails.
        pyramid_request.body = json.dumps(all_fields)
        schema = APIReadResultSchema(pyramid_request)
        with pytest.raises(ValidationError):
            schema.parse()

    @pytest.mark.parametrize(
        "field", ["lis_outcome_service_url", "lis_result_sourcedid"]
    )
    def test_it_raises_if_required_fields_missing(
        self, pyramid_request, all_fields, field
    ):
        del all_fields[field]
        pyramid_request.body = json.dumps(all_fields)
        schema = APIReadResultSchema(pyramid_request)
        with pytest.raises(ValidationError):
            schema.parse()

    @pytest.fixture
    def all_fields(self):
        return {
            "lis_outcome_service_url": "https://hypothesis.shinylms.com/outcomes",
            "lis_result_sourcedid": "modelstudent-assignment1",
        }
class TestAPIRecordResultSchema:
    """Tests for APIRecordResultSchema, including score range validation."""

    def test_it_parses_request(self, json_request, all_fields):
        request = json_request(all_fields)
        parsed_params = APIRecordResultSchema(request).parse()
        assert parsed_params == all_fields

    @pytest.mark.parametrize(
        "field",
        ["lis_outcome_service_url", "lis_result_sourcedid", "score", "student_user_id"],
    )
    def test_it_raises_if_required_fields_missing(
        self, json_request, all_fields, field
    ):
        request = json_request(all_fields, exclude=[field])
        schema = APIRecordResultSchema(request)
        with pytest.raises(ValidationError):
            schema.parse()

    # Scores must be numeric-like and within [0, 1].
    @pytest.mark.parametrize("bad_score", ["5", 5.0, 5, -1, 1.2, "fingers"])
    def test_it_raises_if_score_invalid(self, json_request, all_fields, bad_score):
        request = json_request(dict(all_fields, score=bad_score))
        schema = APIRecordResultSchema(request)
        with pytest.raises(ValidationError):
            schema.parse()

    @pytest.mark.parametrize("good_score", ["0", "0.5", "1", "1.0", "0.0", 0.5, 1, 0])
    def test_it_does_not_raise_with_valid_score_value(
        self, json_request, all_fields, good_score
    ):
        request = json_request(dict(all_fields, score=good_score))
        APIRecordResultSchema(request).parse()

    @pytest.fixture
    def all_fields(self):
        return {
            "lis_outcome_service_url": "https://hypothesis.shinylms.com/outcomes",
            "lis_result_sourcedid": "modelstudent-assignment1",
            "score": 0.5,
            "student_user_id": "STUDENT_ID",
        }
@pytest.fixture
def json_request(pyramid_request):
    """Return a factory that turns *data* into a JSON-bodied pyramid request.

    Keys listed in *exclude* are dropped from *data* before serialization.
    """

    def METHOD_NAME(data, exclude=None):
        # Mark the request as JSON in both places the app may look.
        pyramid_request.content_type = "application/json"
        pyramid_request.headers["content-type"] = "application/json"
        for field in exclude or ():
            data.pop(field, None)
        pyramid_request.body = json.dumps(data)
        return pyramid_request

    return METHOD_NAME
6,086 | run | # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Copyright Redhat
#
# SPDX-License-Identifier: GPL-2.0
# Author: Nan Li <nanli@redhat.com>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import os
import re
from avocado.utils import process
from virttest import utils_libvirtd
from virttest import virsh
from virttest import virt_vm
from virttest import test_setup
from virttest.libvirt_xml import vm_xml
from virttest.utils_test import libvirt
from virttest.staging import utils_memory
# Common kwargs for virsh calls: log the command, and raise on failure.
VIRSH_ARGS = {'debug': True, 'ignore_status': False}
def METHOD_NAME(test, params, env):
    """
    1.Verify different size huge page take effect for guest vm
    2.Verify error msg prompts with invalid huge page size or scarce memory

    Scenario:
    Huge page memory status: 4k, 2M, 1G, 0, scarce.

    :param test: avocado-vt test object (logging, fail/cancel helpers).
    :param params: test parameters from the cartesian config.
    :param env: test environment holding the VM objects.
    """
    def setup_test():
        """
        Set hugepage on host: reserve pages, configure the kernel hugepage
        pool, mount hugetlbfs, restart libvirtd, and stop the guest.
        """
        test.log.info("TEST_SETUP: Set hugepage on host")
        utils_memory.set_num_huge_pages(int(vm_nr_hugepages))
        hp_cfg = test_setup.HugePageConfig(params)
        hp_cfg.set_kernel_hugepages(set_pagesize, set_pagenum)
        hp_cfg.hugepage_size = mount_size
        hp_cfg.mount_hugepage_fs()
        utils_libvirtd.libvirtd_restart()
        virsh.destroy(vm_name)

    def run_test():
        """
        Define guest, start guest and check mem.
        """
        test.log.info("TEST_STEP1: Define guest")
        vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
        vmxml.setup_attrs(**vm_attrs)
        cmd_result = virsh.define(vmxml.xml, debug=True)
        if define_error:
            # check scenario: 0 (invalid page size rejected at define time)
            libvirt.check_result(cmd_result, define_error)
            return

        test.log.info("TEST_STEP2: Start guest")
        try:
            vm.start()
            vmxml = vm_xml.VMXML.new_from_dumpxml(vm_name)
            test.log.debug("After start vm, get vmxml is :%s", vmxml)
        except virt_vm.VMStartError as details:
            # check scenario: scarce_mem (start is expected to fail)
            if not re.search(start_error, str(details)):
                test.fail("Failed to start guest: {}".format(str(details)))
            else:
                return

        test.log.info("TEST_STEP3: Check the huge page memory usage")
        hp_cfg = test_setup.HugePageConfig(params)
        if int(utils_memory.get_num_huge_pages()) != int(vm_nr_hugepages):
            test.fail("HugePages_Total should be %s instead of %s" % (
                vm_nr_hugepages, utils_memory.get_num_huge_pages()))
        actural_nr = hp_cfg.get_kernel_hugepages(set_pagesize)
        if int(actural_nr) != int(set_pagenum):
            test.fail("nr_hugepages should be %s instead of %s" % (
                set_pagenum, actural_nr))
        if int(utils_memory.get_num_huge_pages_free()) != int(HugePages_Free):
            test.fail("HugePages_Free should be %s instead of %s" % (
                HugePages_Free, utils_memory.get_num_huge_pages_free()))
        free_page_num = process.METHOD_NAME(free_hugepages_cmd, shell=True,
                                            verbose=True).stdout_text.strip()
        if int(free_page_num) != int(free_hugepages):
            test.fail("free_hugepages should be %s instead of %s" % (
                free_hugepages, free_page_num))

        test.log.info("TEST_STEP4: Check the huge page memory usage")
        # Exercise lifecycle operations; each must leave the guest loggable.
        virsh.suspend(vm.name, **VIRSH_ARGS)
        virsh.resume(vm.name, **VIRSH_ARGS)
        vm.wait_for_login().close()
        if os.path.exists(save_file):
            os.remove(save_file)
        virsh.save(vm.name, save_file, **VIRSH_ARGS)
        virsh.restore(save_file, **VIRSH_ARGS)
        vm.wait_for_login().close()
        virsh.managedsave(vm.name, **VIRSH_ARGS)
        vm.start()
        vm.wait_for_login().close()
        virsh.reboot(vm.name, **VIRSH_ARGS)
        session = vm.wait_for_login()

        test.log.info("TEST_STEP5: Verify guest memory could be consumed")
        mem_free = utils_memory.freememtotal(session)
        # Keep ~200MB headroom so the guest itself stays responsive.
        status, stdout = session.cmd_status_output("swapoff -a; memhog %s" % (
            mem_free - 204800))
        if status:
            # BUGFIX: previously `raise test.fail("...%s", stdout)` -- test.fail()
            # takes a single message (and raises itself), so the output was never
            # interpolated and the call signature was wrong.
            test.fail("Failed to consume memory:%s" % stdout)
        virsh.destroy(vm.name, **VIRSH_ARGS)

    def teardown_test():
        """
        Clean data: restore the original guest XML, remove the save file,
        and undo the host hugepage configuration.
        """
        test.log.info("TEST_TEARDOWN: Clean up env.")
        bkxml.sync()
        if os.path.exists(save_file):
            os.remove(save_file)
        hp_cfg = test_setup.HugePageConfig(params)
        hp_cfg.cleanup()

    vm_name = params.get("main_vm")
    vm = env.get_vm(vm_name)
    vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
    bkxml = vmxml.copy()

    define_error = params.get("define_error")
    vm_nr_hugepages = params.get("vm_nr_hugepages")
    free_hugepages_cmd = params.get("free_hugepages_cmd")
    set_pagesize = params.get("set_pagesize")
    set_pagenum = params.get("set_pagenum")
    HugePages_Free = params.get("HugePages_Free")
    mount_size = params.get("mount_size")
    free_hugepages = params.get("free_hugepages")
    start_error = params.get("start_error")
    # NOTE(review): eval() of a config value -- acceptable only because the
    # cartesian config is trusted test input, but ast.literal_eval would be safer.
    vm_attrs = eval(params.get("vm_attrs", "{}"))
    save_file = params.get("save_file", "/tmp/guest.save")

    try:
        setup_test()
        run_test()
    finally:
        teardown_test()
6,087 | extract data | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._operations import build_list_all_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.chaos.aio.ChaosManagementClient`'s
        :attr:`operations` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # Wired up by the generated client; accepts positional or keyword style.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list_all(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
        """Get a list all available Operations.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either Operation or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.chaos.models.Operation]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
        cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the operation template; subsequent pages follow
            # the service-provided next_link with the client's api-version.
            if not next_link:
                request = build_list_all_request(
                    api_version=api_version,
                    template_url=self.list_all.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def METHOD_NAME(pipeline_response):
            # Deserialize one page: return (continuation token, page items).
            deserialized = self._deserialize("OperationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, METHOD_NAME)

    list_all.metadata = {"url": "/providers/Microsoft.Chaos/operations"}
6,088 | validate name | from django.db.models import F
from rest_framework import serializers
from zentral.core.compliance_checks.models import ComplianceCheck
from .cleanup import get_default_snapshot_retention_days
from .compliance_checks import InventoryJMESPathCheck
from .models import EnrollmentSecret, JMESPathCheck, MetaBusinessUnit, Tag, Taxonomy
# Machine mass tagging
class MachineTagsUpdatePrincipalUsers(serializers.Serializer):
    """Selects machines by principal-user unique IDs and/or principal names."""
    unique_ids = serializers.ListField(
        child=serializers.CharField(min_length=1),
        required=False
    )
    principal_names = serializers.ListField(
        child=serializers.CharField(min_length=1),
        required=False
    )

    def validate(self, data):
        # At least one of the two selectors must be provided.
        if not data.get("unique_ids") and not data.get("principal_names"):
            raise serializers.ValidationError("Unique ids and principal names cannot be both empty.")
        return data
class MachineTagsUpdateSerializer(serializers.Serializer):
    """Payload for the machine mass-tagging endpoint."""
    # Mapping of tag values keyed by name; a null value is accepted
    # (presumably to clear that tag -- NOTE(review): confirm against the view).
    tags = serializers.DictField(child=serializers.CharField(allow_null=True), allow_empty=False)
    principal_users = MachineTagsUpdatePrincipalUsers()
# Archive or prune machines
class MachineSerialNumbersSerializer(serializers.Serializer):
    """Payload for archive/prune machine operations: 1-1000 serial numbers."""
    serial_numbers = serializers.ListField(
        child=serializers.CharField(min_length=1),
        min_length=1,
        max_length=1000
    )
# Cleanup inventory
class CleanupInventorySerializer(serializers.Serializer):
    """Payload for the inventory cleanup task: snapshot retention in days (1-3660)."""
    days = serializers.IntegerField(min_value=1, max_value=3660, default=get_default_snapshot_retention_days)
# Standard model serializers
class JMESPathCheckSerializer(serializers.ModelSerializer):
    """Serializer for JMESPath compliance checks.

    The ``name``/``description``/``version`` fields live on the linked
    ``ComplianceCheck`` row; this serializer keeps the two models in sync
    and bumps the compliance check version whenever the JMESPath check
    itself (fields or tags) changes.
    """
    name = serializers.CharField(source="compliance_check.name")
    description = serializers.CharField(
        source="compliance_check.description",
        allow_blank=True, required=False, default=""
    )
    version = serializers.IntegerField(source="compliance_check.version", read_only=True)

    class Meta:
        model = JMESPathCheck
        fields = ("name", "description", "version",
                  "id", "source_name", "platforms", "tags",
                  "jmespath_expression", "created_at", "updated_at")

    def METHOD_NAME(self, value):
        # Enforce name uniqueness among compliance checks of this model,
        # excluding the instance being updated (if any).
        qs = ComplianceCheck.objects.filter(model=InventoryJMESPathCheck.get_model(), name=value)
        if self.instance:
            qs = qs.exclude(pk=self.instance.compliance_check.pk)
        if qs.count():
            raise serializers.ValidationError(
                f"A {InventoryJMESPathCheck.model_display} with this name already exists."
            )
        return value

    def create(self, validated_data):
        # Create the backing ComplianceCheck first, then the JMESPath check.
        cc_data = validated_data.pop("compliance_check")
        compliance_check = ComplianceCheck.objects.create(
            model=InventoryJMESPathCheck.get_model(),
            name=cc_data.get("name"),
            description=cc_data.get("description") or "",
        )
        tags = validated_data.pop("tags")
        jmespath_check = JMESPathCheck.objects.create(
            compliance_check=compliance_check,
            **validated_data,
        )
        jmespath_check.tags.set(tags)
        return jmespath_check

    def update(self, instance, validated_data):
        # compliance check
        compliance_check = instance.compliance_check
        cc_data = validated_data.pop("compliance_check")
        compliance_check.name = cc_data.get("name")
        compliance_check.description = cc_data.get("description") or ""
        # JMESPath check
        jmespath_check_updated = False
        tags = sorted(validated_data.pop("tags", []), key=lambda t: t.pk)
        for key, value in validated_data.items():
            old_value = getattr(instance, key)
            if value != old_value:
                jmespath_check_updated = True
                setattr(instance, key, value)
        if sorted(instance.tags.all(), key=lambda t: t.pk) != tags:
            jmespath_check_updated = True
        # Only bump the version when the check content actually changed;
        # F() keeps the increment atomic at the database level.
        if jmespath_check_updated:
            compliance_check.version = F("version") + 1
        compliance_check.save()
        if jmespath_check_updated:
            # to materialize the updated version
            compliance_check.refresh_from_db()
        instance.save()
        instance.tags.set(tags)
        return instance
class MetaBusinessUnitSerializer(serializers.ModelSerializer):
    """Serializer for MetaBusinessUnit; API enrollment can be enabled but
    (currently) not disabled through this API."""
    api_enrollment_enabled = serializers.BooleanField(required=False)

    class Meta:
        model = MetaBusinessUnit
        fields = ("id", "name", "api_enrollment_enabled", "created_at", "updated_at")
        read_only_fields = ("api_enrollment_enabled",)

    def validate_api_enrollment_enabled(self, value):
        # Enabling is one-way: reject attempts to turn it off.
        if self.instance and self.instance.api_enrollment_enabled() and not value:
            raise serializers.ValidationError("Cannot disable API enrollment")
        return value

    def create(self, validated_data):
        api_enrollment_enabled = validated_data.pop("api_enrollment_enabled", False)
        mbu = super().create(validated_data)
        if api_enrollment_enabled:
            mbu.create_enrollment_business_unit()
        return mbu

    def update(self, instance, validated_data):
        api_enrollment_enabled = validated_data.pop("api_enrollment_enabled", False)
        mbu = super().update(instance, validated_data)
        if not mbu.api_enrollment_enabled() and api_enrollment_enabled:
            mbu.create_enrollment_business_unit()
        # TODO: switch off api_enrollment_enabled
        return mbu
class TagSerializer(serializers.ModelSerializer):
    """Plain model serializer for machine tags."""
    class Meta:
        model = Tag
        fields = ("id", "taxonomy", "meta_business_unit", "name", "slug", "color")
class TaxonomySerializer(serializers.ModelSerializer):
    """Plain model serializer for tag taxonomies."""
    class Meta:
        model = Taxonomy
        fields = ("id", "meta_business_unit", "name", "created_at", "updated_at")
class EnrollmentSecretSerializer(serializers.ModelSerializer):
    """Plain model serializer for enrollment secrets."""
    class Meta:
        model = EnrollmentSecret
        fields = ("id", "secret", "meta_business_unit", "tags", "serial_numbers", "udids", "quota", "request_count")
6,089 | delaunay 2d | """Filters module with a class to manage filters/algorithms for unstructured grid datasets."""
from functools import wraps
from pyvista.core import _vtk_core as _vtk
from pyvista.core.errors import VTKVersionError
from pyvista.core.filters import _get_output, _update_alg
from pyvista.core.filters.data_set import DataSetFilters
from pyvista.core.filters.poly_data import PolyDataFilters
from pyvista.core.utilities.misc import abstract_class
@abstract_class
class UnstructuredGridFilters(DataSetFilters):
    """An internal class to manage filters/algorithms for unstructured grid datasets."""

    # Delegates to the PolyData implementation; @wraps copies its docstring.
    @wraps(PolyDataFilters.METHOD_NAME)
    def METHOD_NAME(self, *args, **kwargs):  # numpydoc ignore=PR01,RT01
        """Wrap ``PolyDataFilters.delaunay_2d``."""
        return PolyDataFilters.METHOD_NAME(self, *args, **kwargs)

    @wraps(PolyDataFilters.reconstruct_surface)
    def reconstruct_surface(self, *args, **kwargs):  # numpydoc ignore=PR01,RT01
        """Wrap ``PolyDataFilters.reconstruct_surface``."""
        return PolyDataFilters.reconstruct_surface(self, *args, **kwargs)

    def subdivide_tetra(self):
        """Subdivide each tetrahedron into twelve tetrahedrons.

        Returns
        -------
        pyvista.UnstructuredGrid
            UnstructuredGrid containing the subdivided tetrahedrons.

        Examples
        --------
        First, load a sample tetrahedral UnstructuredGrid and plot it.

        >>> from pyvista import examples
        >>> grid = examples.load_tetbeam()
        >>> grid.plot(show_edges=True, line_width=2)

        Now, subdivide and plot.

        >>> subdivided = grid.subdivide_tetra()
        >>> subdivided.plot(show_edges=True, line_width=2)
        """
        alg = _vtk.vtkSubdivideTetra()
        alg.SetInputData(self)
        _update_alg(alg)
        return _get_output(alg)

    def clean(
        self,
        tolerance=0,
        remove_unused_points=True,
        produce_merge_map=True,
        average_point_data=True,
        merging_array_name=None,
        progress_bar=False,
    ):
        """Merge duplicate points and remove unused points in an UnstructuredGrid.

        This filter, merging coincident points as defined by a merging
        tolerance and optionally removes unused points. The filter does not
        modify the topology of the input dataset, nor change the types of
        cells. It may however, renumber the cell connectivity ids.

        This filter implements `vtkStaticCleanUnstructuredGrid
        <https://vtk.org/doc/nightly/html/classvtkStaticCleanUnstructuredGrid.html>`_

        Parameters
        ----------
        tolerance : float, default: 0.0
            The absolute point merging tolerance.

        remove_unused_points : bool, default: True
            Indicate whether points unused by any cell are removed from the
            output. Note that when this is off, the filter can successfully
            process datasets with no cells (and just points). If on in this
            case, and there are no cells, the output will be empty.

        produce_merge_map : bool, default: True
            Indicate whether a merge map should be produced on output.
            The merge map, if requested, maps each input point to its
            output point id, or provides a value of -1 if the input point
            is not used in the output. The merge map is associated with
            the filter's output field data and is named ``"PointMergeMap"``.

        average_point_data : bool, default: True
            Indicate whether point coordinates and point data of merged points
            are averaged. When ``True``, the data coordinates and attribute
            values of all merged points are averaged. When ``False``, the point
            coordinate and data of the single remaining merged point is
            retained.

        merging_array_name : str, optional
            If a ``merging_array_name`` is specified and exists in the
            ``point_data``, then point merging will switch into a mode where
            merged points must be both geometrically coincident and have
            matching point data. When set, ``tolerance`` has no effect.

        progress_bar : bool, default: False
            Display a progress bar to indicate progress.

        Returns
        -------
        UnstructuredGrid
            Cleaned unstructured grid.

        Examples
        --------
        Demonstrate cleaning an UnstructuredGrid and show how it can be used to
        average the point data across merged points.

        >>> import pyvista as pv
        >>> from pyvista import examples
        >>> hexbeam = examples.load_hexbeam()
        >>> hexbeam_shifted = hexbeam.translate([1, 0, 0])
        >>> hexbeam.point_data['data'] = [0] * hexbeam.n_points
        >>> hexbeam_shifted.point_data['data'] = [1] * hexbeam.n_points
        >>> merged = hexbeam.merge(hexbeam_shifted, merge_points=False)
        >>> cleaned = merged.clean(average_point_data=True)
        >>> cleaned.n_points < merged.n_points
        True

        Show how point averaging using the ``clean`` method with
        ``average_point_data=True`` results in averaged point data for merged
        points.

        >>> pl = pv.Plotter(shape=(1, 2))
        >>> _ = pl.add_mesh(merged, scalars='data', show_scalar_bar=False)
        >>> pl.subplot(0, 1)
        >>> _ = pl.add_mesh(cleaned, scalars='data')
        >>> pl.show()
        """
        # vtkStaticCleanUnstructuredGrid is only available in newer VTK builds.
        try:
            from vtkmodules.vtkFiltersCore import vtkStaticCleanUnstructuredGrid
        except ImportError:  # pragma no cover
            raise VTKVersionError("UnstructuredGrid.clean requires VTK >= 9.2.2") from None

        alg = vtkStaticCleanUnstructuredGrid()
        alg.SetInputDataObject(self)
        # ``tolerance`` is interpreted as an absolute distance, not relative
        # to the dataset bounds.
        alg.SetAbsoluteTolerance(True)
        alg.SetTolerance(tolerance)
        alg.SetMergingArray(merging_array_name)
        alg.SetRemoveUnusedPoints(remove_unused_points)
        alg.SetProduceMergeMap(produce_merge_map)
        alg.SetAveragePointData(average_point_data)
        _update_alg(alg, progress_bar, 'Cleaning Unstructured Grid')
        return _get_output(alg)
6,090 | gen args | #!/usr/bin/env python
#
# Copyright 2019 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
# User-facing message served (as a 503 page) when neither an Endpoints
# service config path nor a service name is available in the environment.
MISSING_SERVICE_CONFIG_ERROR = '''
Did you forget to build the Endpoints service configuration
into the ESPv2 image? Please refer to the official serverless
quickstart tutorials (below) for more information.
https://cloud.google.com/endpoints/docs/openapi/get-started-cloud-run#configure_esp
https://cloud.google.com/endpoints/docs/openapi/get-started-cloud-functions#configure_esp
https://cloud.google.com/endpoints/docs/grpc/get-started-cloud-run#configure_esp
If you are following along with these tutorials but have not
reached the step above yet, this error is expected. Feel free
to temporarily disregard this error message.
If you wish to skip this step, please specify the name of the
service in the ENDPOINTS_SERVICE_NAME environment variable.
Note this deployment mode is **not** officially supported.
It is recommended that you follow the tutorials linked above.
'''
# User-facing message served when the ESPv2_ARGS environment variable
# cannot be parsed (e.g. an empty custom delimiter).
MALFORMED_ESPv2_ARGS_ERROR = '''
Malformed ESPv2_ARGS environment variable.
Please refer to the official ESPv2 startup reference
(below) for information on how to format ESPv2_ARGS.
https://cloud.google.com/endpoints/docs/openapi/specify-esp-v2-startup-options#setting-configuration-flags
'''
def assert_env_var(name, help_msg=""):
    """Raise AssertionError when *name* is not present in os.environ.

    The optional *help_msg* is appended to the error text to give the
    operator remediation hints. Callers rely on AssertionError being the
    raised type, so that must not change.
    """
    if os.environ.get(name) is None:
        message = "Serverless ESPv2 expects {} in environment variables.\n{}".format(
            name, help_msg
        )
        raise AssertionError(message)
def make_error_app(msg):
    """Build a minimal WSGI app that always answers 503 with *msg*.

    The message is encoded to UTF-8 up front because WSGI response
    bodies must be byte strings.
    """
    body = msg.encode("utf-8")

    def error_app(environ, start_response):
        headers = [("Content-Type", "text/plain")]
        start_response("503 Service Unavailable", headers)
        return [body]

    return error_app
def serve_msg(msg):
    """Serve *msg* forever as a 503 error page on the port given by $PORT.

    This never returns: instead of crashing the container, we keep it
    alive so anyone probing it sees the configuration error message.
    """
    import wsgiref.simple_server

    error_app = make_error_app(msg)
    listen_port = int(os.environ["PORT"])
    httpd = wsgiref.simple_server.make_server("", listen_port, error_app)
    httpd.serve_forever()
def serve_error_msg(msg):
    """Log *msg* at ERROR level, then block forever serving it as a 503 page."""
    logging.error(msg)
    serve_msg(msg)
def serve_warning_msg(msg):
    """Log *msg* at WARNING level, then block forever serving it as a 503 page."""
    logging.warning(msg)
    serve_msg(msg)
def METHOD_NAME(cmd):
    """Assemble the argv list used to exec ESPv2's start_proxy.py.

    ``cmd`` becomes argv[0]; the remaining flags are derived from the
    environment: PORT (required), ENDPOINTS_SERVICE_PATH or
    ENDPOINTS_SERVICE_NAME, optional ENDPOINTS_SERVICE_VERSION, and the
    free-form ESPv2_ARGS pass-through.
    """
    env = os.environ
    argv = [cmd, "/apiproxy/start_proxy.py", "--on_serverless"]

    # Uncaught AssertionError on purpose: without a port we cannot even
    # serve a nice error handler, so crashing is the only option.
    assert_env_var("PORT")
    argv.append("--http_port={}".format(env["PORT"]))

    if "ENDPOINTS_SERVICE_PATH" in env:
        # A baked-in service config file pins the rollout.
        argv += [
            "--rollout_strategy=fixed",
            "--service_json_path={}".format(env["ENDPOINTS_SERVICE_PATH"]),
        ]
    else:
        try:
            assert_env_var("ENDPOINTS_SERVICE_NAME", MISSING_SERVICE_CONFIG_ERROR)
        except AssertionError as error:
            # Never returns: serves the warning page forever.
            serve_warning_msg(str(error))
        argv.append("--service={}".format(env["ENDPOINTS_SERVICE_NAME"]))
        if "ENDPOINTS_SERVICE_VERSION" in env:
            argv += [
                "--rollout_strategy=fixed",
                "--version={}".format(env["ENDPOINTS_SERVICE_VERSION"]),
            ]
        else:
            argv.append("--rollout_strategy=managed")

    if "ESPv2_ARGS" in env:
        # By default, ESPv2_ARGS is comma-separated. If a comma must
        # appear inside an arg, an alternative syntax exists: pick a
        # replacement delimiter, specify it at the beginning of the
        # string between two caret (^) symbols, and use it within the
        # arg string. Example:
        # ^++^--cors_allow_methods="GET,POST,PUT,OPTIONS"++--cors_allow_credentials
        raw = env["ESPv2_ARGS"]
        delim = ","
        if raw.startswith("^") and "^" in raw[1:]:
            delim, raw = raw[1:].split("^", 1)
        if not delim:
            # Never returns: serves the error page forever.
            serve_error_msg(MALFORMED_ESPv2_ARGS_ERROR)
        argv.extend(raw.split(delim))

    return argv
if __name__ == "__main__":
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
    # Interpreter that will run start_proxy.py; also passed as argv[0].
    cmd = "/usr/bin/python3"
    args = METHOD_NAME(cmd)
    # Replace this process with the proxy starter; never returns on success.
    os.execv(cmd, args)
6,091 | compilers minimum version | from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.microsoft import check_min_vs, is_msvc_static_runtime, is_msvc
from conan.tools.files import get, copy, rm, rmdir, collect_libs
from conan.tools.build import check_min_cppstd
from conan.tools.scm import Version
from conan.tools.cmake import CMakeToolchain, CMakeDeps, CMake
import os
# Minimum Conan client version required by the APIs this recipe imports.
required_conan_version = ">=1.53.0"
class VsgConan(ConanFile):
    """Conan recipe packaging the VulkanSceneGraph (VSG) library."""
    name = "vsg"
    description = "VulkanSceneGraph"
    license = "MIT"
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://www.vulkanscenegraph.org"
    topics = ("vulkan", "scenegraph", "graphics", "3d")
    package_type = "library"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "max_devices": [1,2,3,4],
        "fPIC": [True, False],
    }
    default_options = {
        "shared": False,
        "max_devices" : 1,
        "fPIC": True,
    }
    @property
    def _min_cppstd(self):
        # VSG is a C++17 library.
        return 17
    @property
    def METHOD_NAME(self):
        """Map compiler name -> minimum version accepted by validate()."""
        return {
            "gcc": "7",
            "clang": "7",
            "apple-clang": "10",
        }
    def config_options(self):
        """Drop options that do not apply to the target OS."""
        if self.settings.os == "Windows":
            del self.options.fPIC
    def configure(self):
        # fPIC is meaningless for shared builds, so remove it.
        if self.options.shared:
            self.options.rm_safe("fPIC")
    def requirements(self):
        # transitive_headers: Vulkan loader headers appear in VSG's public API.
        self.requires("vulkan-loader/1.3.239.0", transitive_headers=True)
    def validate(self):
        """Reject configurations this recipe cannot build."""
        if self.info.settings.compiler.cppstd:
            check_min_cppstd(self, self._min_cppstd)
        check_min_vs(self, 191)
        if is_msvc_static_runtime(self):
            raise ConanInvalidConfiguration(f"{self.name} does not support MSVC static runtime (MT/MTd) configurations, only dynamic runtime (MD/MDd) is supported")
        if not is_msvc(self):
            minimum_version = self.METHOD_NAME.get(str(self.info.settings.compiler), False)
            if minimum_version and Version(self.info.settings.compiler.version) < minimum_version:
                raise ConanInvalidConfiguration(
                    f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
                )
    def source(self):
        get(self, **self.conan_data["sources"][self.version], destination=self.source_folder, strip_root=True)
    def generate(self):
        """Produce the CMake toolchain/dependency files consumed by build()."""
        tc = CMakeToolchain(self)
        if is_msvc(self):
            tc.variables["USE_MSVC_RUNTIME_LIBRARY_DLL"] = False
        tc.variables["BUILD_SHARED_LIBS"] = self.options.shared
        # Shader compilation support is disabled here (no glslang requirement).
        tc.variables["VSG_SUPPORTS_ShaderCompiler"] = 0
        tc.variables["VSG_MAX_DEVICES"] = self.options.max_devices
        tc.generate()
        deps = CMakeDeps(self)
        deps.generate()
    def build(self):
        cmake = CMake(self)
        cmake.configure()
        cmake.build()
    def package(self):
        """Install, then prune files that must not ship in the package."""
        copy(self, pattern="LICENSE.md", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
        cmake = CMake(self)
        cmake.install()
        rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))
        rmdir(self, os.path.join(self.package_folder, "share"))
        rm(self, "*.la", os.path.join(self.package_folder, "lib"))
        rm(self, "*.pdb", os.path.join(self.package_folder, "lib"))
        rm(self, "*.pdb", os.path.join(self.package_folder, "bin"))
        rm(self, "Find*.cmake", os.path.join(self.package_folder, "lib/cmake/vsg"))
        rm(self, "*Config.cmake", os.path.join(self.package_folder, "lib/cmake/vsg"))
    def package_info(self):
        """Describe the built package to consuming recipes/generators."""
        self.cpp_info.libs = collect_libs(self)
        self.cpp_info.set_property("cmake_file_name", "vsg")
        self.cpp_info.set_property("cmake_target_name", "vsg::vsg")
        if self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.system_libs.append("pthread")
        # TODO: to remove in conan v2 once cmake_find_package_* generators removed
        self.cpp_info.filenames["cmake_find_package"] = "vsg"
        self.cpp_info.filenames["cmake_find_package_multi"] = "vsg"
        self.cpp_info.names["cmake_find_package"] = "VSG"
        self.cpp_info.names["cmake_find_package_multi"] = "vsg"
6,092 | get needed files | from __future__ import print_function
import datetime
import itertools
from django.core.management.base import BaseCommand
from django.db.models.loading import cache
from django.utils.translation import gettext as _
from django.utils.translation import ngettext
from filetracker.utils import split_name
from oioioi.filetracker.client import get_client
class Command(BaseCommand):
    help = _("Delete all orphaned files older than specified number of days.")
    def add_arguments(self, parser):
        """Register the --days and --pretend command-line options."""
        parser.add_argument(
            '-d',
            '--days',
            action='store',
            type=int,
            dest='days',
            default=30,
            help=_(
                "Orphaned files older than DAYS days will "
                "be deleted. Default value is 30."
            ),
            metavar=_("DAYS"),
        )
        parser.add_argument(
            '-p',
            '--pretend',
            action='store_true',
            dest='pretend',
            default=False,
            help=_(
                "If set, the orphaned files will only be displayed, not deleted."
            ),
        )
    def METHOD_NAME(self):
        """Collect filetracker names of every file referenced by a FileField.

        NOTE(review): relies on the legacy ``django.db.models.loading`` app
        cache, which was removed in Django 1.9+ -- confirm the project's
        Django version before reusing this code.
        """
        result = []
        for app in cache.get_apps():
            model_list = cache.get_models(app)
            for model in model_list:
                # Names of all FileField columns on this model.
                file_fields = [
                    field.name
                    for field in model._meta.fields
                    if field.get_internal_type() == 'FileField'
                ]
                if len(file_fields) > 0:
                    files = model.objects.all().values_list(*file_fields)
                    # split_name()[0] keeps the path part of the stored name.
                    result.extend(
                        [
                            split_name(file)[0]
                            for file in itertools.chain.from_iterable(files)
                            if file
                        ]
                    )
        return result
    def handle(self, *args, **options):
        """Delete (or, with --pretend, only list) orphaned files older than --days."""
        needed_files = self.METHOD_NAME()
        all_files = get_client().list_local_files()
        # Orphans modified after this timestamp are kept (grace period).
        max_date_to_delete = datetime.datetime.now() - datetime.timedelta(
            days=options['days']
        )
        # Names present on disk but not referenced by any FileField.
        diff = set([f[0] for f in all_files]) - set(needed_files)
        # all_files entries look like (name, mtime) pairs.
        to_delete = [
            f[0]
            for f in all_files
            if f[0] in diff
            and datetime.datetime.fromtimestamp(f[1]) < max_date_to_delete
        ]
        files_count = len(to_delete)
        if files_count == 0 and int(options['verbosity']) > 0:
            print(_("No files to delete."))
        elif options['pretend']:
            # Dry run: report what would be deleted, verbosity-dependent.
            if int(options['verbosity']) > 1:
                print(
                    ngettext(
                        "The following %d file is scheduled for deletion:",
                        "The following %d files are scheduled for deletion:",
                        files_count,
                    )
                    % files_count
                )
                for file in to_delete:
                    print(" ", file)
            elif int(options['verbosity']) == 1:
                print(
                    ngettext(
                        "%d file scheduled for deletion.",
                        "%d files scheduled for deletion.",
                        files_count,
                    )
                    % files_count
                )
        else:
            # Real deletion path.
            if int(options['verbosity']) > 1:
                print(
                    ngettext(
                        "Deleting the following %d file:",
                        "Deleting the following %d files:",
                        files_count,
                    )
                    % files_count
                )
            if int(options['verbosity']) == 1:
                print(
                    ngettext("Deleting %d file", "Deleting %d files", files_count)
                    % files_count
                )
            for file in to_delete:
                if int(options['verbosity']) > 1:
                    print(" ", file)
                get_client().delete_file('/' + file)
6,093 | build arguments schema | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "site-recovery replication-eligibility show-default",
)
class ShowDefault(AAZCommand):
    """Get whether a given VM can be protected or not in which case returns list of errors.

    :example: Show default replication-eligibility
        az site-recovery replication-eligibility show-default -g rg --virtual-machine-name vm_name
    """
    # Generated metadata: maps the command to its ARM resource and API version.
    _aaz_info = {
        "version": "2022-08-01",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/virtualmachines/{}/providers/microsoft.recoveryservices/replicationeligibilityresults/default", "2022-08-01"],
        ]
    }
    def _handler(self, command_args):
        """Framework entry point: parse args, run the operation, return output."""
        super()._handler(command_args)
        self._execute_operations()
        return self._output()
    _args_schema = None
    @classmethod
    def METHOD_NAME(cls, *args, **kwargs):
        """Build (once, then cache on the class) the argument schema."""
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super().METHOD_NAME(*args, **kwargs)
        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.virtual_machine_name = AAZStrArg(
            options=["--virtual-machine-name"],
            help="Virtual Machine name.",
            required=True,
            id_part="name",
        )
        return cls._args_schema
    def _execute_operations(self):
        """Run the single GET operation, wrapped in the pre/post hooks."""
        self.pre_operations()
        self.ReplicationEligibilityResultsGet(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        # Extension point: runs before the HTTP operation.
        pass
    @register_callback
    def post_operations(self):
        # Extension point: runs after the HTTP operation.
        pass
    def _output(self, *args, **kwargs):
        """Flatten and return the deserialized response stored in ctx."""
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
        return result
    class ReplicationEligibilityResultsGet(AAZHttpOperation):
        """GET the default replicationEligibilityResults resource for a VM."""
        CLIENT_TYPE = "MgmtClient"
        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.on_200(session)
            return self.on_error(session.http_response)
        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{virtualMachineName}/providers/Microsoft.RecoveryServices/replicationEligibilityResults/default",
                **self.url_parameters
            )
        @property
        def method(self):
            return "GET"
        @property
        def error_format(self):
            return "ODataV4Format"
        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
                **self.serialize_url_param(
                    "virtualMachineName", self.ctx.args.virtual_machine_name,
                    required=True,
                ),
            }
            return parameters
        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2022-08-01",
                    required=True,
                ),
            }
            return parameters
        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters
        def on_200(self, session):
            """Deserialize a 200 response body into ``ctx.vars.instance``."""
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )
        _schema_on_200 = None
        @classmethod
        def _build_schema_on_200(cls):
            """Build (once, then cache) the schema of the 200 response body."""
            if cls._schema_on_200 is not None:
                return cls._schema_on_200
            cls._schema_on_200 = AAZObjectType()
            _schema_on_200 = cls._schema_on_200
            _schema_on_200.id = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.name = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.properties = AAZObjectType()
            _schema_on_200.type = AAZStrType(
                flags={"read_only": True},
            )
            properties = cls._schema_on_200.properties
            properties.client_request_id = AAZStrType(
                serialized_name="clientRequestId",
                flags={"read_only": True},
            )
            properties.errors = AAZListType()
            errors = cls._schema_on_200.properties.errors
            errors.Element = AAZObjectType()
            _element = cls._schema_on_200.properties.errors.Element
            _element.code = AAZStrType()
            _element.message = AAZStrType()
            _element.possible_causes = AAZStrType(
                serialized_name="possibleCauses",
            )
            _element.recommended_action = AAZStrType(
                serialized_name="recommendedAction",
            )
            _element.status = AAZStrType(
                flags={"read_only": True},
            )
            return cls._schema_on_200
class _ShowDefaultHelper:
    """Helper class for ShowDefault"""
# Public API of this generated module.
__all__ = ["ShowDefault"]
6,094 | test io update | from artiq.experiment import *
from artiq.test.hardware_testbench import ExperimentCase
from artiq.coredevice import urukul
class UrukulExp(EnvExperiment):
    """Runs one named Urukul CPLD kernel; results are stored in datasets.

    Comments (not docstrings) are used inside @kernel methods since the
    bodies are compiled for the core device.
    """
    def build(self, runner):
        self.setattr_device("core")
        self.dev = self.get_device("urukul_cpld")
        # Name of the kernel method that run() dispatches to.
        self.runner = runner
    def run(self):
        # Dispatch to the kernel selected at construction time.
        getattr(self, self.runner)()
    # Smoke test: merely constructing the driver on the host.
    @kernel
    def instantiate(self):
        pass
    @kernel
    def init(self):
        self.core.break_realtime()
        self.dev.init()
    @kernel
    def cfg_write(self):
        self.core.break_realtime()
        self.dev.init()
        self.dev.cfg_write(self.dev.cfg_reg)
    @kernel
    def sta_read(self):
        self.core.break_realtime()
        self.dev.init()
        sta = self.dev.sta_read()
        self.set_dataset("sta", sta)
    # Exercise the RF switch lines individually and as a mask.
    @kernel
    def switches(self):
        self.core.break_realtime()
        self.dev.init()
        self.dev.io_rst()
        self.dev.cfg_sw(0, False)
        self.dev.cfg_sw(0, True)
        self.dev.cfg_sw(3, True)
        self.dev.cfg_switches(0b1010)
    # Measure average RTIO time per switch toggle over n iterations.
    @kernel
    def switch_speed(self):
        self.core.break_realtime()
        self.dev.init()
        n = 10
        t0 = self.core.get_rtio_counter_mu()
        for i in range(n):
            self.dev.cfg_sw(3, bool(i & 1))
        self.set_dataset("dt", self.core.mu_to_seconds(
            self.core.get_rtio_counter_mu() - t0) / n)
    @kernel
    def switches_readback(self):
        self.core.reset()  # clear switch TTLs
        self.dev.init()
        sw_set = 0b1010
        self.dev.cfg_switches(sw_set)
        sta_get = self.dev.sta_read()
        self.set_dataset("sw_set", sw_set)
        self.set_dataset("sta_get", sta_get)
    # Set all four attenuators in one transfer, then read them back.
    @kernel
    def att(self):
        self.core.break_realtime()
        self.dev.init()
        # clear backing state
        self.dev.att_reg = 0
        att_set = 0x12345678
        self.dev.set_all_att_mu(att_set)
        # confirm that we can set all attenuators and read back
        att_get = self.dev.get_att_mu()
        # confirm backing state
        att_reg = self.dev.att_reg
        self.set_dataset("att_set", att_set)
        self.set_dataset("att_get", att_get)
        self.set_dataset("att_reg", att_reg)
    # Set attenuators one channel at a time, then read all back at once.
    @kernel
    def att_channel(self):
        self.core.break_realtime()
        self.dev.init()
        # clear backing state
        self.dev.att_reg = 0
        att_set = int32(0x87654321)
        # set individual attenuators
        self.dev.set_att_mu(0, 0x21)
        self.dev.set_att_mu(1, 0x43)
        self.dev.set_att_mu(2, 0x65)
        self.dev.set_att_mu(3, 0x87)
        # confirm that we can set all attenuators and read back
        att_get = self.dev.get_att_mu()
        # confirm backing state
        att_reg = self.dev.att_reg
        self.set_dataset("att_set", att_set)
        self.set_dataset("att_get", att_get)
        self.set_dataset("att_reg", att_reg)
    # Set channels individually and read each one back individually.
    @kernel
    def att_channel_get(self):
        self.core.break_realtime()
        self.dev.init()
        # clear backing state
        self.dev.att_reg = 0
        att_set = [int32(0x21), int32(0x43),
                   int32(0x65), int32(0x87)]
        # set individual attenuators
        for i in range(len(att_set)):
            self.dev.set_att_mu(i, att_set[i])
        # confirm that we can set all attenuators and read back
        att_get = [0 for _ in range(len(att_set))]
        for i in range(len(att_set)):
            self.core.break_realtime()
            att_get[i] = self.dev.get_channel_att_mu(i)
        # confirm backing state
        att_reg = self.dev.att_reg
        self.set_dataset("att_set", att_set)
        self.set_dataset("att_get", att_get)
        self.set_dataset("att_reg", att_reg)
    # Measure average RTIO time per attenuator write.
    @kernel
    def att_speed(self):
        self.core.break_realtime()
        self.dev.init()
        n = 10
        t0 = self.core.get_rtio_counter_mu()
        for i in range(n):
            self.dev.set_att(3, 30 * dB)
        self.set_dataset("dt", self.core.mu_to_seconds(
            self.core.get_rtio_counter_mu() - t0) / n)
    @kernel
    def io_update(self):
        self.core.break_realtime()
        self.dev.init()
        # Minimal-width (8 mu) IO_UPDATE pulse.
        self.dev.io_update.pulse_mu(8)
    @kernel
    def sync(self):
        self.core.break_realtime()
        self.dev.init()
        self.dev.set_sync_div(2)
    @kernel
    def profile(self):
        self.core.break_realtime()
        self.dev.init()
        self.dev.set_profile(7)
        self.dev.set_profile(0)
class UrukulTest(ExperimentCase):
    """Executes each UrukulExp kernel on hardware and checks its datasets."""
    def test_instantiate(self):
        self.execute(UrukulExp, "instantiate")
    def test_init(self):
        self.execute(UrukulExp, "init")
    def test_cfg_write(self):
        self.execute(UrukulExp, "cfg_write")
    def test_sta_read(self):
        self.execute(UrukulExp, "sta_read")
        sta = self.dataset_mgr.get("sta")
        print(hex(sta))
        # self.assertEqual(urukul.urukul_sta_ifc_mode(sta), 0b0001)
    def test_switches(self):
        self.execute(UrukulExp, "switches")
    def test_switch_speed(self):
        self.execute(UrukulExp, "switch_speed")
        dt = self.dataset_mgr.get("dt")
        print(dt)
        # Each switch toggle should consume well under 5 us of RTIO time.
        self.assertLess(dt, 5 * us)
    def test_switches_readback(self):
        self.execute(UrukulExp, "switches_readback")
        sw_get = urukul.urukul_sta_rf_sw(self.dataset_mgr.get("sta_get"))
        sw_set = self.dataset_mgr.get("sw_set")
        self.assertEqual(sw_get, sw_set)
    def test_att(self):
        self.execute(UrukulExp, "att")
        att_set = self.dataset_mgr.get("att_set")
        self.assertEqual(att_set, self.dataset_mgr.get("att_get"))
        self.assertEqual(att_set, self.dataset_mgr.get("att_reg"))
    def test_att_channel(self):
        self.execute(UrukulExp, "att_channel")
        att_set = self.dataset_mgr.get("att_set")
        self.assertEqual(att_set, self.dataset_mgr.get("att_get"))
        self.assertEqual(att_set, self.dataset_mgr.get("att_reg"))
    def test_att_channel_get(self):
        self.execute(UrukulExp, "att_channel_get")
        att_set = self.dataset_mgr.get("att_set")
        self.assertListEqual(att_set, self.dataset_mgr.get("att_get"))
        att_reg = self.dataset_mgr.get("att_reg")
        # att_reg packs one byte per channel, channel 0 in the low byte.
        for att in att_set:
            self.assertEqual(att, att_reg & 0xff)
            att_reg >>= 8
    def test_att_speed(self):
        self.execute(UrukulExp, "att_speed")
        dt = self.dataset_mgr.get("dt")
        print(dt)
        self.assertLess(dt, 5 * us)
    def METHOD_NAME(self):
        self.execute(UrukulExp, "io_update")
    def test_sync(self):
        self.execute(UrukulExp, "sync")
    def test_profile(self):
        self.execute(UrukulExp, "profile")
6,095 | build schema on 200 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "network route-table route show",
)
class Show(AAZCommand):
    """Get the details of a route in a route table.

    :example: Get the details of a route in a route table.
        az network route-table route show -g MyResourceGroup --route-table-name MyRouteTable -n MyRoute -o table
    """
    # Generated metadata: maps the command to its ARM resource and API version.
    _aaz_info = {
        "version": "2018-11-01",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/routetables/{}/routes/{}", "2018-11-01"],
        ]
    }
    def _handler(self, command_args):
        """Framework entry point: parse args, run the operation, return output."""
        super()._handler(command_args)
        self._execute_operations()
        return self._output()
    _args_schema = None
    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        """Build (once, then cache on the class) the argument schema."""
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.name = AAZStrArg(
            options=["-n", "--name"],
            help="Route name.",
            required=True,
            id_part="child_name_1",
        )
        _args_schema.route_table_name = AAZStrArg(
            options=["--route-table-name"],
            help="Route table name.",
            required=True,
            id_part="name",
        )
        return cls._args_schema
    def _execute_operations(self):
        """Run the single GET operation, wrapped in the pre/post hooks."""
        self.pre_operations()
        self.RoutesGet(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        # Extension point: runs before the HTTP operation.
        pass
    @register_callback
    def post_operations(self):
        # Extension point: runs after the HTTP operation.
        pass
    def _output(self, *args, **kwargs):
        """Flatten and return the deserialized response stored in ctx."""
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
        return result
    class RoutesGet(AAZHttpOperation):
        """GET a single route resource from a route table."""
        CLIENT_TYPE = "MgmtClient"
        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.on_200(session)
            return self.on_error(session.http_response)
        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}",
                **self.url_parameters
            )
        @property
        def method(self):
            return "GET"
        @property
        def error_format(self):
            return "MgmtErrorFormat"
        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "routeName", self.ctx.args.name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "routeTableName", self.ctx.args.route_table_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters
        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2018-11-01",
                    required=True,
                ),
            }
            return parameters
        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters
        def on_200(self, session):
            """Deserialize a 200 response body into ``ctx.vars.instance``."""
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self.METHOD_NAME
            )
        _schema_on_200 = None
        @classmethod
        def METHOD_NAME(cls):
            """Build (once, then cache) the schema of the 200 response body."""
            if cls._schema_on_200 is not None:
                return cls._schema_on_200
            cls._schema_on_200 = AAZObjectType()
            _schema_on_200 = cls._schema_on_200
            _schema_on_200.etag = AAZStrType()
            _schema_on_200.id = AAZStrType()
            _schema_on_200.name = AAZStrType()
            _schema_on_200.properties = AAZObjectType(
                flags={"client_flatten": True},
            )
            properties = cls._schema_on_200.properties
            properties.address_prefix = AAZStrType(
                serialized_name="addressPrefix",
            )
            properties.next_hop_ip_address = AAZStrType(
                serialized_name="nextHopIpAddress",
            )
            properties.next_hop_type = AAZStrType(
                serialized_name="nextHopType",
                flags={"required": True},
            )
            properties.provisioning_state = AAZStrType(
                serialized_name="provisioningState",
            )
            return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show"""
# Public API of this generated module.
__all__ = ["Show"]
6,096 | test null | # bluemira is an integrated inter-disciplinary design tool for future fusion
# reactors. It incorporates several modules, some of which rely on other
# codes, to carry out a range of typical conceptual fusion reactor design
# activities.
#
# Copyright (C) 2021-2023 M. Coleman, J. Cook, F. Franza, I.A. Maione, S. McIntosh,
# J. Morris, D. Short
#
# bluemira is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# bluemira is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with bluemira; if not, see <https://www.gnu.org/licenses/>.
import numpy as np
import pytest
from bluemira.geometry.bound_box import BoundingBox
from bluemira.geometry.face import BluemiraFace
from bluemira.geometry.parameterisations import PolySpline
from bluemira.geometry.tools import boolean_cut, make_polygon, revolve_shape
class TestBoundingBox:
    """Unit tests for ``BoundingBox.from_xyz`` on synthetic point clouds."""

    def METHOD_NAME(self):
        """An all-zero point cloud collapses the bounding box to the origin."""
        x, y, z = np.zeros(100), np.zeros(100), np.zeros(100)
        xb, yb, zb = BoundingBox.from_xyz(x, y, z).get_box_arrays()
        assert np.all(xb == 0)
        assert np.all(yb == 0)
        assert np.all(zb == 0)

    def test_random(self):
        """Random cloud in [0, 1) with 8 injected extreme corners at +/-2.

        The corner values must be planted at 8 *distinct* indices:
        sampling with replacement (the previous ``np.random.randint``)
        could overwrite corners, occasionally losing an extremum and
        making the test flaky.
        """
        x, y, z = np.random.rand(100), np.random.rand(100), np.random.rand(100)
        # Distinct indices; duplicates would silently drop some corners.
        args = np.random.choice(100, 8, replace=False)
        x[args] = np.array([-2, -2, -2, -2, 2, 2, 2, 2])
        y[args] = np.array([-2, -2, 2, 2, 2, -2, -2, 2])
        z[args] = np.array([-2, 2, -2, 2, -2, 2, -2, 2])
        xb, yb, zb = BoundingBox.from_xyz(x, y, z).get_box_arrays()
        assert np.allclose(xb, np.array([-2, -2, -2, -2, 2, 2, 2, 2]))
        assert np.allclose(yb, np.array([-2, -2, 2, 2, -2, -2, 2, 2]))
        assert np.allclose(zb, np.array([-2, 2, -2, 2, -2, 2, -2, 2]))
class TestHardBoundingBox:
    """Tests for bounding boxes of shapes whose default (tessellation-based)
    box is known to be inaccurate."""
    # D-shaped PolySpline used as the base geometry.
    ps = PolySpline(
        {
            "bottom": {"value": 0.509036},
            "flat": {"value": 1},
            "height": {"value": 10.1269},
            "lower": {"value": 0.2},
            "tilt": {"value": 19.6953},
            "top": {"value": 0.46719},
            "upper": {"value": 0.326209},
            "x1": {"value": 5},
            "x2": {"value": 11.8222},
            "z2": {"value": -0.170942},
        }
    )
    # Rectangular face used to cut the spline off flat at z = -5.
    cut_box = BluemiraFace(
        make_polygon({"x": [0, 15, 15, 0], "z": [-7, -7, -5, -5], "y": 0}, closed=True)
    )
    wire = boolean_cut(ps.create_shape(), cut_box)[0]
    temp = wire.deepcopy()
    temp.close()
    # Partial revolution of the closed face, so the true z-min stays -5.
    solid = revolve_shape(BluemiraFace(temp), degree=159)
    @pytest.mark.xfail
    def test_bad_bounding_box(self):
        # Known-bad: the default bounding box underestimates z_min.
        assert np.isclose(self.wire.bounding_box.z_min, -5.0)
    @pytest.mark.parametrize("tol", [10.0, 1.0, 0.1, 0.001])
    def test_opt_bounding_box(self, tol):
        """The optimal box finds z_min = -5 across a range of tolerances."""
        bb = self.wire.get_optimal_bounding_box(tolerance=tol)
        assert np.isclose(bb.z_min, -5.0)
    @pytest.mark.parametrize("tol", [10.0, 1.0, 0.1, 0.01])
    def test_opt_bounding_box_solid(self, tol):
        """Same check on the solid; also verifies tessellation has no side effects."""
        solid = self.solid.deepcopy()
        vertices, indices = solid._tessellate(1.0)
        bb = self.solid.get_optimal_bounding_box(tolerance=tol)
        vertices2, indices2 = solid._tessellate(1.0)
        assert np.isclose(bb.z_min, -5.0)
        # Test that bounding box via tesselation did not modify properties
        np.testing.assert_allclose(vertices, vertices2)
        np.testing.assert_allclose(indices, indices2)
        # A finer tolerance must still produce a different tessellation.
        vertices3, _ = solid._tessellate(0.01)
        assert vertices3.shape != vertices2.shape
    @pytest.mark.parametrize("tol", [0.0, -1e-9])
    def test_bad_tolerace(self, tol):
        # Non-positive tolerances are rejected.
        with pytest.raises(ValueError):
            self.wire.get_optimal_bounding_box(tolerance=tol)
6,097 | run | # Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import os
import time
from copy import deepcopy
from typing import Any
from streamlit.proto.ClientState_pb2 import ClientState
from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
from streamlit.proto.WidgetStates_pb2 import WidgetStates
from streamlit.runtime.forward_msg_queue import ForwardMsgQueue
from streamlit.runtime.memory_uploaded_file_manager import MemoryUploadedFileManager
from streamlit.runtime.scriptrunner import RerunData, ScriptRunner, ScriptRunnerEvent
from streamlit.runtime.scriptrunner.script_cache import ScriptCache
from streamlit.runtime.state.session_state import SessionState
from streamlit.testing.element_tree import ElementTree, parse_tree_from_messages
class LocalScriptRunner(ScriptRunner):
    """Subclasses ScriptRunner to provide some testing features."""
    def __init__(
        self,
        script_path: str,
        prev_session_state: SessionState | None = None,
    ):
        """Initializes the ScriptRunner for the given script_name"""
        assert os.path.isfile(script_path), f"File not found at {script_path}"
        self.forward_msg_queue = ForwardMsgQueue()
        self.script_path = script_path
        # Deep-copy so reruns cannot mutate the caller's session state.
        if prev_session_state is not None:
            self.session_state = deepcopy(prev_session_state)
        else:
            self.session_state = SessionState()
        super().__init__(
            session_id="test session id",
            main_script_path=script_path,
            client_state=ClientState(),
            session_state=self.session_state,
            uploaded_file_mgr=MemoryUploadedFileManager("/mock/upload"),
            script_cache=ScriptCache(),
            initial_rerun_data=RerunData(),
            user_info={"email": "test@test.com"},
        )
        # Accumulates uncaught exceptions thrown by our run thread.
        self.script_thread_exceptions: list[BaseException] = []
        # Accumulates all ScriptRunnerEvents emitted by us.
        self.events: list[ScriptRunnerEvent] = []
        self.event_data: list[Any] = []
        def record_event(
            sender: ScriptRunner | None, event: ScriptRunnerEvent, **kwargs
        ) -> None:
            """Record each emitted event and forward enqueued messages."""
            # Assert that we're not getting unexpected `sender` params
            # from ScriptRunner.on_event
            assert (
                sender is None or sender == self
            ), "Unexpected ScriptRunnerEvent sender!"
            self.events.append(event)
            self.event_data.append(kwargs)
            # Send ENQUEUE_FORWARD_MSGs to our queue
            if event == ScriptRunnerEvent.ENQUEUE_FORWARD_MSG:
                forward_msg = kwargs["forward_msg"]
                self.forward_msg_queue.enqueue(forward_msg)
        # weak=False keeps the closure alive for the runner's lifetime.
        self.on_event.connect(record_event, weak=False)
    def join(self) -> None:
        """Wait for the script thread to finish, if it is running."""
        if self._script_thread is not None:
            self._script_thread.join()
    def forward_msgs(self) -> list[ForwardMsg]:
        """Return all messages in our ForwardMsgQueue."""
        return self.forward_msg_queue._queue
    def METHOD_NAME(
        self,
        widget_state: WidgetStates | None = None,
        timeout: float = 3,
    ) -> ElementTree:
        """Run the script, and parse the output messages for querying
        and interaction."""
        rerun_data = RerunData(widget_states=widget_state)
        self.request_rerun(rerun_data)
        if not self._script_thread:
            self.start()
        # Block until a stop event is seen (raises RuntimeError on timeout).
        require_widgets_deltas(self, timeout)
        tree = parse_tree_from_messages(self.forward_msgs())
        tree.script_path = self.script_path
        tree._session_state = self.session_state
        return tree
    def script_stopped(self) -> bool:
        """Return True once any script-stopped event has been recorded."""
        for e in self.events:
            if e in (
                ScriptRunnerEvent.SCRIPT_STOPPED_FOR_RERUN,
                ScriptRunnerEvent.SCRIPT_STOPPED_WITH_COMPILE_ERROR,
                ScriptRunnerEvent.SCRIPT_STOPPED_WITH_SUCCESS,
            ):
                return True
        return False
def require_widgets_deltas(runner: "LocalScriptRunner", timeout: float = 3) -> None:
    """Wait for the given ScriptRunner to emit a completion event. If the timeout
    is reached, the runner will be shutdown and an error will be thrown.

    Parameters
    ----------
    runner
        The LocalScriptRunner to poll via its `script_stopped()` method.
    timeout
        Maximum number of seconds to wait.

    Raises
    ------
    RuntimeError
        If the runner has not emitted a completion event within `timeout`
        seconds. The runner is stopped and joined before raising so the
        script thread doesn't hang forever.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        # Check *before* sleeping so an already-stopped runner returns
        # immediately instead of always paying the 0.1s poll interval.
        if runner.script_stopped():
            return
        time.sleep(0.1)
    # If we get here, the runner hasn't completed before our timeout.
    # (Fixed: the original message carried a stray closing parenthesis.)
    err_string = f"require_widgets_deltas() timed out after {timeout}s"
    # Shutdown the runner before throwing, so the script doesn't hang forever.
    runner.request_stop()
    runner.join()
    raise RuntimeError(err_string)
6,098 | build schema on 200 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "mobile-network slice wait",
)
class Wait(AAZWaitCommand):
    """Place the CLI in a waiting state until a condition is met.
    """
    # Generated by aaz-dev-tools (see file header); the resource path below is
    # the single ARM resource this wait command polls.
    _aaz_info = {
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.mobilenetwork/mobilenetworks/{}/slices/{}", "2022-11-01"],
        ]
    }
    def _handler(self, command_args):
        # Standard aaz command flow: parse/validate args, run the GET
        # operation, then serialize the result for output.
        super()._handler(command_args)
        self._execute_operations()
        return self._output()
    _args_schema = None
    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Lazily build (and cache on the class) the CLI argument schema.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.mobile_network_name = AAZStrArg(
            options=["--mobile-network-name"],
            help="The name of the mobile network.",
            required=True,
            id_part="name",
            fmt=AAZStrArgFormat(
                pattern="^[a-zA-Z0-9][a-zA-Z0-9_-]*$",
                max_length=64,
            ),
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.slice_name = AAZStrArg(
            options=["-n", "--name", "--slice-name"],
            help="The name of the network slice.",
            required=True,
            id_part="child_name_1",
            fmt=AAZStrArgFormat(
                pattern="^[a-zA-Z0-9][a-zA-Z0-9_-]*$",
                max_length=64,
            ),
        )
        return cls._args_schema
    def _execute_operations(self):
        self.pre_operations()
        self.SlicesGet(ctx=self.ctx)()
        self.post_operations()
    # pre/post hooks are extension points; generated commands leave them empty.
    @register_callback
    def pre_operations(self):
        pass
    @register_callback
    def post_operations(self):
        pass
    def _output(self, *args, **kwargs):
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=False)
        return result
    class SlicesGet(AAZHttpOperation):
        # GET of a single network slice; the 200 response body is stored as
        # the command context's `instance` variable.
        CLIENT_TYPE = "MgmtClient"
        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.on_200(session)
            # Any non-200 status is surfaced via the shared error handler.
            return self.on_error(session.http_response)
        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MobileNetwork/mobileNetworks/{mobileNetworkName}/slices/{sliceName}",
                **self.url_parameters
            )
        @property
        def method(self):
            return "GET"
        @property
        def error_format(self):
            return "MgmtErrorFormat"
        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "mobileNetworkName", self.ctx.args.mobile_network_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "sliceName", self.ctx.args.slice_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters
        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2022-11-01",
                    required=True,
                ),
            }
            return parameters
        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters
        def on_200(self, session):
            # Deserialize the 200 body and store it for _output(), validated
            # against the lazily-built schema below.
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self.METHOD_NAME
            )
        _schema_on_200 = None
        @classmethod
        def METHOD_NAME(cls):
            # Lazily build (and cache on the class) the response schema for a
            # 200 reply: an ARM tracked resource whose `properties` carry the
            # slice's S-NSSAI and provisioning state.
            if cls._schema_on_200 is not None:
                return cls._schema_on_200
            cls._schema_on_200 = AAZObjectType()
            _schema_on_200 = cls._schema_on_200
            _schema_on_200.id = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.location = AAZStrType(
                flags={"required": True},
            )
            _schema_on_200.name = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.properties = AAZObjectType(
                flags={"required": True, "client_flatten": True},
            )
            _schema_on_200.system_data = AAZObjectType(
                serialized_name="systemData",
                flags={"read_only": True},
            )
            _schema_on_200.tags = AAZDictType()
            _schema_on_200.type = AAZStrType(
                flags={"read_only": True},
            )
            properties = cls._schema_on_200.properties
            properties.description = AAZStrType()
            properties.provisioning_state = AAZStrType(
                serialized_name="provisioningState",
                flags={"read_only": True},
            )
            properties.snssai = AAZObjectType(
                flags={"required": True},
            )
            snssai = cls._schema_on_200.properties.snssai
            snssai.sd = AAZStrType()
            snssai.sst = AAZIntType(
                flags={"required": True},
            )
            system_data = cls._schema_on_200.system_data
            system_data.created_at = AAZStrType(
                serialized_name="createdAt",
            )
            system_data.created_by = AAZStrType(
                serialized_name="createdBy",
            )
            system_data.created_by_type = AAZStrType(
                serialized_name="createdByType",
            )
            system_data.last_modified_at = AAZStrType(
                serialized_name="lastModifiedAt",
            )
            system_data.last_modified_by = AAZStrType(
                serialized_name="lastModifiedBy",
            )
            system_data.last_modified_by_type = AAZStrType(
                serialized_name="lastModifiedByType",
            )
            tags = cls._schema_on_200.tags
            tags.Element = AAZStrType()
            return cls._schema_on_200
class _WaitHelper:
    """Helper class for Wait"""
    # NOTE(review): appears to be a generated placeholder from aaz-dev-tools
    # (see file header); this command needs no shared helpers, so it is empty.
__all__ = ["Wait"]
6,099 | test linear |
from nutils import transform, evaluable, numeric, types
from nutils.testing import TestCase
import numpy
class specialcases(TestCase):
    def test_tensoredge_swapup_index(self):
        # Both tensor-edge wrappings of a 1D simplex edge must refuse to swap
        # up past an Index transform (swapup returns None).
        edge1d = transform.SimplexEdge(1, 0, False)
        tensor_edges = (
            transform.TensorEdge1(edge1d, 1),
            transform.TensorEdge2(1, edge1d),
        )
        for tensor_edge in tensor_edges:
            with self.subTest(type(tensor_edge).__name__):
                self.assertIsNone(tensor_edge.swapup(transform.Index(1, 0)))
class TestTransform(TestCase):
    # Base template: concrete fixtures below call setUp with a transform plus
    # its expected linear matrix and offset vector. Deleted at module bottom
    # so the template itself is not collected as a test case.
    def setUp(self, trans, linear, offset):
        super().setUp()
        self.trans = trans
        self.linear = linear
        self.offset = offset
    def test_fromdims(self):
        # fromdims must match the column count of the linear part.
        self.assertEqual(self.trans.fromdims, numpy.shape(self.linear)[1])
    def test_todims(self):
        # todims must match the row count of the linear part.
        self.assertEqual(self.trans.todims, numpy.shape(self.linear)[0])
    def METHOD_NAME(self):
        # The transform must expose exactly the expected linear matrix.
        self.assertAllEqual(self.trans.linear, self.linear)
    def test_offset(self):
        self.assertAllEqual(self.trans.offset, self.offset)
    def test_apply(self):
        # Map the origin and one interior point: the origin must land on
        # `offset` and a general point on `linear @ point + offset`.
        coords = numpy.array([[0]*self.trans.fromdims, numpy.arange(.5, self.trans.fromdims)/self.trans.fromdims])
        a, b = self.trans.apply(coords)
        self.assertAllAlmostEqual(a, self.offset)
        self.assertAllAlmostEqual(b, numpy.dot(self.linear, coords[1]) + self.offset)
class TestInvertible(TestTransform):
    # Template extension for square (invertible) transforms.
    def test_invapply(self):
        # invapply must undo apply: `offset` maps back to the origin and a
        # general point to linear^{-1} (point - offset).
        coords = numpy.array([self.offset, numpy.arange(.5, self.trans.fromdims)/self.trans.fromdims])
        a, b = self.trans.invapply(coords)
        self.assertAllAlmostEqual(a, numpy.zeros((self.trans.todims,)))
        self.assertAllAlmostEqual(b, numpy.linalg.solve(self.linear, (coords[1] - self.offset)))
class TestUpdim(TestTransform):
    # Template extension for dimension-raising transforms, which expose an
    # exterior vector `ext` derived from the (non-square) linear part.
    def test_ext(self):
        ext = numeric.ext(self.linear)
        self.assertAllAlmostEqual(ext, self.trans.ext)
# Concrete fixtures: each subclass plugs one transform instance and its
# expected linear/offset into the shared test templates.
class Matrix(TestTransform):
    def setUp(self):
        super().setUp(trans=transform.Matrix(types.arraydata([[1.], [2]]), types.arraydata([3., 4])), linear=[[1], [2]], offset=[3, 4])
# NOTE(review): "Qquare" looks like a typo for "Square"; left unchanged since
# renaming would alter the public (collected) test-class name.
class Qquare(TestInvertible):
    def setUp(self):
        super().setUp(trans=transform.Square(types.arraydata([[1., 2], [1, 3]]), types.arraydata([5., 6])), linear=[[1, 2], [1, 3]], offset=[5, 6])
class Identity(TestInvertible):
    def setUp(self):
        super().setUp(trans=transform.Identity(2), linear=[[1, 0], [0, 1]], offset=[0, 0])
class Index(TestInvertible):
    def setUp(self):
        super().setUp(trans=transform.Index(2, 3), linear=[[1, 0], [0, 1]], offset=[0, 0])
class SimplexEdge(TestUpdim):
    def setUp(self):
        super().setUp(trans=transform.SimplexEdge(3, 0), linear=[[-1., -1], [1, 0], [0, 1]], offset=[1, 0, 0])
class SimplexChild(TestInvertible):
    def setUp(self):
        super().setUp(trans=transform.SimplexChild(3, 1), linear=numpy.eye(3)/2, offset=[.5, 0, 0])
class Point(TestTransform):
    def setUp(self):
        # A Point maps a 0-dimensional domain onto a fixed 3D coordinate.
        super().setUp(trans=transform.Point(types.arraydata([1., 2., 3.])), linear=numpy.zeros((3, 0)), offset=[1., 2., 3.])
class swaps(TestCase):
    """Verify that reordering a transform chain (canonical / promote /
    uppermost) yields the expected sequence AND still maps the reference
    midpoint to the same physical point."""
    def setUp(self):
        self.chain = transform.SimplexChild(3, 2), transform.SimplexEdge(3, 0), transform.SimplexChild(2, 1), transform.SimplexChild(2, 1), transform.SimplexEdge(2, 0)
    def assertMidpoint(self, chain):
        # Apply the (possibly reordered) chain to the 1D reference midpoint.
        # Fixed: the original applied `self.chain` instead of the `chain`
        # argument, so the reordered chains were never actually exercised.
        midpoint = transform.apply(chain, numpy.array([.5]))
        self.assertEqual(midpoint.tolist(), [0, 0.9375, 0.0625])
    def test_canonical(self):
        canonical = transform.SimplexEdge(3, 0), transform.SimplexEdge(2, 0), transform.SimplexChild(1, 0), transform.SimplexChild(1, 0), transform.SimplexChild(1, 0)
        self.assertEqual(transform.canonical(self.chain), canonical)
        self.assertMidpoint(canonical)
        self.assertTrue(transform.iscanonical(canonical))
    def test_promote(self):
        promote = transform.SimplexEdge(3, 0), transform.SimplexChild(2, 1), transform.SimplexChild(2, 1), transform.SimplexChild(2, 1), transform.SimplexEdge(2, 0)
        self.assertEqual(transform.promote(self.chain, 2), promote)
        self.assertMidpoint(promote)
        self.assertFalse(transform.iscanonical(promote))
    def test_uppermost(self):
        uppermost = transform.SimplexChild(3, 2), transform.SimplexChild(3, 2), transform.SimplexChild(3, 2), transform.SimplexEdge(3, 0), transform.SimplexEdge(2, 0)
        self.assertEqual(transform.uppermost(self.chain), uppermost)
        self.assertMidpoint(uppermost)
        self.assertFalse(transform.iscanonical(uppermost))
del TestTransform, TestInvertible, TestUpdim
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.