hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6a36aa38adac9c65674828580c7773cd140ced17
| 2,314
|
py
|
Python
|
proyecto_final/encuestas/admin.py
|
wzorroman/proy_ccbv
|
7b090923b06251b28d545d29e92b1b933de0515f
|
[
"MIT"
] | null | null | null |
proyecto_final/encuestas/admin.py
|
wzorroman/proy_ccbv
|
7b090923b06251b28d545d29e92b1b933de0515f
|
[
"MIT"
] | null | null | null |
proyecto_final/encuestas/admin.py
|
wzorroman/proy_ccbv
|
7b090923b06251b28d545d29e92b1b933de0515f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Encuesta, Pregunta, Respuesta
class EncuestaAdmin(admin.ModelAdmin):
list_display = ('id', 'titulo', 'descripcion',
'format_creado', 'format_actualizado')
search_fields = ('titulo', 'descripcion')
list_filter = ('actualizado',)
def format_creado(self, obj):
return obj.creado.strftime("%d/%m/%Y %H:%M:%S") if obj.creado else '-'
def format_actualizado(self, obj):
return obj.actualizado.strftime("%d/%m/%Y %H:%M:%S") if obj.actualizado else '-'
format_creado.short_description = 'Creado'
format_creado.admin_order_field = 'creado'
format_actualizado.short_description = 'Actualizado'
format_actualizado.admin_order_field = 'actualizado'
class PreguntaAdmin(admin.ModelAdmin):
list_display=('id', 'encuesta', 'pregunta', 'pregunta_previa', 'pregunta_sgte',
'format_creado', 'format_actualizado')
search_fields = ('id', 'pregunta')
list_filter = ('actualizado',)
def format_creado(self, obj):
return obj.creado.strftime("%d/%m/%Y %H:%M:%S") if obj.creado else '-'
def format_actualizado(self, obj):
return obj.actualizado.strftime("%d/%m/%Y %H:%M:%S") if obj.actualizado else '-'
format_creado.short_description = 'Creado'
format_creado.admin_order_field = 'creado'
format_actualizado.short_description = 'Actualizado'
format_actualizado.admin_order_field = 'actualizado'
class RespuestaAdmin(admin.ModelAdmin):
list_display = ('id', 'usuario', 'pregunta', 'respuesta', 'format_creado',
'format_actualizado')
search_fields = ('id', 'pregunta')
list_filter = ('actualizado',)
def format_creado(self, obj):
return obj.creado.strftime("%d/%m/%Y %H:%M:%S") if obj.creado else '-'
def format_actualizado(self, obj):
return obj.actualizado.strftime("%d/%m/%Y %H:%M:%S") if obj.actualizado else '-'
format_creado.short_description = 'Creado'
format_creado.admin_order_field = 'creado'
format_actualizado.short_description = 'Actualizado'
format_actualizado.admin_order_field = 'actualizado'
admin.site.register(Encuesta, EncuestaAdmin)
admin.site.register(Pregunta, PreguntaAdmin)
admin.site.register(Respuesta, RespuestaAdmin)
| 36.730159
| 88
| 0.685393
| 271
| 2,314
| 5.656827
| 0.169742
| 0.093933
| 0.09002
| 0.062622
| 0.784736
| 0.729941
| 0.703196
| 0.703196
| 0.703196
| 0.703196
| 0
| 0
| 0.174157
| 2,314
| 62
| 89
| 37.322581
| 0.802198
| 0
| 0
| 0.704545
| 0
| 0
| 0.200519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0
| 0.045455
| 0.136364
| 0.590909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
6a4cbee311cb4ee2b9ad1d4e6e3bc9b6f3ba6d38
| 18,894
|
py
|
Python
|
sdk/python/pulumi_keycloak/oidc/identity_provider.py
|
jaxxstorm/pulumi-keycloak
|
2fc7b1060b725a40d2ada745aa0d10130243a0b5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_keycloak/oidc/identity_provider.py
|
jaxxstorm/pulumi-keycloak
|
2fc7b1060b725a40d2ada745aa0d10130243a0b5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_keycloak/oidc/identity_provider.py
|
jaxxstorm/pulumi-keycloak
|
2fc7b1060b725a40d2ada745aa0d10130243a0b5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class IdentityProvider(pulumi.CustomResource):
accepts_prompt_none_forward_from_client: pulumi.Output[bool]
"""
This is just used together with Identity Provider Authenticator or when kc_idp_hint points to this identity provider. In
case that client sends a request with prompt=none and user is not yet authenticated, the error will not be directly
returned to client, but the request with prompt=none will be forwarded to this identity provider.
"""
add_read_token_role_on_create: pulumi.Output[bool]
"""
Enable/disable if new users can read any stored tokens. This assigns the broker.read-token role.
"""
alias: pulumi.Output[str]
"""
The alias uniquely identifies an identity provider and it is also used to build the redirect uri.
"""
authenticate_by_default: pulumi.Output[bool]
"""
Enable/disable authenticate users by default.
"""
authorization_url: pulumi.Output[str]
"""
OIDC authorization URL.
"""
backchannel_supported: pulumi.Output[bool]
"""
Does the external IDP support backchannel logout?
"""
client_id: pulumi.Output[str]
"""
Client ID.
"""
client_secret: pulumi.Output[str]
"""
Client Secret.
"""
default_scopes: pulumi.Output[str]
"""
The scopes to be sent when asking for authorization. It can be a space-separated list of scopes. Defaults to 'openid'.
"""
display_name: pulumi.Output[str]
"""
Friendly name for Identity Providers.
"""
enabled: pulumi.Output[bool]
"""
Enable/disable this identity provider.
"""
extra_config: pulumi.Output[dict]
first_broker_login_flow_alias: pulumi.Output[str]
"""
Alias of authentication flow, which is triggered after first login with this identity provider. Term 'First Login' means
that there is not yet existing Keycloak account linked with the authenticated identity provider account.
"""
hide_on_login_page: pulumi.Output[bool]
"""
Hide On Login Page.
"""
internal_id: pulumi.Output[str]
"""
Internal Identity Provider Id
"""
jwks_url: pulumi.Output[str]
"""
JSON Web Key Set URL
"""
link_only: pulumi.Output[bool]
"""
If true, users cannot log in through this provider. They can only link to this provider. This is useful if you don't
want to allow login from the provider, but want to integrate with a provider
"""
login_hint: pulumi.Output[str]
"""
Login Hint.
"""
logout_url: pulumi.Output[str]
"""
Logout URL
"""
post_broker_login_flow_alias: pulumi.Output[str]
"""
Alias of authentication flow, which is triggered after each login with this identity provider. Useful if you want
additional verification of each user authenticated with this identity provider (for example OTP). Leave this empty if
you don't want any additional authenticators to be triggered after login with this identity provider. Also note, that
authenticator implementations must assume that user is already set in ClientSession as identity provider already set it.
"""
provider_id: pulumi.Output[str]
"""
provider id, is always oidc, unless you have a custom implementation
"""
realm: pulumi.Output[str]
"""
Realm Name
"""
store_token: pulumi.Output[bool]
"""
Enable/disable if tokens must be stored after authenticating users.
"""
token_url: pulumi.Output[str]
"""
Token URL.
"""
trust_email: pulumi.Output[bool]
"""
If enabled then email provided by this provider is not verified even if verification is enabled for the realm.
"""
ui_locales: pulumi.Output[bool]
"""
Pass current locale to identity provider
"""
user_info_url: pulumi.Output[str]
"""
User Info URL
"""
validate_signature: pulumi.Output[bool]
"""
Enable/disable signature validation of external IDP signatures.
"""
def __init__(__self__, resource_name, opts=None, accepts_prompt_none_forward_from_client=None, add_read_token_role_on_create=None, alias=None, authenticate_by_default=None, authorization_url=None, backchannel_supported=None, client_id=None, client_secret=None, default_scopes=None, display_name=None, enabled=None, extra_config=None, first_broker_login_flow_alias=None, hide_on_login_page=None, jwks_url=None, link_only=None, login_hint=None, logout_url=None, post_broker_login_flow_alias=None, provider_id=None, realm=None, store_token=None, token_url=None, trust_email=None, ui_locales=None, user_info_url=None, validate_signature=None, __props__=None, __name__=None, __opts__=None):
"""
Create a IdentityProvider resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] accepts_prompt_none_forward_from_client: This is just used together with Identity Provider Authenticator or when kc_idp_hint points to this identity provider. In
case that client sends a request with prompt=none and user is not yet authenticated, the error will not be directly
returned to client, but the request with prompt=none will be forwarded to this identity provider.
:param pulumi.Input[bool] add_read_token_role_on_create: Enable/disable if new users can read any stored tokens. This assigns the broker.read-token role.
:param pulumi.Input[str] alias: The alias uniquely identifies an identity provider and it is also used to build the redirect uri.
:param pulumi.Input[bool] authenticate_by_default: Enable/disable authenticate users by default.
:param pulumi.Input[str] authorization_url: OIDC authorization URL.
:param pulumi.Input[bool] backchannel_supported: Does the external IDP support backchannel logout?
:param pulumi.Input[str] client_id: Client ID.
:param pulumi.Input[str] client_secret: Client Secret.
:param pulumi.Input[str] default_scopes: The scopes to be sent when asking for authorization. It can be a space-separated list of scopes. Defaults to 'openid'.
:param pulumi.Input[str] display_name: Friendly name for Identity Providers.
:param pulumi.Input[bool] enabled: Enable/disable this identity provider.
:param pulumi.Input[str] first_broker_login_flow_alias: Alias of authentication flow, which is triggered after first login with this identity provider. Term 'First Login' means
that there is not yet existing Keycloak account linked with the authenticated identity provider account.
:param pulumi.Input[bool] hide_on_login_page: Hide On Login Page.
:param pulumi.Input[str] jwks_url: JSON Web Key Set URL
:param pulumi.Input[bool] link_only: If true, users cannot log in through this provider. They can only link to this provider. This is useful if you don't
want to allow login from the provider, but want to integrate with a provider
:param pulumi.Input[str] login_hint: Login Hint.
:param pulumi.Input[str] logout_url: Logout URL
:param pulumi.Input[str] post_broker_login_flow_alias: Alias of authentication flow, which is triggered after each login with this identity provider. Useful if you want
additional verification of each user authenticated with this identity provider (for example OTP). Leave this empty if
you don't want any additional authenticators to be triggered after login with this identity provider. Also note, that
authenticator implementations must assume that user is already set in ClientSession as identity provider already set it.
:param pulumi.Input[str] provider_id: provider id, is always oidc, unless you have a custom implementation
:param pulumi.Input[str] realm: Realm Name
:param pulumi.Input[bool] store_token: Enable/disable if tokens must be stored after authenticating users.
:param pulumi.Input[str] token_url: Token URL.
:param pulumi.Input[bool] trust_email: If enabled then email provided by this provider is not verified even if verification is enabled for the realm.
:param pulumi.Input[bool] ui_locales: Pass current locale to identity provider
:param pulumi.Input[str] user_info_url: User Info URL
:param pulumi.Input[bool] validate_signature: Enable/disable signature validation of external IDP signatures.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['accepts_prompt_none_forward_from_client'] = accepts_prompt_none_forward_from_client
__props__['add_read_token_role_on_create'] = add_read_token_role_on_create
if alias is None:
raise TypeError("Missing required property 'alias'")
__props__['alias'] = alias
__props__['authenticate_by_default'] = authenticate_by_default
if authorization_url is None:
raise TypeError("Missing required property 'authorization_url'")
__props__['authorization_url'] = authorization_url
__props__['backchannel_supported'] = backchannel_supported
if client_id is None:
raise TypeError("Missing required property 'client_id'")
__props__['client_id'] = client_id
if client_secret is None:
raise TypeError("Missing required property 'client_secret'")
__props__['client_secret'] = client_secret
__props__['default_scopes'] = default_scopes
__props__['display_name'] = display_name
__props__['enabled'] = enabled
__props__['extra_config'] = extra_config
__props__['first_broker_login_flow_alias'] = first_broker_login_flow_alias
__props__['hide_on_login_page'] = hide_on_login_page
__props__['jwks_url'] = jwks_url
__props__['link_only'] = link_only
__props__['login_hint'] = login_hint
__props__['logout_url'] = logout_url
__props__['post_broker_login_flow_alias'] = post_broker_login_flow_alias
__props__['provider_id'] = provider_id
if realm is None:
raise TypeError("Missing required property 'realm'")
__props__['realm'] = realm
__props__['store_token'] = store_token
if token_url is None:
raise TypeError("Missing required property 'token_url'")
__props__['token_url'] = token_url
__props__['trust_email'] = trust_email
__props__['ui_locales'] = ui_locales
__props__['user_info_url'] = user_info_url
__props__['validate_signature'] = validate_signature
__props__['internal_id'] = None
super(IdentityProvider, __self__).__init__(
'keycloak:oidc/identityProvider:IdentityProvider',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, accepts_prompt_none_forward_from_client=None, add_read_token_role_on_create=None, alias=None, authenticate_by_default=None, authorization_url=None, backchannel_supported=None, client_id=None, client_secret=None, default_scopes=None, display_name=None, enabled=None, extra_config=None, first_broker_login_flow_alias=None, hide_on_login_page=None, internal_id=None, jwks_url=None, link_only=None, login_hint=None, logout_url=None, post_broker_login_flow_alias=None, provider_id=None, realm=None, store_token=None, token_url=None, trust_email=None, ui_locales=None, user_info_url=None, validate_signature=None):
"""
Get an existing IdentityProvider resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] accepts_prompt_none_forward_from_client: This is just used together with Identity Provider Authenticator or when kc_idp_hint points to this identity provider. In
case that client sends a request with prompt=none and user is not yet authenticated, the error will not be directly
returned to client, but the request with prompt=none will be forwarded to this identity provider.
:param pulumi.Input[bool] add_read_token_role_on_create: Enable/disable if new users can read any stored tokens. This assigns the broker.read-token role.
:param pulumi.Input[str] alias: The alias uniquely identifies an identity provider and it is also used to build the redirect uri.
:param pulumi.Input[bool] authenticate_by_default: Enable/disable authenticate users by default.
:param pulumi.Input[str] authorization_url: OIDC authorization URL.
:param pulumi.Input[bool] backchannel_supported: Does the external IDP support backchannel logout?
:param pulumi.Input[str] client_id: Client ID.
:param pulumi.Input[str] client_secret: Client Secret.
:param pulumi.Input[str] default_scopes: The scopes to be sent when asking for authorization. It can be a space-separated list of scopes. Defaults to 'openid'.
:param pulumi.Input[str] display_name: Friendly name for Identity Providers.
:param pulumi.Input[bool] enabled: Enable/disable this identity provider.
:param pulumi.Input[str] first_broker_login_flow_alias: Alias of authentication flow, which is triggered after first login with this identity provider. Term 'First Login' means
that there is not yet existing Keycloak account linked with the authenticated identity provider account.
:param pulumi.Input[bool] hide_on_login_page: Hide On Login Page.
:param pulumi.Input[str] internal_id: Internal Identity Provider Id
:param pulumi.Input[str] jwks_url: JSON Web Key Set URL
:param pulumi.Input[bool] link_only: If true, users cannot log in through this provider. They can only link to this provider. This is useful if you don't
want to allow login from the provider, but want to integrate with a provider
:param pulumi.Input[str] login_hint: Login Hint.
:param pulumi.Input[str] logout_url: Logout URL
:param pulumi.Input[str] post_broker_login_flow_alias: Alias of authentication flow, which is triggered after each login with this identity provider. Useful if you want
additional verification of each user authenticated with this identity provider (for example OTP). Leave this empty if
you don't want any additional authenticators to be triggered after login with this identity provider. Also note, that
authenticator implementations must assume that user is already set in ClientSession as identity provider already set it.
:param pulumi.Input[str] provider_id: provider id, is always oidc, unless you have a custom implementation
:param pulumi.Input[str] realm: Realm Name
:param pulumi.Input[bool] store_token: Enable/disable if tokens must be stored after authenticating users.
:param pulumi.Input[str] token_url: Token URL.
:param pulumi.Input[bool] trust_email: If enabled then email provided by this provider is not verified even if verification is enabled for the realm.
:param pulumi.Input[bool] ui_locales: Pass current locale to identity provider
:param pulumi.Input[str] user_info_url: User Info URL
:param pulumi.Input[bool] validate_signature: Enable/disable signature validation of external IDP signatures.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["accepts_prompt_none_forward_from_client"] = accepts_prompt_none_forward_from_client
__props__["add_read_token_role_on_create"] = add_read_token_role_on_create
__props__["alias"] = alias
__props__["authenticate_by_default"] = authenticate_by_default
__props__["authorization_url"] = authorization_url
__props__["backchannel_supported"] = backchannel_supported
__props__["client_id"] = client_id
__props__["client_secret"] = client_secret
__props__["default_scopes"] = default_scopes
__props__["display_name"] = display_name
__props__["enabled"] = enabled
__props__["extra_config"] = extra_config
__props__["first_broker_login_flow_alias"] = first_broker_login_flow_alias
__props__["hide_on_login_page"] = hide_on_login_page
__props__["internal_id"] = internal_id
__props__["jwks_url"] = jwks_url
__props__["link_only"] = link_only
__props__["login_hint"] = login_hint
__props__["logout_url"] = logout_url
__props__["post_broker_login_flow_alias"] = post_broker_login_flow_alias
__props__["provider_id"] = provider_id
__props__["realm"] = realm
__props__["store_token"] = store_token
__props__["token_url"] = token_url
__props__["trust_email"] = trust_email
__props__["ui_locales"] = ui_locales
__props__["user_info_url"] = user_info_url
__props__["validate_signature"] = validate_signature
return IdentityProvider(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 60.171975
| 689
| 0.714565
| 2,503
| 18,894
| 5.105473
| 0.102277
| 0.047343
| 0.066359
| 0.046091
| 0.828077
| 0.804367
| 0.78809
| 0.778073
| 0.74732
| 0.74732
| 0
| 0.000067
| 0.215306
| 18,894
| 313
| 690
| 60.364217
| 0.861864
| 0.397322
| 0
| 0.014925
| 1
| 0
| 0.167203
| 0.047684
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029851
| false
| 0.007463
| 0.044776
| 0.014925
| 0.313433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e03f9934d25c855a46505fbf48390cffb950eaba
| 7,864
|
py
|
Python
|
project/apps/main_site/migrations/0009_auto__add_sale__add_field_milestone_before_pic_thumb_1__add_field_mile.py
|
buddyup/dashboard
|
3c4b9ac32331b0a3bf0bb41acd31f5a4ce053dd8
|
[
"BSD-2-Clause"
] | null | null | null |
project/apps/main_site/migrations/0009_auto__add_sale__add_field_milestone_before_pic_thumb_1__add_field_mile.py
|
buddyup/dashboard
|
3c4b9ac32331b0a3bf0bb41acd31f5a4ce053dd8
|
[
"BSD-2-Clause"
] | null | null | null |
project/apps/main_site/migrations/0009_auto__add_sale__add_field_milestone_before_pic_thumb_1__add_field_mile.py
|
buddyup/dashboard
|
3c4b9ac32331b0a3bf0bb41acd31f5a4ce053dd8
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration for the ``main_site`` app.

    Creates the ``Sale`` model and adds six thumbnail ``ImageField`` columns
    (before/after pics 1-3) to ``Milestone``.

    NOTE(review): this looks machine-generated by South's ``schemamigration``
    command -- the frozen ``models`` dict below is parsed by South itself and
    should not be hand-edited.
    """

    def forwards(self, orm):
        """Apply the migration: create ``main_site_sale`` and add the thumbnail columns."""
        # Adding model 'Sale'
        db.create_table(u'main_site_sale', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            # Default is the timestamp frozen at generation time (2014-11-23), not "now".
            ('recorded_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 11, 23, 0, 0))),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('status', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('logo', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
        ))
        db.send_create_signal(u'main_site', ['Sale'])

        # Adding field 'Milestone.before_pic_thumb_1'
        db.add_column(u'main_site_milestone', 'before_pic_thumb_1',
                      self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'Milestone.before_pic_thumb_2'
        db.add_column(u'main_site_milestone', 'before_pic_thumb_2',
                      self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'Milestone.before_pic_thumb_3'
        db.add_column(u'main_site_milestone', 'before_pic_thumb_3',
                      self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'Milestone.after_pic_thumb_1'
        db.add_column(u'main_site_milestone', 'after_pic_thumb_1',
                      self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'Milestone.after_pic_thumb_2'
        db.add_column(u'main_site_milestone', 'after_pic_thumb_2',
                      self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'Milestone.after_pic_thumb_3'
        db.add_column(u'main_site_milestone', 'after_pic_thumb_3',
                      self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        """Revert the migration: drop ``main_site_sale`` and the thumbnail columns."""
        # Deleting model 'Sale'
        db.delete_table(u'main_site_sale')

        # Deleting field 'Milestone.before_pic_thumb_1'
        db.delete_column(u'main_site_milestone', 'before_pic_thumb_1')

        # Deleting field 'Milestone.before_pic_thumb_2'
        db.delete_column(u'main_site_milestone', 'before_pic_thumb_2')

        # Deleting field 'Milestone.before_pic_thumb_3'
        db.delete_column(u'main_site_milestone', 'before_pic_thumb_3')

        # Deleting field 'Milestone.after_pic_thumb_1'
        db.delete_column(u'main_site_milestone', 'after_pic_thumb_1')

        # Deleting field 'Milestone.after_pic_thumb_2'
        db.delete_column(u'main_site_milestone', 'after_pic_thumb_2')

        # Deleting field 'Milestone.after_pic_thumb_3'
        db.delete_column(u'main_site_milestone', 'after_pic_thumb_3')

    # Frozen ORM snapshot used by South to build the `orm` object passed to
    # forwards()/backwards().  Do not edit by hand.
    models = {
        u'main_site.datapoint': {
            'Meta': {'object_name': 'DataPoint'},
            'buddy_ratio': ('django.db.models.fields.FloatField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'num_active_users': ('django.db.models.fields.IntegerField', [], {}),
            'num_attended_one_event': ('django.db.models.fields.IntegerField', [], {}),
            'num_authenticated': ('django.db.models.fields.IntegerField', [], {}),
            'num_buddies': ('django.db.models.fields.IntegerField', [], {}),
            'num_buddy_requests': ('django.db.models.fields.IntegerField', [], {}),
            'num_filled_in_profile': ('django.db.models.fields.IntegerField', [], {}),
            'num_hit_home_page': ('django.db.models.fields.IntegerField', [], {}),
            'num_total_users': ('django.db.models.fields.IntegerField', [], {}),
            'num_with_one_buddy': ('django.db.models.fields.IntegerField', [], {}),
            'num_with_one_class': ('django.db.models.fields.IntegerField', [], {}),
            'recorded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 23, 0, 0)'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'main_site.milestone': {
            'Meta': {'ordering': "('recorded_at',)", 'object_name': 'Milestone'},
            'after_pic_1': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'after_pic_2': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'after_pic_3': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'after_pic_thumb_1': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'after_pic_thumb_2': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'after_pic_thumb_3': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'before_pic_1': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'before_pic_2': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'before_pic_3': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'before_pic_thumb_1': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'before_pic_thumb_2': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'before_pic_thumb_3': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'recorded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 23, 0, 0)'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'main_site.sale': {
            'Meta': {'object_name': 'Sale'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'recorded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 23, 0, 0)'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['main_site']
| 62.912
| 139
| 0.607579
| 951
| 7,864
| 4.783386
| 0.111462
| 0.087931
| 0.150802
| 0.215432
| 0.867224
| 0.852275
| 0.792922
| 0.744559
| 0.703231
| 0.682568
| 0
| 0.024324
| 0.200153
| 7,864
| 125
| 140
| 62.912
| 0.698887
| 0.075916
| 0
| 0.255556
| 0
| 0
| 0.498759
| 0.266409
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0
| 0.044444
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0773bfd7e1e99121f1ed62dff4a2132081d9a89
| 107
|
py
|
Python
|
porcodio/porcodio.py
|
BranchScope/porcodio
|
dc24493fdb451d0984d11414f37187f945c930b3
|
[
"MIT"
] | 1
|
2021-09-14T09:25:07.000Z
|
2021-09-14T09:25:07.000Z
|
porcodio/porcodio.py
|
BranchScope/porcodio
|
dc24493fdb451d0984d11414f37187f945c930b3
|
[
"MIT"
] | null | null | null |
porcodio/porcodio.py
|
BranchScope/porcodio
|
dc24493fdb451d0984d11414f37187f945c930b3
|
[
"MIT"
] | null | null | null |
import random
from .ascii_list import ascii_list
def get_porcodio():
    """Return one randomly chosen ASCII-art entry from ``ascii_list``."""
    pick = random.choice(ascii_list)
    return pick
| 17.833333
| 36
| 0.794393
| 16
| 107
| 5.0625
| 0.625
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140187
| 107
| 5
| 37
| 21.4
| 0.880435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
0ebe7d40b2e7d3373614d1f2f4a03f2c4a37510d
| 32,550
|
py
|
Python
|
tests/test_reporter.py
|
srhrshr/rsmtool
|
4317f804de82ccb4965c2e7bb185c6ef41458f8e
|
[
"Apache-2.0"
] | 64
|
2016-04-06T15:57:24.000Z
|
2022-03-24T14:17:45.000Z
|
tests/test_reporter.py
|
srhrshr/rsmtool
|
4317f804de82ccb4965c2e7bb185c6ef41458f8e
|
[
"Apache-2.0"
] | 479
|
2016-04-07T03:04:09.000Z
|
2022-03-10T00:39:22.000Z
|
tests/test_reporter.py
|
srhrshr/rsmtool
|
4317f804de82ccb4965c2e7bb185c6ef41458f8e
|
[
"Apache-2.0"
] | 22
|
2016-04-10T06:35:28.000Z
|
2022-02-26T05:03:47.000Z
|
from os.path import join, normpath
from nose.tools import eq_, ok_, raises
from rsmtool.reporter import (Reporter,
comparison_notebook_path,
master_section_dict,
notebook_path,
notebook_path_dict,
summary_notebook_path)
# We are running in test mode: register a placeholder "special" section and a
# placeholder special-notebook path for every context, so the tests behave the
# same whether or not rsmextra is installed.
for context in ['rsmtool', 'rsmeval', 'rsmcompare', 'rsmsummarize']:
    master_section_dict['special'][context].append('placeholder_special_section')
    notebook_path_dict['special'][context] = 'special_notebook_path'

# Short aliases for the general section lists, to keep the tests readable.
general_section_list_rsmtool = master_section_dict['general']['rsmtool']
general_section_list_rsmeval = master_section_dict['general']['rsmeval']
general_section_list_rsmcompare = master_section_dict['general']['rsmcompare']
general_section_list_rsmsummarize = master_section_dict['general']['rsmsummarize']
class TestReporter:
    """Nose-style tests for ``rsmtool.reporter.Reporter``.

    Covers section-name validation, section ordering, the resolution of
    chosen sections (general/special/custom, with and without subgroups)
    for the rsmtool/rsmeval/rsmcompare/rsmsummarize contexts, and the
    mapping of section names to notebook files.

    NOTE(review): uses nose idioms -- ``@raises`` decorators and ``yield``
    test generators -- so these tests are tied to the nose runner.
    """

    def setUp(self):
        # Fresh Reporter per test; nose calls this before every test method.
        self.reporter = Reporter()

    def check_section_lists(self, context):
        """Helper: sanity-check the master section lists for one context."""
        general_sections = master_section_dict['general'][context]
        special_sections = master_section_dict['special'][context]
        overlap = set(general_sections) & set(special_sections)
        # check that there are general section
        ok_(len(general_sections) > 0)
        # check that there is no overlap between general and special section
        # list
        eq_(len(overlap), 0)

    def test_check_section_lists_rsmtool(self):
        # sanity checks to make sure nothing went wrong when generating
        # master section list
        for context in ['rsmtool', 'rsmeval', 'rsmcompare']:
            yield self.check_section_lists, context

    @raises(ValueError)
    def test_check_section_order_not_enough_sections(self):
        # section_order omits the special and custom sections -> ValueError
        general_sections = ['evaluation', 'sysinfo']
        special_sections = ['placeholder_special_section']
        custom_sections = ['custom.ipynb']
        subgroups = ['prompt', 'gender']
        section_order = general_sections
        self.reporter.get_ordered_notebook_files(general_sections,
                                                 special_sections=special_sections,
                                                 custom_sections=custom_sections,
                                                 section_order=section_order,
                                                 subgroups=subgroups)

    @raises(ValueError)
    def test_check_section_order_extra_sections(self):
        # section_order names a section that was never chosen -> ValueError
        general_sections = ['evaluation', 'sysinfo']
        special_sections = ['placeholder_special_section']
        custom_sections = ['custom.ipynb']
        subgroups = []
        section_order = general_sections + special_sections + custom_sections + ['extra_section']
        self.reporter.get_ordered_notebook_files(general_sections,
                                                 special_sections=special_sections,
                                                 custom_sections=custom_sections,
                                                 section_order=section_order,
                                                 subgroups=subgroups)

    @raises(ValueError)
    def test_check_section_order_wrong_sections(self):
        # section_order contains only unknown names -> ValueError
        general_sections = ['evaluation', 'sysinfo']
        special_sections = ['placeholder_special_section']
        custom_sections = ['custom.ipynb']
        subgroups = []
        section_order = ['extra_section1', 'extra_section2']
        self.reporter.get_ordered_notebook_files(general_sections,
                                                 special_sections=special_sections,
                                                 custom_sections=custom_sections,
                                                 section_order=section_order,
                                                 subgroups=subgroups)

    def test_check_section_order(self):
        # a complete, valid ordering is accepted without raising
        general_sections = ['evaluation', 'sysinfo']
        special_sections = ['placeholder_special_section']
        custom_sections = ['foobar']
        section_order = (['foobar'] +
                         special_sections +
                         general_sections)
        self.reporter.check_section_order(general_sections +
                                          special_sections +
                                          custom_sections,
                                          section_order)

    def test_check_general_section_names_rsmtool(self):
        specified_list = ['data_description', 'preprocessed_features']
        self.reporter.check_section_names(specified_list, 'general')

    @raises(ValueError)
    def test_check_general_section_names_wrong_names_1(self):
        # 'feature_stats' is not a valid rsmtool general section
        specified_list = ['data_description', 'feature_stats']
        self.reporter.check_section_names(specified_list, 'general')

    def test_check_general_section_names_rsmeval_1(self):
        specified_list = ['data_description', 'evaluation']
        self.reporter.check_section_names(specified_list, 'general', context='rsmeval')

    @raises(ValueError)
    def test_check_general_section_names_rsmeval_2(self):
        # 'preprocessed_features' is rsmtool-only, invalid for rsmeval
        specified_list = ['data_description', 'preprocessed_features']
        self.reporter.check_section_names(specified_list, 'general', context='rsmeval')

    def test_check_general_section_names_rsmcompare(self):
        specified_list = ['feature_descriptives', 'evaluation']
        self.reporter.check_section_names(specified_list, 'general', context='rsmcompare')

    @raises(ValueError)
    def test_check_general_section_names_wrong_names_2(self):
        # 'data_description' is not a valid rsmcompare general section
        specified_list = ['data_description', 'evaluation']
        self.reporter.check_section_names(specified_list, 'general', context='rsmcompare')

    def test_determine_chosen_sections_default_general(self):
        # 'all' + subgroups -> the full rsmtool general section list
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = ['prompt']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(chosen_sections, general_section_list_rsmtool)

    def test_determine_chosen_sections_default_general_no_subgroups(self):
        # without subgroups, by-group and fairness sections are dropped
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = []
        no_subgroup_list = [s for s in general_section_list_rsmtool
                            if not s.endswith('by_group') and s != 'fairness_analyses']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(chosen_sections, no_subgroup_list)

    @raises(ValueError)
    def test_determine_chosen_sections_invalid_general(self):
        # unknown general section 'foobar' -> ValueError
        general_sections = ['data_description', 'foobar']
        special_sections = []
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(chosen_sections, general_section_list_rsmtool)

    @raises(ValueError)
    def test_determine_chosen_sections_no_subgroups(self):
        # a by-group section without any subgroups -> ValueError
        general_sections = ['data_description', 'data_description_by_group']
        special_sections = []
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(chosen_sections, general_section_list_rsmtool)

    def test_determine_chosen_sections_custom_general(self):
        general_sections = ['data_description', 'evaluation']
        special_sections = []
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(sorted(chosen_sections), sorted(general_sections))

    def test_determine_chosen_sections_default_general_with_special(self):
        general_sections = ['all']
        special_sections = ['placeholder_special_section']
        custom_sections = []
        subgroups = ['prompt']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(sorted(chosen_sections),
            sorted(general_section_list_rsmtool + special_sections))

    @raises(ValueError)
    def test_determine_chosen_sections_invalid_special(self):
        # unknown special section 'foobar' -> ValueError
        general_sections = ['all']
        special_sections = ['placeholder_special_section', 'foobar']
        custom_sections = []
        subgroups = ['prompt']
        self.reporter.determine_chosen_sections(general_sections,
                                                special_sections,
                                                custom_sections,
                                                subgroups)

    def test_determine_chosen_sections_custom_general_with_special(self):
        general_sections = ['data_description', 'evaluation']
        special_sections = ['placeholder_special_section']
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(sorted(chosen_sections),
            sorted(general_sections + special_sections))

    def test_determine_chosen_sections_default_general_with_subgroups(self):
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = ['prompt', 'gender']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(sorted(chosen_sections),
            sorted(general_section_list_rsmtool))

    def test_determine_chosen_sections_custom_general_with_special_subgroups_and_custom(self):
        # custom section names have their .ipynb extension stripped ('foobar')
        general_sections = ['evaluation', 'sysinfo', 'evaluation_by_group']
        special_sections = ['placeholder_special_section']
        custom_sections = ['foobar.ipynb']
        subgroups = ['prompt', 'gender']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups)
        eq_(sorted(chosen_sections),
            sorted(general_sections +
                   special_sections +
                   ['foobar']))

    def test_determine_chosen_sections_eval_default_general(self):
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = ['prompt']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmeval')
        eq_(sorted(chosen_sections), sorted(general_section_list_rsmeval))

    def test_determine_chosen_sections_eval_custom_general(self):
        general_sections = ['data_description', 'consistency']
        special_sections = []
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmeval')
        eq_(sorted(chosen_sections), sorted(general_sections))

    def test_determine_chosen_sections_eval_default_general_with_no_subgroups(self):
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = []
        no_subgroup_list = [s for s in general_section_list_rsmeval
                            if not s.endswith('by_group') and s != 'fairness_analyses']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmeval')
        eq_(sorted(chosen_sections), sorted(no_subgroup_list))

    def test_determine_chosen_sections_eval_custom_general_with_special_and_subgroups(self):
        general_sections = ['data_description', 'consistency', 'data_description_by_group']
        special_sections = ['placeholder_special_section']
        custom_sections = []
        subgroups = ['prompt', 'gender']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmeval')
        eq_(sorted(chosen_sections), sorted(general_sections +
                                            special_sections))

    def test_determine_chosen_sections_compare_default_general(self):
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = ['prompt']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmcompare')
        eq_(sorted(chosen_sections), sorted(general_section_list_rsmcompare))

    def test_determine_chosen_sections_rsmcompare_custom_general(self):
        general_sections = ['feature_descriptives',
                            'evaluation']
        special_sections = []
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmcompare')
        eq_(sorted(chosen_sections), sorted(general_sections))

    def test_determine_chosen_sections_rsmcompare_default_general_with_no_subgroups(self):
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = []
        no_subgroup_list = [s for s in general_section_list_rsmcompare
                            if not s.endswith('by_group')]
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmcompare')
        eq_(sorted(chosen_sections), sorted(no_subgroup_list))

    def test_determine_chosen_sections_rsmcompare_custom_general_with_special_and_subgroups(self):
        general_sections = ['feature_descriptives',
                            'evaluation']
        special_sections = ['placeholder_special_section']
        custom_sections = []
        subgroups = ['prompt', 'gender']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmcompare')
        eq_(sorted(chosen_sections), sorted(general_sections +
                                            special_sections))

    def test_determine_chosen_sections_rsmsummarize_default_general(self):
        general_sections = ['all']
        special_sections = []
        custom_sections = []
        subgroups = ['prompt']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmsummarize')
        eq_(sorted(chosen_sections), sorted(general_section_list_rsmsummarize))

    def test_determine_chosen_sections_rsmsummarize_custom_general(self):
        general_sections = ['evaluation']
        special_sections = []
        custom_sections = []
        subgroups = []
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmsummarize')
        eq_(sorted(chosen_sections), sorted(general_sections))

    def test_determine_chosen_sections_compare_custom_general_with_special_subgroups_and_custom(self):
        general_sections = ['feature_descriptives',
                            'evaluation']
        special_sections = ['placeholder_special_section']
        custom_sections = ['foobar.ipynb']
        subgroups = ['prompt', 'gender']
        chosen_sections = self.reporter.determine_chosen_sections(general_sections,
                                                                  special_sections,
                                                                  custom_sections,
                                                                  subgroups,
                                                                  context='rsmcompare')
        eq_(sorted(chosen_sections), sorted(general_sections +
                                            special_sections +
                                            ['foobar']))

    def test_get_ordered_notebook_files_default_rsmtool(self):
        # ordered files = header + general sections (no by-group) + footer
        general_sections = ['all']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  model_type='skll',
                                                                  context='rsmtool')
        no_subgroup_list = [s for s in general_section_list_rsmtool
                            if not s.endswith('by_group') and s != 'fairness_analyses']
        section_list = ['header'] + no_subgroup_list + ['footer']

        # replace model section with skll_model.
        updated_section_list = ['skll_' + sname if sname == 'model' else sname for sname in section_list]
        general_section_plus_extension = [s + '.ipynb' for s in updated_section_list]
        expected_notebook_files = [join(notebook_path, s)
                                   for s in general_section_plus_extension]
        eq_(notebook_files, expected_notebook_files)

    def test_get_ordered_notebook_files_custom_rsmtool(self):
        # custom and general sections, custom order and subgroups
        general_sections = ['data_description', 'pca', 'data_description_by_group']
        custom_sections = ['/test_path/custom.ipynb']
        special_sections = ['placeholder_special_section']
        subgroups = ['prompt']
        section_order = ['custom',
                         'data_description',
                         'pca',
                         'data_description_by_group',
                         'placeholder_special_section']
        special_notebook_path = notebook_path_dict['special']['rsmtool']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  custom_sections=custom_sections,
                                                                  special_sections=special_sections,
                                                                  section_order=section_order,
                                                                  subgroups=subgroups,
                                                                  model_type='skll',
                                                                  context='rsmtool')
        expected_notebook_files = ([join(notebook_path, 'header.ipynb')] +
                                   ['/test_path/custom.ipynb'] +
                                   [join(notebook_path, s) + '.ipynb' for s in ['data_description',
                                                                                'pca',
                                                                                'data_description_by_group']] +
                                   [join(special_notebook_path, 'placeholder_special_section.ipynb')] +
                                   [join(notebook_path, 'footer.ipynb')])
        eq_(notebook_files, expected_notebook_files)

    def test_get_ordered_notebook_files_default_rsmeval(self):
        general_sections = ['all']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  context='rsmeval')
        no_subgroup_list = [s for s in general_section_list_rsmeval
                            if not s.endswith('by_group') and s != 'fairness_analyses']
        section_list = ['header'] + no_subgroup_list + ['footer']
        general_section_plus_extension = ['{}.ipynb'.format(s) for s in section_list]
        expected_notebook_files = [join(notebook_path_dict['general']['rsmeval'], s)
                                   for s in
                                   general_section_plus_extension]
        eq_(notebook_files, expected_notebook_files)

    def test_get_ordered_notebook_files_custom_rsmeval(self):
        # custom and general sections, custom order and subgroups
        general_sections = ['evaluation', 'consistency', 'evaluation_by_group']
        custom_sections = ['/test_path/custom.ipynb']
        subgroups = ['prompt']
        section_order = ['evaluation',
                         'consistency',
                         'custom',
                         'evaluation_by_group']
        # shadows the module-level `notebook_path` for this test only
        notebook_path = notebook_path_dict['general']['rsmeval']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  custom_sections=custom_sections,
                                                                  section_order=section_order,
                                                                  subgroups=subgroups,
                                                                  context='rsmeval')
        expected_notebook_files = ([join(notebook_path, 'header.ipynb')] +
                                   [join(notebook_path, s) + '.ipynb' for s in ['evaluation',
                                                                                'consistency']] +
                                   ['/test_path/custom.ipynb'] +
                                   [join(notebook_path, 'evaluation_by_group.ipynb')] +
                                   [join(notebook_path, 'footer.ipynb')])
        eq_(notebook_files, expected_notebook_files)

    def test_get_ordered_notebook_files_default_rsmcompare(self):
        general_sections = ['all']
        # shadows the imported `comparison_notebook_path` for this test only
        comparison_notebook_path = notebook_path_dict['general']['rsmcompare']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  context='rsmcompare')
        no_subgroup_list = [s for s in general_section_list_rsmcompare
                            if not s.endswith('by_group')]
        section_list = ['header'] + no_subgroup_list + ['footer']
        general_section_plus_extension = [s + '.ipynb' for s in section_list]
        expected_notebook_files = [join(comparison_notebook_path, s)
                                   for s in general_section_plus_extension]
        eq_(notebook_files, expected_notebook_files)

    def test_get_ordered_notebook_files_custom_rsmcompare(self):
        # custom and general sections, custom order and subgroups
        general_sections = ['feature_descriptives',
                            'score_distributions',
                            'features_by_group']
        custom_sections = ['/test_path/custom.ipynb']
        subgroups = ['prompt']
        section_order = ['feature_descriptives',
                         'score_distributions',
                         'custom',
                         'features_by_group']
        comparison_notebook_path = notebook_path_dict['general']['rsmcompare']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  custom_sections=custom_sections,
                                                                  section_order=section_order,
                                                                  subgroups=subgroups,
                                                                  context='rsmcompare')
        expected_notebook_files = ([join(comparison_notebook_path, 'header.ipynb')] +
                                   [join(comparison_notebook_path, s) + '.ipynb' for s in ['feature_descriptives',
                                                                                           'score_distributions']] +
                                   ['/test_path/custom.ipynb'] +
                                   [join(comparison_notebook_path, 'features_by_group.ipynb')] +
                                   [join(comparison_notebook_path, 'footer.ipynb')])
        eq_(notebook_files, expected_notebook_files)

    def test_get_ordered_notebook_files_custom_rsmsummarize(self):
        # custom and general sections, custom order and subgroups
        general_sections = ['evaluation']
        custom_sections = ['/test_path/custom.ipynb']
        subgroups = ['prompt']
        section_order = ['custom',
                         'evaluation']
        summary_notebook_path = notebook_path_dict['general']['rsmsummarize']
        notebook_files = self.reporter.get_ordered_notebook_files(general_sections,
                                                                  custom_sections=custom_sections,
                                                                  section_order=section_order,
                                                                  subgroups=subgroups,
                                                                  context='rsmsummarize')
        expected_notebook_files = ([join(summary_notebook_path, 'header.ipynb')] +
                                   ['/test_path/custom.ipynb'] +
                                   [join(summary_notebook_path, s) + '.ipynb' for s in ['evaluation']] +
                                   [join(summary_notebook_path, 'footer.ipynb')])
        eq_(notebook_files, expected_notebook_files)

    def test_get_section_file_map_rsmtool(self):
        # model_type='R' maps 'model' to r_model.ipynb
        special_sections = ['placeholder']
        custom_sections = ['/path/notebook.ipynb']
        section_file_map = self.reporter.get_section_file_map(special_sections,
                                                              custom_sections,
                                                              model_type='R')
        eq_(section_file_map['model'], join(notebook_path, 'r_model.ipynb'))
        eq_(section_file_map['notebook'], '/path/notebook.ipynb')
        eq_(section_file_map['placeholder'], normpath('special_notebook_path/placeholder.ipynb'))

    def test_get_section_file_map_rsmeval(self):
        special_sections = ['placeholder']
        custom_sections = ['/path/notebook.ipynb']
        section_file_map = self.reporter.get_section_file_map(special_sections,
                                                              custom_sections,
                                                              context='rsmeval')
        eq_(section_file_map['data_description'], join(notebook_path, 'data_description.ipynb'))
        eq_(section_file_map['notebook'], '/path/notebook.ipynb')
        eq_(section_file_map['placeholder'], normpath('special_notebook_path/placeholder.ipynb'))

    def test_get_section_file_map_rsmcompare(self):
        special_sections = ['placeholder']
        custom_sections = ['/path/notebook.ipynb']
        section_file_map = self.reporter.get_section_file_map(special_sections,
                                                              custom_sections,
                                                              context='rsmcompare')
        eq_(section_file_map['evaluation'], join(comparison_notebook_path, 'evaluation.ipynb'))
        eq_(section_file_map['notebook'], '/path/notebook.ipynb')
        eq_(section_file_map['placeholder'], normpath('special_notebook_path/placeholder.ipynb'))

    def test_get_section_file_map_rsmsummarize(self):
        special_sections = ['placeholder']
        custom_sections = ['/path/notebook.ipynb']
        section_file_map = self.reporter.get_section_file_map(special_sections,
                                                              custom_sections,
                                                              context='rsmsummarize')
        eq_(section_file_map['evaluation'], join(summary_notebook_path, 'evaluation.ipynb'))
        eq_(section_file_map['notebook'], '/path/notebook.ipynb')
        eq_(section_file_map['placeholder'], normpath('special_notebook_path/placeholder.ipynb'))
| 56.510417
| 116
| 0.522673
| 2,570
| 32,550
| 6.191829
| 0.058366
| 0.079181
| 0.076038
| 0.080186
| 0.852825
| 0.821907
| 0.787595
| 0.754163
| 0.704393
| 0.671526
| 0
| 0.000415
| 0.407957
| 32,550
| 575
| 117
| 56.608696
| 0.825333
| 0.022796
| 0
| 0.730382
| 0
| 0
| 0.105561
| 0.031738
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090543
| false
| 0
| 0.006036
| 0
| 0.098592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1669a2f84062a574b98e210c63b068283302d907
| 151
|
py
|
Python
|
api/xl_fmt/__init__.py
|
C-Canchola/texel
|
9ebb12cf38b78608394f44767f55236845f0f9bc
|
[
"MIT"
] | null | null | null |
api/xl_fmt/__init__.py
|
C-Canchola/texel
|
9ebb12cf38b78608394f44767f55236845f0f9bc
|
[
"MIT"
] | null | null | null |
api/xl_fmt/__init__.py
|
C-Canchola/texel
|
9ebb12cf38b78608394f44767f55236845f0f9bc
|
[
"MIT"
] | null | null | null |
from .spacing import get_spaced_formula
from .formula_formatter import format_typed_sheet_formulas
from .formula_formatter import apply_spacing_to_rng
| 37.75
| 58
| 0.900662
| 22
| 151
| 5.727273
| 0.636364
| 0.174603
| 0.31746
| 0.412698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07947
| 151
| 3
| 59
| 50.333333
| 0.906475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1695493ce484683f70fd7332e8fd418eb58c08de
| 35,332
|
py
|
Python
|
src/openprocurement/tender/openeu/tests/cancellation_blanks.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 3
|
2020-03-13T06:44:23.000Z
|
2020-11-05T18:25:29.000Z
|
src/openprocurement/tender/openeu/tests/cancellation_blanks.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 2
|
2021-03-25T23:29:58.000Z
|
2022-03-21T22:18:37.000Z
|
src/openprocurement/tender/openeu/tests/cancellation_blanks.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 3
|
2020-10-16T16:25:14.000Z
|
2021-05-22T12:26:20.000Z
|
# -*- coding: utf-8 -*-
from copy import deepcopy
from datetime import timedelta
from mock import patch
from openprocurement.api.utils import get_now
from openprocurement.api.constants import RELEASE_2020_04_19
from openprocurement.tender.core.tests.cancellation import (
activate_cancellation_with_complaints_after_2020_04_19,
)
# TenderCancellationBidsAvailabilityTest
from openprocurement.tender.belowthreshold.tests.base import test_cancellation
def bids_on_tender_cancellation_in_tendering(self):
    """Test blank: cancelling a tender during the tendering period keeps bids hidden.

    Mixed into a webtest-based TestCase elsewhere; relies on ``self.app``,
    ``self.tender_id`` and ``self.tender_token`` set up by that class.
    Statement order matters: each request mutates server-side tender state.
    """
    response = self.app.get("/tenders/{}".format(self.tender_id))
    tender = response.json["data"]
    self.assertNotIn("bids", tender)  # bids not visible for others

    # Post a cancellation requested as immediately 'active'.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    cancellation = response.json["data"]
    cancellation_id = cancellation["id"]
    if get_now() < RELEASE_2020_04_19:
        # Old rules: the cancellation becomes active right away.
        self.assertEqual(cancellation["status"], "active")
    else:
        # New rules (post 2020-04-19): created as draft and must be
        # activated through the complaints flow.
        self.assertEqual(cancellation["status"], "draft")
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation_id)

    # After cancellation the tender is 'cancelled' and bids stay hidden.
    response = self.app.get("/tenders/{}".format(self.tender_id))
    tender = response.json["data"]
    self.assertNotIn("bids", tender)
    self.assertEqual(tender["status"], "cancelled")

    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "cancelled")
def bids_on_tender_cancellation_in_pre_qualification(self):
    """Test blank: bid statuses after cancelling a tender in pre-qualification.

    Verifies that after cancellation: re-pended bids become
    'invalid.pre-qualification' (with the visible-field subset), the bid left
    invalidated stays 'invalid', and the deleted bid stays 'deleted' -- the
    latter two exposing only 'id' and 'status'.  Relies on mixin helpers
    (``_mark_one_bid_deleted``, ``_cancel_tender``, etc.) defined on the
    consuming TestCase; statement order is significant.
    """
    self._mark_one_bid_deleted()

    # leave one bid invalidated
    # Patching the tender invalidates all pending bids.
    response = self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token), {"data": {"description": "2 b | !2 b"}}
    )
    for bid_id in self.valid_bids:
        response = self.app.get(
            "/tenders/{}/bids/{}?acc_token={}".format(self.tender_id, bid_id, self.initial_bids_tokens[bid_id])
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.json["data"]["status"], "invalid")

    # Keep one bid invalidated; re-pend the rest.
    invalid_bid_id = self.valid_bids.pop()
    self.assertEqual(len(self.valid_bids), (self.min_bids_number - 1) * 2)
    for bid_id in self.valid_bids:
        response = self.app.patch_json(
            "/tenders/{}/bids/{}?acc_token={}".format(self.tender_id, bid_id, self.initial_bids_tokens[bid_id]),
            {"data": {"status": "pending"}},
        )

    # Move the tender into pre-qualification via the chronograph user.
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")

    tender = self._cancel_tender()

    for bid in tender["bids"]:
        if bid["id"] in self.valid_bids:
            self.assertEqual(bid["status"], "invalid.pre-qualification")
            self.assertEqual(set(bid.keys()), set(self.bid_visible_fields))
        elif bid["id"] == invalid_bid_id:
            self.assertEqual(bid["status"], "invalid")
            self.assertEqual(set(bid.keys()), set(["id", "status"]))
        else:
            self.assertEqual(bid["status"], "deleted")
            self.assertEqual(set(bid.keys()), set(["id", "status"]))

    self._check_visible_fields_for_invalidated_bids()
def bids_on_tender_cancellation_in_pre_qualification_stand_still(self):
    """Cancel a tender in active.pre-qualification.stand-still (old rules).

    The cancellation branch only runs when RELEASE_2020_04_19 is still in
    the future; under the new rules the cancellation would be rejected
    with a 403, so nothing is asserted in that case.
    """
    self._mark_one_bid_deleted()
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    self._qualify_bids_and_switch_to_pre_qualification_stand_still()
    if RELEASE_2020_04_19 > get_now():
        # Test for old rules
        # In new rules there will be 403 error
        tender = self._cancel_tender()
        self.app.authorization = ("Basic", ("broker", ""))
        for bid in tender["bids"]:
            if bid["id"] in self.valid_bids:
                self.assertEqual(bid["status"], "invalid.pre-qualification")
                self.assertEqual(set(bid.keys()), set(self.bid_visible_fields))
            else:
                # the deleted bid exposes only id/status
                self.assertEqual(bid["status"], "deleted")
                self.assertEqual(set(bid.keys()), set(["id", "status"]))
        self._check_visible_fields_for_invalidated_bids()
def bids_on_tender_cancellation_in_auction(self):
    """Cancel a tender in active.auction (old rules only) and check bids.

    Like the stand-still variant, cancellation is only exercised while
    RELEASE_2020_04_19 is in the future; additionally verifies that the
    deleted bid's documents are not accessible.
    """
    self._mark_one_bid_deleted()
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    self._qualify_bids_and_switch_to_pre_qualification_stand_still()
    self.set_status("active.auction", {"id": self.tender_id, "status": "active.pre-qualification.stand-still"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.auction")
    if RELEASE_2020_04_19 > get_now():
        # old rules only; after 2020-04-19 this cancellation is forbidden
        tender = self._cancel_tender()
        self.app.authorization = ("Basic", ("broker", ""))
        for bid in tender["bids"]:
            if bid["id"] in self.valid_bids:
                self.assertEqual(bid["status"], "invalid.pre-qualification")
                self.assertEqual(set(bid.keys()), set(self.bid_visible_fields))
            else:
                self.assertEqual(bid["status"], "deleted")
                self.assertEqual(set(bid.keys()), set(["id", "status"]))
                self._all_documents_are_not_accessible(bid["id"])
        self._check_visible_fields_for_invalidated_bids()
def bids_on_tender_cancellation_in_qualification(self):
    """Cancel a tender in active.qualification and check bid visibility.

    After cancellation: qualified bids stay "active" with the full visible
    field set, the deleted bid exposes only id/status, and unsuccessful
    bids expose a reduced field set with their financial/qualification
    documents hidden from public view (403 on both collection and item).
    """
    self.bid_visible_fields = [
        u"status",
        u"documents",
        u"tenderers",
        u"id",
        u"selfQualified",
        u"eligibilityDocuments",
        u"selfEligible",
        u"value",
        u"date",
        u"financialDocuments",
        u"participationUrl",
        u"qualificationDocuments",
    ]
    deleted_bid_id = self._mark_one_bid_deleted()
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    # qualify_all=False leaves some bids unsuccessful after pre-qualification
    self._qualify_bids_and_switch_to_pre_qualification_stand_still(qualify_all=False)
    self.set_status("active.auction", {"id": self.tender_id, "status": "active.pre-qualification.stand-still"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.auction")
    self._set_auction_results()
    tender = self._cancel_tender()
    self.app.authorization = ("Basic", ("broker", ""))
    for bid in tender["bids"]:
        if bid["id"] in self.valid_bids:
            self.assertEqual(bid["status"], "active")
            self.assertEqual(set(bid.keys()), set(self.bid_visible_fields))
        elif bid["id"] == deleted_bid_id:
            self.assertEqual(bid["status"], "deleted")
            self.assertEqual(set(bid.keys()), set(["id", "status"]))
        else:
            # unsuccessful bid: value/date/participationUrl and the
            # financial/qualification document fields are not shown
            self.assertEqual(bid["status"], "unsuccessful")
            self.assertEqual(
                set(bid.keys()),
                set(
                    [
                        u"documents",
                        u"eligibilityDocuments",
                        u"id",
                        u"status",
                        u"selfEligible",
                        u"tenderers",
                        u"selfQualified",
                    ]
                ),
            )
    for bid_id, bid_token in self.initial_bids_tokens.items():
        response = self.app.get("/tenders/{}/bids/{}".format(self.tender_id, bid_id))
        bid_data = response.json["data"]
        if bid_id in self.valid_bids:
            self.assertEqual(set(bid_data.keys()), set(self.bid_visible_fields))
            for doc_resource in [
                "documents",
                "eligibility_documents",
                "financial_documents",
                "qualification_documents",
            ]:
                self._bid_document_is_accessible(bid_id, doc_resource)
        elif bid_id == deleted_bid_id:
            self._all_documents_are_not_accessible(bid_id)
        else:  # unsuccessful bid
            # financial and qualification documents must stay hidden,
            # both as a collection and per document
            for doc_resource in ["financial_documents", "qualification_documents"]:
                response = self.app.get(
                    "/tenders/{}/bids/{}/{}".format(self.tender_id, bid_id, doc_resource), status=403
                )
                self.assertEqual(response.status, "403 Forbidden")
                self.assertIn("Can't view bid documents in current (", response.json["errors"][0]["description"])
                response = self.app.get(
                    "/tenders/{}/bids/{}/{}/{}".format(
                        self.tender_id, bid_id, doc_resource, self.doc_id_by_type[bid_id + doc_resource]["id"]
                    ),
                    status=403,
                )
                self.assertEqual(response.status, "403 Forbidden")
                self.assertIn("Can't view bid documents in current (", response.json["errors"][0]["description"])
            for doc_resource in ["documents", "eligibility_documents"]:
                self._bid_document_is_accessible(bid_id, doc_resource)
def bids_on_tender_cancellation_in_awarded(self):
    """Cancel a tender in active.awarded and check bid visibility.

    After cancellation, qualified bids stay "active" with the full visible
    field set and all their documents accessible; the deleted bid exposes
    only id/status.
    """
    self.bid_visible_fields = [
        u"status",
        u"documents",
        u"tenderers",
        u"id",
        u"selfQualified",
        u"eligibilityDocuments",
        u"selfEligible",
        u"value",
        u"date",
        u"financialDocuments",
        u"participationUrl",
        u"qualificationDocuments",
    ]
    self._mark_one_bid_deleted()
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    self._qualify_bids_and_switch_to_pre_qualification_stand_still()
    self.set_status("active.auction", {"id": self.tender_id, "status": "active.pre-qualification.stand-still"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.auction")
    self._set_auction_results()
    self.app.authorization = ("Basic", ("broker", ""))
    response = self.app.get("/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token))
    award_id = [i["id"] for i in response.json["data"] if i["status"] == "pending"][0]
    # Bug fix: capture the PATCH response and assert on *it*. Previously the
    # patch result was discarded and the stale GET response from above was
    # re-checked, so a failed award activation went unnoticed.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award_id, self.tender_token),
        {"data": {"status": "active", "qualified": True, "eligible": True}},
    )
    self.assertEqual(response.status, "200 OK")
    response = self.app.get("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.awarded")
    if RELEASE_2020_04_19 < get_now():
        # new rules require the award complaint periods to be over
        self.set_all_awards_complaint_period_end()
    tender = self._cancel_tender()
    self.app.authorization = ("Basic", ("broker", ""))
    for bid in tender["bids"]:
        if bid["id"] in self.valid_bids:
            self.assertEqual(bid["status"], "active")
            self.assertEqual(set(bid.keys()), set(self.bid_visible_fields))
        else:
            self.assertEqual(bid["status"], "deleted")
            self.assertEqual(set(bid.keys()), set(["id", "status"]))
    for bid_id, bid_token in self.initial_bids_tokens.items():
        response = self.app.get("/tenders/{}/bids/{}".format(self.tender_id, bid_id))
        bid_data = response.json["data"]
        if bid_id in self.valid_bids:
            self.assertEqual(set(bid_data.keys()), set(self.bid_visible_fields))
            for doc_resource in [
                "documents",
                "eligibility_documents",
                "financial_documents",
                "qualification_documents",
            ]:
                self._bid_document_is_accessible(bid_id, doc_resource)
# TenderAwardsCancellationResourceTest
def cancellation_active_tendering_j708(self):
    """Regression test (j708): lot cancellation during active.tendering.

    Posts a one-lot bid, deletes it, cancels the first lot, then posts the
    same bid again and moves the tender into active.pre-qualification.
    """
    bid = deepcopy(self.initial_bids[0])
    bid["lotValues"] = bid["lotValues"][:1]
    response = self.app.post_json("/tenders/{}/bids".format(self.tender_id), {"data": bid})
    self.assertEqual(response.status, "201 Created")
    self.initial_bids_tokens[response.json["data"]["id"]] = response.json["access"]["token"]
    self.initial_bids.append(response.json["data"])
    # Delete the freshly posted bid before cancelling the lot.
    response = self.app.delete(
        "/tenders/{}/bids/{}?acc_token={}".format(
            self.tender_id, response.json["data"]["id"], response.json["access"]["token"]
        )
    )
    self.assertEqual(response.status, "200 OK")
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "pending",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation_id = response.json["data"]["id"]
    if RELEASE_2020_04_19 > get_now():
        # old rules: the broker can activate the cancellation directly
        response = self.app.patch_json(
            "/tenders/{}/cancellations/{}?acc_token={}".format(
                self.tender_id, response.json["data"]["id"], self.tender_token
            ),
            {"data": {"status": "active"}},
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
    else:
        # new rules: activation goes through the complaint-period helper
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation_id)
    # Re-post the same one-lot bid after the lot cancellation.
    response = self.app.post_json("/tenders/{}/bids".format(self.tender_id), {"data": bid})
    self.assertEqual(response.status, "201 Created")
    self.initial_bids_tokens[response.json["data"]["id"]] = response.json["access"]["token"]
    self.initial_bids.append(response.json["data"])
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
def cancellation_active_qualification_j1427(self):
    """Regression test (j1427): lot cancellation during pre-qualification.

    Posts three one-lot bids, marks one qualification unsuccessful and one
    active, cancels the lot, and checks the resulting bid statuses: the
    unsuccessful bid stays "unsuccessful", the others become
    "invalid.pre-qualification".
    """
    bid = deepcopy(self.initial_bids[0])
    bid["lotValues"] = bid["lotValues"][:1]
    # post three bids
    bid_ids = []
    for i in range(3):
        response = self.app.post_json("/tenders/{}/bids".format(self.tender_id), {"data": bid})
        self.assertEqual(response.status, "201 Created")
        self.initial_bids_tokens[response.json["data"]["id"]] = response.json["access"]["token"]
        self.initial_bids.append(response.json["data"])
        bid_ids.append(response.json["data"]["id"])
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    # First bid's qualification -> unsuccessful.
    response = self.app.get("/tenders/{}/qualifications".format(self.tender_id))
    qualification_id = [i["id"] for i in response.json["data"] if i["bidID"] == bid_ids[0]][0]
    response = self.app.patch_json(
        "/tenders/{}/qualifications/{}?acc_token={}".format(self.tender_id, qualification_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    # Second bid's qualification -> active.
    response = self.app.get("/tenders/{}/qualifications".format(self.tender_id))
    qualification_id = [i["id"] for i in response.json["data"] if i["bidID"] == bid_ids[1]][0]
    response = self.app.patch_json(
        "/tenders/{}/qualifications/{}?acc_token={}".format(self.tender_id, qualification_id, self.tender_token),
        {"data": {"status": "active", "qualified": True, "eligible": True}},
    )
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation_id = response.json["data"]["id"]
    if RELEASE_2020_04_19 < get_now():
        # new rules: activation goes through the complaint-period helper
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation_id)
    response = self.app.get("/tenders/{}/bids/{}".format(self.tender_id, bid_ids[0]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    response = self.app.get("/tenders/{}/bids/{}".format(self.tender_id, bid_ids[1]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "invalid.pre-qualification")
    response = self.app.get("/tenders/{}/bids/{}".format(self.tender_id, bid_ids[2]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "invalid.pre-qualification")
def cancellation_active_qualification(self):
    """Lot and tender cancellation are allowed while a qualification for the
    lot is active (pre-qualification stage).

    Activates one pending qualification of the first lot, then creates a
    lot-level cancellation and a tender-level cancellation, asserting the
    pre-/post-RELEASE_2020_04_19 activation flows.
    """
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    self.app.authorization = ("Basic", ("token", ""))
    response = self.app.get("/tenders/{}/qualifications".format(self.tender_id))
    qualification_id = [
        i["id"] for i in response.json["data"] if i["status"] == "pending" and i["lotID"] == self.initial_lots[0]["id"]
    ][0]
    response = self.app.patch_json(
        "/tenders/{}/qualifications/{}?acc_token={}".format(self.tender_id, qualification_id, self.tender_token),
        {"data": {"status": "active", "qualified": True, "eligible": True}},
    )
    # Lot-level cancellation.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation = response.json["data"]
    self.assertEqual(cancellation["reason"], "cancellation reason")
    self.assertIn("id", cancellation)
    self.assertIn(cancellation["id"], response.headers["Location"])
    if RELEASE_2020_04_19 > get_now():
        # old rules: posted cancellation is immediately active
        self.assertEqual(cancellation["status"], "active")
    else:
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation["id"])
    # Tender-level cancellation.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation = response.json["data"]
    cancellation_id = cancellation["id"]
    self.assertEqual(cancellation["reason"], "cancellation reason")
    if get_now() < RELEASE_2020_04_19:
        self.assertEqual(cancellation["status"], "active")
        self.assertIn("id", cancellation)
        self.assertIn(cancellation["id"], response.headers["Location"])
    else:
        # new rules: cancellation starts as draft and must be activated
        self.assertEqual(cancellation["status"], "draft")
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation_id)
def cancellation_unsuccessful_qualification(self):
    """Cancellation is forbidden while all of a lot's qualifications are
    unsuccessful — both for that lot and for the whole tender — but a
    different, untouched lot can still be cancelled.
    """
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    self.app.authorization = ("Basic", ("token", ""))
    # Mark every pending qualification of the first lot unsuccessful.
    # Fix: the comprehension variable no longer shadows the loop index `i`
    # (the leak clobbered `i` under Python 2 comprehension scoping and was
    # confusing to read in either version).
    for i in range(self.min_bids_number):
        response = self.app.get("/tenders/{}/qualifications".format(self.tender_id))
        qualification_id = [
            q["id"]
            for q in response.json["data"]
            if q["status"] == "pending" and q["lotID"] == self.initial_lots[0]["id"]
        ][0]
        response = self.app.patch_json(
            "/tenders/{}/qualifications/{}?acc_token={}".format(self.tender_id, qualification_id, self.tender_token),
            {"data": {"status": "unsuccessful", "qualified": True, "eligible": True}},
        )
        self.assertEqual(response.status, "200 OK")
    # Cancelling the fully-unsuccessful lot must fail.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't perform cancellation if all qualifications are unsuccessful"
    )
    # Cancelling the whole tender must fail for the same reason.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't perform cancellation if all qualifications are unsuccessful"
    )
    # Cancelling the second (untouched) lot is still allowed.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[1]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation = response.json["data"]
    self.assertEqual(cancellation["reason"], "cancellation reason")
    self.assertIn("id", cancellation)
    self.assertIn(cancellation["id"], response.headers["Location"])
    if RELEASE_2020_04_19 > get_now():
        self.assertEqual(cancellation["status"], "active")
    else:
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation["id"])
def cancellation_active_award(self):
    """Lot and tender cancellation are allowed while the lot has an active
    award.

    Walks the tender through pre-qualification, auction and award
    activation, then creates a lot-level and a tender-level cancellation,
    asserting the pre-/post-RELEASE_2020_04_19 activation flows.
    """
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    response = self.app.get("/tenders/{}/qualifications".format(self.tender_id))
    self.app.authorization = ("Basic", ("token", ""))
    # Activate every qualification, then move to stand-still.
    for qualification in response.json["data"]:
        response = self.app.patch_json(
            "/tenders/{}/qualifications/{}?acc_token={}".format(self.tender_id, qualification["id"], self.tender_token),
            {"data": {"status": "active", "qualified": True, "eligible": True}},
        )
        self.assertEqual(response.status, "200 OK")
    response = self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"status": "active.pre-qualification.stand-still"}},
    )
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification.stand-still")
    self.set_status("active.auction", {"id": self.tender_id, "status": "active.pre-qualification.stand-still"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.auction")
    # Post auction results for every lot.
    self.app.authorization = ("Basic", ("auction", ""))
    response = self.app.get("/tenders/{}/auction".format(self.tender_id))
    auction_bids_data = response.json["data"]["bids"]
    for lot_id in self.initial_lots:
        response = self.app.post_json(
            "/tenders/{}/auction/{}".format(self.tender_id, lot_id["id"]), {"data": {"bids": auction_bids_data}}
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.json["data"]["status"], "active.qualification")
    # Activate the first lot's pending award.
    self.app.authorization = ("Basic", ("token", ""))
    response = self.app.get("/tenders/{}/awards".format(self.tender_id))
    award_id = [
        i["id"] for i in response.json["data"] if i["status"] == "pending" and i["lotID"] == self.initial_lots[0]["id"]
    ][0]
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award_id, self.tender_token),
        {"data": {"status": "active", "qualified": True, "eligible": True}},
    )
    if RELEASE_2020_04_19 < get_now():
        # new rules require the award complaint periods to be over
        self.set_all_awards_complaint_period_end()
    # Lot-level cancellation.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation = response.json["data"]
    self.assertEqual(cancellation["reason"], "cancellation reason")
    self.assertIn("id", cancellation)
    self.assertIn(cancellation["id"], response.headers["Location"])
    if RELEASE_2020_04_19 > get_now():
        self.assertEqual(cancellation["status"], "active")
    else:
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation["id"])
    # Tender-level cancellation.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation = response.json["data"]
    cancellation_id = cancellation["id"]
    self.assertEqual(cancellation["reason"], "cancellation reason")
    if get_now() < RELEASE_2020_04_19:
        self.assertEqual(cancellation["status"], "active")
        self.assertIn("id", cancellation)
        self.assertIn(cancellation["id"], response.headers["Location"])
    else:
        # new rules: cancellation starts as draft and must be activated
        self.assertEqual(cancellation["status"], "draft")
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation_id)
def cancellation_unsuccessful_award(self):
    """Cancellation is forbidden while all of a lot's awards are
    unsuccessful — both for that lot and for the whole tender — but a
    different, untouched lot can still be cancelled.
    """
    self.set_status("active.pre-qualification", {"id": self.tender_id, "status": "active.tendering"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification")
    response = self.app.get("/tenders/{}/qualifications".format(self.tender_id))
    self.app.authorization = ("Basic", ("token", ""))
    # Activate every qualification, then move to stand-still.
    for qualification in response.json["data"]:
        response = self.app.patch_json(
            "/tenders/{}/qualifications/{}?acc_token={}".format(self.tender_id, qualification["id"], self.tender_token),
            {"data": {"status": "active", "qualified": True, "eligible": True}},
        )
        self.assertEqual(response.status, "200 OK")
    response = self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"status": "active.pre-qualification.stand-still"}},
    )
    self.assertEqual(response.json["data"]["status"], "active.pre-qualification.stand-still")
    self.set_status("active.auction", {"id": self.tender_id, "status": "active.pre-qualification.stand-still"})
    self.app.authorization = ("Basic", ("chronograph", ""))
    response = self.app.patch_json("/tenders/{}".format(self.tender_id), {"data": {"id": self.tender_id}})
    self.assertEqual(response.json["data"]["status"], "active.auction")
    # Post auction results for every lot.
    self.app.authorization = ("Basic", ("auction", ""))
    response = self.app.get("/tenders/{}/auction".format(self.tender_id))
    auction_bids_data = response.json["data"]["bids"]
    for lot_id in self.initial_lots:
        response = self.app.post_json(
            "/tenders/{}/auction/{}".format(self.tender_id, lot_id["id"]), {"data": {"bids": auction_bids_data}}
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.json["data"]["status"], "active.qualification")
    self.app.authorization = ("Basic", ("token", ""))
    # patch all first lot related Awards to unsuccessful
    # (new pending awards may appear after each patch, hence the loop)
    while True:
        response = self.app.get("/tenders/{}/awards".format(self.tender_id))
        awards = [
            i["id"]
            for i in response.json["data"]
            if i["status"] == "pending" and i["lotID"] == self.initial_lots[0]["id"]
        ]
        if awards:
            award_id = awards[0]
        else:
            break
        response = self.app.patch_json(
            "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award_id, self.tender_token),
            {"data": {"status": "unsuccessful"}},
        )
        self.assertEqual(response.status, "200 OK")
    if RELEASE_2020_04_19 < get_now():
        # new rules require the award complaint periods to be over
        self.set_all_awards_complaint_period_end()
    # Cancelling the fully-unsuccessful lot must fail.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"],
                     "Can't perform cancellation if all awards are unsuccessful")
    # Cancelling the whole tender must fail for the same reason.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"],
                     "Can't perform cancellation if all awards are unsuccessful")
    # Cancelling the second (untouched) lot is still allowed.
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "status": "active",
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[1]["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    cancellation = response.json["data"]
    self.assertEqual(cancellation["reason"], "cancellation reason")
    self.assertIn("id", cancellation)
    self.assertIn(cancellation["id"], response.headers["Location"])
    if RELEASE_2020_04_19 > get_now():
        self.assertEqual(cancellation["status"], "active")
    else:
        activate_cancellation_with_complaints_after_2020_04_19(self, cancellation["id"])
@patch("openprocurement.tender.core.models.RELEASE_2020_04_19",
       get_now() - timedelta(days=1))
@patch("openprocurement.tender.core.validation.RELEASE_2020_04_19",
       get_now() - timedelta(days=1))
@patch("openprocurement.tender.core.views.cancellation.RELEASE_2020_04_19",
       get_now() - timedelta(days=1))
def create_cancellation_in_qualification_complaint_period(self):
    """With the post-2020-04-19 rules forced on (all three RELEASE
    constants patched into the past), posting a cancellation while the
    qualification complaint period is active is rejected with a 403.
    """
    self.set_status("active.pre-qualification.stand-still")
    cancellation = dict(**test_cancellation)
    cancellation.update({"reasonType": "noDemand"})
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
        status=403
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"],
        [{
            u"description": u"Cancellation can't be add when exists active complaint period",
            u"location": u"body",
            u"name": u"data"
        }],
    )
| 44.109863
| 120
| 0.644373
| 4,024
| 35,332
| 5.470179
| 0.049205
| 0.061784
| 0.058877
| 0.06133
| 0.938579
| 0.935581
| 0.918908
| 0.909958
| 0.897692
| 0.891968
| 0
| 0.01383
| 0.187564
| 35,332
| 800
| 121
| 44.165
| 0.753005
| 0.008236
| 0
| 0.805596
| 0
| 0
| 0.22181
| 0.078133
| 0
| 0
| 0
| 0
| 0.207658
| 1
| 0.019146
| false
| 0
| 0.010309
| 0
| 0.029455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
169eaade1729e35920b873928fdeed5ac3b078be
| 1,492
|
py
|
Python
|
python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
import pytest
from dagster import (
DagsterTypeCheckDidNotPass,
Dict,
InputDefinition,
OutputDefinition,
execute_solid,
lambda_solid,
)
def test_typed_python_dict():
    """A Dict[int, int] runtime type accepts an int-to-int mapping."""
    int_mapping_type = Dict[int, int]
    int_mapping_type.type_check(None, {1: 1})
def test_typed_python_dict_failure():
    """A Dict[int, int] type check fails when a value is a string."""
    int_mapping_type = Dict[int, int]
    check_result = int_mapping_type.type_check(None, {1: "1"})
    assert not check_result.success
def test_basic_solid_dict_int_int_output():
    """A solid declaring a Dict[int, int] output emits a conforming dict."""

    @lambda_solid(output_def=OutputDefinition(Dict[int, int]))
    def produce_mapping():
        return {1: 1}

    result = execute_solid(produce_mapping)
    assert result.output_value() == {1: 1}
def test_basic_solid_dict_int_int_output_faile():
    """A solid whose output violates Dict[int, int] fails its type check.

    NOTE: the trailing "faile" typo in the name is kept so the collected
    test id stays stable.
    """

    @lambda_solid(output_def=OutputDefinition(Dict[int, int]))
    def produce_bad_mapping():
        return {1: "1"}

    with pytest.raises(DagsterTypeCheckDidNotPass):
        execute_solid(produce_bad_mapping)
def test_basic_solid_dict_int_int_input_pass():
    """A solid with a Dict[int, int] input passes a conforming mapping through."""

    @lambda_solid(input_defs=[InputDefinition("ddict", Dict[int, int])])
    def passthrough_mapping(ddict):
        return ddict

    result = execute_solid(passthrough_mapping, input_values={"ddict": {1: 2}})
    assert result.output_value() == {1: 2}
def test_basic_solid_dict_int_int_input_fails():
    """A solid with a Dict[int, int] input rejects a mapping with a str key."""

    @lambda_solid(input_defs=[InputDefinition("ddict", Dict[int, int])])
    def passthrough_mapping(ddict):
        return ddict

    with pytest.raises(DagsterTypeCheckDidNotPass):
        execute_solid(passthrough_mapping, input_values={"ddict": {"1": 2}})
| 26.175439
| 100
| 0.715818
| 210
| 1,492
| 4.67619
| 0.195238
| 0.135438
| 0.183299
| 0.114053
| 0.814664
| 0.769857
| 0.733198
| 0.694501
| 0.515275
| 0.515275
| 0
| 0.012903
| 0.168901
| 1,492
| 56
| 101
| 26.642857
| 0.779032
| 0
| 0
| 0.368421
| 0
| 0
| 0.015416
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.263158
| false
| 0.105263
| 0.052632
| 0.105263
| 0.421053
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 7
|
bc7f1cd58236516e3a0196d1d7bf1cda101cd565
| 150
|
py
|
Python
|
GeeProxy/__init__.py
|
geebytes/GeeProxy
|
6f2f57ef1e1e8ea9a295cf987577dab5f1cadfe5
|
[
"Apache-2.0"
] | 2
|
2020-10-12T05:31:36.000Z
|
2020-11-30T07:39:06.000Z
|
GeeProxy/__init__.py
|
geebytes/GeeProxy
|
6f2f57ef1e1e8ea9a295cf987577dab5f1cadfe5
|
[
"Apache-2.0"
] | 1
|
2021-04-19T11:14:59.000Z
|
2021-04-19T11:14:59.000Z
|
GeeProxy/__init__.py
|
geebytes/GeeProxy
|
6f2f57ef1e1e8ea9a295cf987577dab5f1cadfe5
|
[
"Apache-2.0"
] | 1
|
2021-09-30T04:36:10.000Z
|
2021-09-30T04:36:10.000Z
|
'''
@Author: qinzhonghe96@163.com
@Date: 2020-03-01 14:31:44
@LastEditors: qinzhonghe96@163.com
@LastEditTime: 2020-03-01 17:34:18
@Description:
'''
| 18.75
| 34
| 0.72
| 23
| 150
| 4.695652
| 0.73913
| 0.277778
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.279412
| 0.093333
| 150
| 7
| 35
| 21.428571
| 0.514706
| 0.933333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc90b2be579aaf383f62fb080072262c51110d23
| 12,807
|
py
|
Python
|
tests/test_problem.py
|
tods-doc/d3m
|
e25793d4aaa9a8fdb63ac33bf1c045b96d6067a6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_problem.py
|
tods-doc/d3m
|
e25793d4aaa9a8fdb63ac33bf1c045b96d6067a6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_problem.py
|
tods-doc/d3m
|
e25793d4aaa9a8fdb63ac33bf1c045b96d6067a6
|
[
"Apache-2.0"
] | null | null | null |
import os.path
import pickle
import unittest
from d3m import utils
from d3m.metadata import problem, pipeline_run
class TestProblem(unittest.TestCase):
    """End-to-end tests for d3m problem-description loading, serialization,
    metric normalization, and pickling."""
    def test_basic(self):
        """Load the iris problem doc from test data and compare both the simple
        and the JSON structure against fully spelled-out expected values."""
        self.maxDiff = None
        problem_doc_path = os.path.join(os.path.dirname(__file__), 'data', 'problems', 'iris_problem_1', 'problemDoc.json')
        problem_uri = 'file://{problem_doc_path}'.format(problem_doc_path=problem_doc_path)
        problem_description = problem.Problem.load(problem_uri)
        self.assertEqual(problem_description.to_simple_structure(), {
            'id': 'iris_problem_1',
            'digest': '1a12135422967aa0de0c4629f4f58d08d39e97f9133f7b50da71420781aa18a5',
            'version': '4.0.0',
            'location_uris': [
                problem_uri,
            ],
            'name': 'Distinguish Iris flowers',
            'description': 'Distinguish Iris flowers of three related species.',
            'schema': problem.PROBLEM_SCHEMA_VERSION,
            'problem': {
                'task_keywords': [problem.TaskKeyword.CLASSIFICATION, problem.TaskKeyword.MULTICLASS],
                'performance_metrics': [
                    {
                        'metric': problem.PerformanceMetric.ACCURACY,
                    }
                ]
            },
            'inputs': [
                {
                    'dataset_id': 'iris_dataset_1',
                    'targets': [
                        {
                            'target_index': 0,
                            'resource_id': 'learningData',
                            'column_index': 5,
                            'column_name': 'species',
                        }
                    ]
                }
            ],
        })
        # NOTE(review): to_json_structure() is compared twice -- first against
        # enum members, then against their string names. Presumably the d3m
        # enums compare equal to their string values; confirm in d3m.metadata.
        self.assertEqual(problem_description.to_json_structure(), {
            'id': 'iris_problem_1',
            'digest': '1a12135422967aa0de0c4629f4f58d08d39e97f9133f7b50da71420781aa18a5',
            'version': '4.0.0',
            'location_uris': [
                problem_uri,
            ],
            'name': 'Distinguish Iris flowers',
            'description': 'Distinguish Iris flowers of three related species.',
            'schema': problem.PROBLEM_SCHEMA_VERSION,
            'problem': {
                'task_keywords': [problem.TaskKeyword.CLASSIFICATION, problem.TaskKeyword.MULTICLASS],
                'performance_metrics': [
                    {
                        'metric': problem.PerformanceMetric.ACCURACY,
                    }
                ]
            },
            'inputs': [
                {
                    'dataset_id': 'iris_dataset_1',
                    'targets': [
                        {
                            'target_index': 0,
                            'resource_id': 'learningData',
                            'column_index': 5,
                            'column_name': 'species',
                        }
                    ]
                }
            ],
        })
        self.assertEqual(problem_description.to_json_structure(), {
            'id': 'iris_problem_1',
            'digest': '1a12135422967aa0de0c4629f4f58d08d39e97f9133f7b50da71420781aa18a5',
            'version': '4.0.0',
            'location_uris': [
                problem_uri,
            ],
            'name': 'Distinguish Iris flowers',
            'description': 'Distinguish Iris flowers of three related species.',
            'schema': problem.PROBLEM_SCHEMA_VERSION,
            'problem': {
                'task_keywords': ['CLASSIFICATION', 'MULTICLASS'],
                'performance_metrics': [
                    {
                        'metric': 'ACCURACY',
                    }
                ]
            },
            'inputs': [
                {
                    'dataset_id': 'iris_dataset_1',
                    'targets': [
                        {
                            'target_index': 0,
                            'resource_id': 'learningData',
                            'column_index': 5,
                            'column_name': 'species',
                        }
                    ]
                }
            ],
        })
        # The canonical JSON form must also validate against both schemas.
        pipeline_run.validate_problem(problem_description.to_json_structure(canonical=True))
        problem.PROBLEM_SCHEMA_VALIDATOR.validate(problem_description.to_json_structure(canonical=True))
    def test_conversion(self):
        """Round-trip: JSON structure -> Problem -> simple structure is lossless."""
        problem_doc_path = os.path.join(os.path.dirname(__file__), 'data', 'problems', 'iris_problem_1', 'problemDoc.json')
        problem_uri = 'file://{problem_doc_path}'.format(problem_doc_path=problem_doc_path)
        problem_description = problem.Problem.load(problem_uri)
        self.assertEqual(problem_description.to_simple_structure(), problem.Problem.from_json_structure(problem_description.to_json_structure(), strict_digest=True).to_simple_structure())
        # Legacy.
        self.assertEqual(utils.to_json_structure(problem_description.to_simple_structure()), problem.Problem.from_json_structure(utils.to_json_structure(problem_description.to_simple_structure()), strict_digest=True).to_simple_structure())
        # Round-tripping must recover the actual enum member, not a plain string.
        self.assertIs(problem.Problem.from_json_structure(problem_description.to_json_structure(), strict_digest=True)['problem']['task_keywords'][0], problem.TaskKeyword.CLASSIFICATION)
    def test_unparse(self):
        """unparse() maps enum members back to their original camelCase spellings."""
        self.assertEqual(problem.TaskKeyword.CLASSIFICATION.unparse(), 'classification')
        self.assertEqual(problem.TaskKeyword.MULTICLASS.unparse(), 'multiClass')
        self.assertEqual(problem.PerformanceMetric.ACCURACY.unparse(), 'accuracy')
    def test_normalize(self):
        """Exercise _normalize(worst, best, value) over finite bounds (linear
        interpolation, both orientations) and infinite bounds (asymptotic)."""
        # Finite bounds, best > worst.
        self.assertEqual(problem.PerformanceMetric._normalize(0, 1, 0.5), 0.5)
        self.assertEqual(problem.PerformanceMetric._normalize(0, 2, 0.5), 0.25)
        self.assertEqual(problem.PerformanceMetric._normalize(1, 2, 1.5), 0.5)
        self.assertEqual(problem.PerformanceMetric._normalize(-1, 0, -0.5), 0.5)
        self.assertEqual(problem.PerformanceMetric._normalize(-2, 0, -1.5), 0.25)
        self.assertEqual(problem.PerformanceMetric._normalize(-2, -1, -1.5), 0.5)
        # Finite bounds, best < worst (orientation flipped).
        self.assertEqual(problem.PerformanceMetric._normalize(1, 0, 0.5), 0.5)
        self.assertEqual(problem.PerformanceMetric._normalize(2, 0, 0.5), 0.75)
        self.assertEqual(problem.PerformanceMetric._normalize(2, 1, 1.5), 0.5)
        self.assertEqual(problem.PerformanceMetric._normalize(0, -1, -0.5), 0.5)
        self.assertEqual(problem.PerformanceMetric._normalize(0, -2, -1.5), 0.75)
        self.assertEqual(problem.PerformanceMetric._normalize(-1, -2, -1.5), 0.5)
        # Infinite first bound: asymptotic approach toward the finite bound.
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 0, 0.0), 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 0, 0.5), 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 0, 1000.0), 0.5378828427399902)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 0, 5000.0), 0.013385701848569713)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 1, 1.0), 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 1, 1.5), 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 1, 1000.0), 0.5382761574524354)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), 1, 5000.0), 0.013399004523107192)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), -1, -1.0), 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), -1, -0.5), 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), -1, 1000.0), 0.5374897097430198)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), -1, 5000.0), 0.01337241229216877)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('inf'), -1, 0.0), 0.9995000000416667)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 0, 0.0), 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 0, -0.5), 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 0, -1000.0), 0.5378828427399902)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 0, -5000.0), 0.013385701848569713)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 1, 1.0), 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 1, 0.5), 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 1, -1000.0), 0.5374897097430198)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 1, -5000.0), 0.01337241229216877)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), 1, 0.0), 0.9995000000416667)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), -1, -1.0), 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), -1, -1.5), 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), -1, -1000.0), 0.5382761574524354)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(float('-inf'), -1, -5000.0), 0.013399004523107192)
        # Infinite second bound: mirror image of the cases above (1 - value).
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('inf'), 0.0), 1 - 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('inf'), 0.5), 1 - 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('inf'), 1000.0), 1 - 0.5378828427399902)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('inf'), 5000.0), 1 - 0.013385701848569713)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('inf'), 1.0), 1 - 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('inf'), 1.5), 1 - 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('inf'), 1000.0), 1 - 0.5382761574524354)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('inf'), 5000.0), 1 - 0.013399004523107192)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('inf'), -1.0), 1 - 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('inf'), -0.5), 1 - 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('inf'), 1000.0), 1 - 0.5374897097430198)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('inf'), 5000.0), 1 - 0.01337241229216877)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('inf'), 0.0), 1 - 0.9995000000416667)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('-inf'), 0.0), 1 - 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('-inf'), -0.5), 1 - 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('-inf'), -1000.0), 1 - 0.5378828427399902)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(0, float('-inf'), -5000.0), 1 - 0.013385701848569713)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('-inf'), 1.0), 1 - 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('-inf'), 0.5), 1 - 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('-inf'), -1000.0), 1 - 0.5374897097430198)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('-inf'), -5000.0), 1 - 0.01337241229216877)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(1, float('-inf'), 0.0), 1 - 0.9995000000416667)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('-inf'), -1.0), 1 - 1.0)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('-inf'), -1.5), 1 - 0.9997500000052083)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('-inf'), -1000.0), 1 - 0.5382761574524354)
        self.assertAlmostEqual(problem.PerformanceMetric._normalize(-1, float('-inf'), -5000.0), 1 - 0.013399004523107192)
    def test_pickle(self):
        """Metric enum members survive a pickle round-trip, including get_class()."""
        value = problem.PerformanceMetric.ACCURACY
        pickled = pickle.dumps(value)
        unpickled = pickle.loads(pickled)
        self.assertEqual(value, unpickled)
        self.assertIs(value.get_class(), unpickled.get_class())
if __name__ == '__main__':  # Allow running this test module directly.
    unittest.main()
| 54.965665
| 239
| 0.636605
| 1,261
| 12,807
| 6.298176
| 0.085646
| 0.20549
| 0.265928
| 0.294636
| 0.898766
| 0.898766
| 0.898766
| 0.880886
| 0.878872
| 0.869428
| 0
| 0.123175
| 0.229796
| 12,807
| 232
| 240
| 55.202586
| 0.681975
| 0.000547
| 0
| 0.373057
| 0
| 0
| 0.114471
| 0.018909
| 0
| 0
| 0
| 0
| 0.388601
| 1
| 0.025907
| false
| 0
| 0.025907
| 0
| 0.056995
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bca19ee383005e46961e0b285139b0f039de24ef
| 3,114
|
py
|
Python
|
ingenico/direct/sdk/merchant/payouts/i_payouts_client.py
|
Ingenico/direct-sdk-python2
|
1c5c08fe2281aa99bfe8e8e031071600cb3be11d
|
[
"Apache-2.0"
] | null | null | null |
ingenico/direct/sdk/merchant/payouts/i_payouts_client.py
|
Ingenico/direct-sdk-python2
|
1c5c08fe2281aa99bfe8e8e031071600cb3be11d
|
[
"Apache-2.0"
] | null | null | null |
ingenico/direct/sdk/merchant/payouts/i_payouts_client.py
|
Ingenico/direct-sdk-python2
|
1c5c08fe2281aa99bfe8e8e031071600cb3be11d
|
[
"Apache-2.0"
] | null | null | null |
#
# This class was auto-generated from the API references found at
# https://support.direct.ingenico.com/documentation/api/reference/
#
from abc import ABCMeta, abstractmethod
class IPayoutsClient:
    """
    Client interface for the payouts resource.

    Implementations must be safe to share between threads.
    """
    # Python 2 style ABC declaration (this SDK targets Python 2); the
    # abstract methods below define the contract for concrete clients.
    __metaclass__ = ABCMeta

    @abstractmethod
    def create_payout(self, body, context=None):
        """
        Create a payout.

        Resource /v2/{merchantId}/payouts - Create payout
        See also https://support.direct.ingenico.com/documentation/api/reference#operation/CreatePayoutApi

        :param body: :class:`ingenico.direct.sdk.domain.create_payout_request.CreatePayoutRequest`
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext`
        :return: :class:`ingenico.direct.sdk.domain.payout_response.PayoutResponse`
        :raise: DeclinedPayoutException if the Ingenico ePayments platform declined / rejected the payout. The payout result will be available from the exception.
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
                or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: DirectException if something went wrong at the Ingenico ePayments platform,
                the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
                or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        pass

    @abstractmethod
    def get_payout(self, payout_id, context=None):
        """
        Retrieve a single payout by its identifier.

        Resource /v2/{merchantId}/payouts/{payoutId} - Get payout
        See also https://support.direct.ingenico.com/documentation/api/reference#operation/GetPayoutApi

        :param payout_id: str
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext`
        :return: :class:`ingenico.direct.sdk.domain.payout_response.PayoutResponse`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
                or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: DirectException if something went wrong at the Ingenico ePayments platform,
                the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
                or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        pass
| 55.607143
| 162
| 0.712267
| 390
| 3,114
| 5.651282
| 0.333333
| 0.036298
| 0.050817
| 0.088929
| 0.805354
| 0.779038
| 0.744555
| 0.744555
| 0.720054
| 0.720054
| 0
| 0.020644
| 0.222222
| 3,114
| 55
| 163
| 56.618182
| 0.889348
| 0.822736
| 0
| 0.444444
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 11
|
bcabb38a4b1721f4251f5f959c53bae2a9581cdb
| 5,556
|
py
|
Python
|
tests/test_fft.py
|
FilipeMaia/afnumpy
|
11958f501f7ddeb88915a44d0fd4914e1779e7dd
|
[
"BSD-2-Clause"
] | 31
|
2015-06-16T17:17:06.000Z
|
2021-01-03T16:20:23.000Z
|
tests/test_fft.py
|
daurer/afnumpy
|
83f529eab7cb0ba49101aa5869059ac38f457e36
|
[
"BSD-2-Clause"
] | 33
|
2015-05-14T18:03:43.000Z
|
2019-09-23T20:02:45.000Z
|
tests/test_fft.py
|
daurer/afnumpy
|
83f529eab7cb0ba49101aa5869059ac38f457e36
|
[
"BSD-2-Clause"
] | 13
|
2015-06-16T17:17:09.000Z
|
2021-11-06T22:46:15.000Z
|
import afnumpy
import afnumpy.fft
import numpy
import numpy.fft
from asserts import *
def test_fft():
    """afnumpy's 1-D FFT agrees with numpy's for real and complex inputs."""
    # Real inputs of increasing dimensionality.
    for shape in ((3, 3), (3, 2), (5, 3, 2)):
        reference = numpy.random.random(shape)
        device = afnumpy.array(reference)
        fassert(afnumpy.fft.fft(device), numpy.fft.fft(reference))
    # Complex input.
    reference = numpy.random.random((5, 3, 2)) + numpy.random.random((5, 3, 2)) * 1.0j
    device = afnumpy.array(reference)
    fassert(afnumpy.fft.fft(device), numpy.fft.fft(reference))
def test_ifft():
    """afnumpy's 1-D inverse FFT agrees with numpy's for a complex input."""
    # arrayfire offers no real-to-complex inverse FFT, so only the
    # complex-input case is exercised (real-input variants are disabled).
    reference = numpy.random.random((5, 3, 2)) + numpy.random.random((5, 3, 2)) * 1.0j
    device = afnumpy.array(reference)
    fassert(afnumpy.fft.ifft(device), numpy.fft.ifft(reference))
def test_fft2():
    """afnumpy's 2-D FFT agrees with numpy's for real and complex inputs."""
    for shape in ((3, 3), (3, 2), (5, 3, 2)):
        reference = numpy.random.random(shape)
        device = afnumpy.array(reference)
        fassert(afnumpy.fft.fft2(device), numpy.fft.fft2(reference))
    reference = numpy.random.random((5, 3, 2)) + numpy.random.random((5, 3, 2)) * 1.0j
    device = afnumpy.array(reference)
    fassert(afnumpy.fft.fft2(device), numpy.fft.fft2(reference))
def test_ifft2():
    """afnumpy's 2-D inverse FFT agrees with numpy's for a complex input."""
    # arrayfire offers no real-to-complex inverse FFT, so only the
    # complex-input case is exercised (real-input variants are disabled).
    reference = numpy.random.random((5, 3, 2)) + numpy.random.random((5, 3, 2)) * 1.0j
    device = afnumpy.array(reference)
    fassert(afnumpy.fft.ifft2(device), numpy.fft.ifft2(reference))
def test_fftn():
    """afnumpy's N-D FFT agrees with numpy's, including the shape argument."""
    for shape in ((3, 3), (3, 2), (5, 3, 2)):
        reference = numpy.random.random(shape)
        device = afnumpy.array(reference)
        fassert(afnumpy.fft.fftn(device), numpy.fft.fftn(reference))
    reference = numpy.random.random((5, 3, 2)) + numpy.random.random((5, 3, 2)) * 1.0j
    device = afnumpy.array(reference)
    fassert(afnumpy.fft.fftn(device), numpy.fft.fftn(reference))
    # Explicit output-shape argument: identical, padded, and truncated sizes.
    reference = numpy.random.random((3, 3))
    device = afnumpy.array(reference)
    for s in ((3, 3), (3, 6), (3, 2)):
        fassert(afnumpy.fft.fftn(device, s), numpy.fft.fftn(reference, s))
def test_ifftn():
    """afnumpy's N-D inverse FFT agrees with numpy's, including the shape argument."""
    # arrayfire offers no real-to-complex inverse FFT, so only the
    # complex-input case is exercised (real-input variants are disabled).
    reference = numpy.random.random((5, 3, 2)) + numpy.random.random((5, 3, 2)) * 1.0j
    device = afnumpy.array(reference)
    fassert(afnumpy.fft.ifftn(device), numpy.fft.ifftn(reference))
    # Explicit output-shape argument: identical, padded, and truncated sizes.
    reference = numpy.random.random((3, 3))
    device = afnumpy.array(reference)
    for s in ((3, 3), (3, 6), (3, 2)):
        fassert(afnumpy.fft.ifftn(device, s), numpy.fft.ifftn(reference, s))
def test_fftshift():
    """afnumpy's fftshift agrees with numpy's for 1-3D inputs and all axes forms."""
    for shape in ((3), (3, 3), (3, 3, 3)):
        reference = numpy.random.random(shape)
        device = afnumpy.array(reference)
        fassert(afnumpy.fft.fftshift(device), numpy.fft.fftshift(reference))
    # On the last (3-D) arrays, also check single-axis and axis-tuple shifts.
    for ax in (0, 1, 2, (1, 2)):
        fassert(afnumpy.fft.fftshift(device, axes=ax), numpy.fft.fftshift(reference, axes=ax))
def test_ifftshift():
    """afnumpy's ifftshift agrees with numpy's for 1-3D inputs and all axes forms."""
    for shape in ((3), (3, 3), (3, 3, 3)):
        reference = numpy.random.random(shape)
        device = afnumpy.array(reference)
        fassert(afnumpy.fft.ifftshift(device), numpy.fft.ifftshift(reference))
    # On the last (3-D) arrays, also check single-axis and axis-tuple shifts.
    for ax in (0, 1, 2, (1, 2)):
        fassert(afnumpy.fft.ifftshift(device, axes=ax), numpy.fft.ifftshift(reference, axes=ax))
| 33.878049
| 83
| 0.62419
| 945
| 5,556
| 3.661376
| 0.043386
| 0.130058
| 0.216185
| 0.166474
| 0.950578
| 0.940462
| 0.861561
| 0.825145
| 0.823988
| 0.823988
| 0
| 0.037793
| 0.171346
| 5,556
| 163
| 84
| 34.08589
| 0.713727
| 0.229482
| 0
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.36
| 1
| 0.08
| false
| 0
| 0.05
| 0
| 0.13
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bce40530a9eb3518c63b0a0f5197c43a451c0ccc
| 390
|
py
|
Python
|
terrascript/resource/boundary.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/boundary.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/boundary.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/boundary.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:13:38 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.resource.boundary
#
# instead of
#
# >>> import terrascript.resource.hashicorp.boundary
#
# This is only available for 'official' and 'partner' providers.
from terrascript.resource.hashicorp.boundary import *
| 26
| 73
| 0.751282
| 49
| 390
| 5.979592
| 0.714286
| 0.259386
| 0.1843
| 0.245734
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035398
| 0.130769
| 390
| 14
| 74
| 27.857143
| 0.828909
| 0.794872
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bcec56bb83865a2c01a4b0312922b8864d0b782c
| 94,261
|
py
|
Python
|
python/gherkin/parser.py
|
nixel2007/gherkin
|
2500471f69a95cd78cf2c6c664d673fb0fae0b54
|
[
"MIT"
] | null | null | null |
python/gherkin/parser.py
|
nixel2007/gherkin
|
2500471f69a95cd78cf2c6c664d673fb0fae0b54
|
[
"MIT"
] | null | null | null |
python/gherkin/parser.py
|
nixel2007/gherkin
|
2500471f69a95cd78cf2c6c664d673fb0fae0b54
|
[
"MIT"
] | null | null | null |
# This file is generated. Do not edit! Edit gherkin-python.razor instead.
from collections import deque
from .ast_builder import AstBuilder
from .token_matcher import TokenMatcher
from .token_scanner import TokenScanner
from .errors import UnexpectedEOFException, UnexpectedTokenException, ParserException, CompositeParserException
# Index-aligned names of the parser's rule types. Entries prefixed with '_'
# are token types; the remaining entries are grammar rules, each annotated
# with the production it was generated from.
RULE_TYPE = [
    'None',
    '_EOF', # #EOF
    '_Empty', # #Empty
    '_Comment', # #Comment
    '_TagLine', # #TagLine
    '_FeatureLine', # #FeatureLine
    '_BackgroundLine', # #BackgroundLine
    '_ScenarioLine', # #ScenarioLine
    '_ScenarioOutlineLine', # #ScenarioOutlineLine
    '_ExamplesLine', # #ExamplesLine
    '_StepLine', # #StepLine
    '_DocStringSeparator', # #DocStringSeparator
    '_TableRow', # #TableRow
    '_Language', # #Language
    '_Other', # #Other
    'Feature', # Feature! := Feature_Header Background? Scenario_Definition*
    'Feature_Header', # Feature_Header! := #Language? Tags? #FeatureLine Feature_Description
    'Background', # Background! := #BackgroundLine Background_Description Scenario_Step*
    'Scenario_Definition', # Scenario_Definition! := Tags? (Scenario | ScenarioOutline)
    'Scenario', # Scenario! := #ScenarioLine Scenario_Description Scenario_Step*
    'ScenarioOutline', # ScenarioOutline! := #ScenarioOutlineLine ScenarioOutline_Description ScenarioOutline_Step* Examples_Definition*
    'Examples_Definition', # Examples_Definition! [#Empty|#Comment|#TagLine->#ExamplesLine] := Tags? Examples
    'Examples', # Examples! := #ExamplesLine Examples_Description Examples_Table?
    'Examples_Table', # Examples_Table! := #TableRow #TableRow*
    'Scenario_Step', # Scenario_Step := Step
    'ScenarioOutline_Step', # ScenarioOutline_Step := Step
    'Step', # Step! := #StepLine Step_Arg?
    'Step_Arg', # Step_Arg := (DataTable | DocString)
    'DataTable', # DataTable! := #TableRow+
    'DocString', # DocString! := #DocStringSeparator #Other* #DocStringSeparator
    'Tags', # Tags! := #TagLine+
    'Feature_Description', # Feature_Description := Description_Helper
    'Background_Description', # Background_Description := Description_Helper
    'Scenario_Description', # Scenario_Description := Description_Helper
    'ScenarioOutline_Description', # ScenarioOutline_Description := Description_Helper
    'Examples_Description', # Examples_Description := Description_Helper
    'Description_Helper', # Description_Helper := #Empty* Description? #Comment*
    'Description', # Description! := #Other+
    ]
class ParserContext(object):
    """Per-parse state bundle threaded through every Parser method:
    the token source, the matcher, a pushback queue, and collected errors."""
    def __init__(self, token_scanner, token_matcher, token_queue, errors):
        # Plain value object: just stash each collaborator on the instance.
        self.errors = errors
        self.token_queue = token_queue
        self.token_matcher = token_matcher
        self.token_scanner = token_scanner
class Parser(object):
def __init__(self, ast_builder=AstBuilder()):
self.ast_builder = ast_builder
self.stop_at_first_error = False
def parse(self, token_scanner_or_str, token_matcher=TokenMatcher()):
token_scanner = TokenScanner(token_scanner_or_str) if isinstance(token_scanner_or_str, str) else token_scanner_or_str
self.ast_builder.reset()
token_matcher.reset()
context = ParserContext(
token_scanner,
token_matcher,
deque(),
[])
self.start_rule(context, 'Feature')
state = 0
token = None
while True:
token = self.read_token(context)
state = self.match_token(state, token, context)
if token.eof():
break
self.end_rule(context, 'Feature')
if context.errors:
raise CompositeParserException(context.errors)
return self.get_result()
    def build(self, context, token):
        # Feed the token to the AST builder, routing failures through
        # handle_ast_error (defined elsewhere in this class).
        self.handle_ast_error(context, token, self.ast_builder.build)
    def add_error(self, context, error):
        # Record a parse error; give up and raise everything collected so
        # far once more than 10 errors have accumulated.
        context.errors.append(error)
        if len(context.errors) > 10:
            raise CompositeParserException(context.errors)
    def start_rule(self, context, rule_type):
        # Open a grammar rule on the AST builder (error-wrapped).
        self.handle_ast_error(context, rule_type, self.ast_builder.start_rule)
    def end_rule(self, context, rule_type):
        # Close a grammar rule on the AST builder (error-wrapped).
        self.handle_ast_error(context, rule_type, self.ast_builder.end_rule)
    def get_result(self):
        # Final AST as assembled by the builder.
        return self.ast_builder.get_result()
    def read_token(self, context):
        # Drain queued tokens first (presumably look-ahead pushback --
        # confirm against the state-machine code), then read fresh ones
        # from the scanner.
        if context.token_queue:
            return context.token_queue.popleft()
        else:
            return context.token_scanner.read()
    # --- Generated token matchers ---------------------------------------
    # Each match_X delegates to the corresponding TokenMatcher method via
    # handle_external_error (defined elsewhere in this class) so matcher
    # exceptions are recorded as parse errors with a False result. All of
    # them except match_EOF short-circuit to False at end of input.
    def match_EOF(self, context, token):
        return self.handle_external_error(context, False, token, context.token_matcher.match_EOF)
    def match_Empty(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_Empty)
    def match_Comment(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_Comment)
    def match_TagLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_TagLine)
    def match_FeatureLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_FeatureLine)
    def match_BackgroundLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_BackgroundLine)
    def match_ScenarioLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_ScenarioLine)
    def match_ScenarioOutlineLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_ScenarioOutlineLine)
    def match_ExamplesLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_ExamplesLine)
    def match_StepLine(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_StepLine)
    def match_DocStringSeparator(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_DocStringSeparator)
    def match_TableRow(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_TableRow)
    def match_Language(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_Language)
    def match_Other(self, context, token):
        if token.eof():
            return False
        return self.handle_external_error(context, False, token, context.token_matcher.match_Other)
def match_token(self, state, token, context):
state_map = {
0: self.match_token_at_0,
1: self.match_token_at_1,
2: self.match_token_at_2,
3: self.match_token_at_3,
4: self.match_token_at_4,
5: self.match_token_at_5,
6: self.match_token_at_6,
7: self.match_token_at_7,
8: self.match_token_at_8,
9: self.match_token_at_9,
10: self.match_token_at_10,
11: self.match_token_at_11,
12: self.match_token_at_12,
13: self.match_token_at_13,
14: self.match_token_at_14,
15: self.match_token_at_15,
16: self.match_token_at_16,
17: self.match_token_at_17,
18: self.match_token_at_18,
19: self.match_token_at_19,
20: self.match_token_at_20,
21: self.match_token_at_21,
22: self.match_token_at_22,
23: self.match_token_at_23,
24: self.match_token_at_24,
25: self.match_token_at_25,
26: self.match_token_at_26,
28: self.match_token_at_28,
29: self.match_token_at_29,
30: self.match_token_at_30,
31: self.match_token_at_31,
32: self.match_token_at_32,
33: self.match_token_at_33,
}
if state in state_map:
return state_map[state](token, context)
else:
raise RuntimeError("Unknown state: " + str(state))
    # --- Generated state-machine handlers --------------------------------
    # Each match_token_at_N tries the tokens legal in state N in order,
    # emits the appropriate start_rule/end_rule/build calls, and returns
    # the next state; an unexpected token is reported (or raised when
    # stop_at_first_error is set) and the state is kept.
    # Start
    def match_token_at_0(self, token, context):
        if self.match_Language(context, token):
            self.start_rule(context, 'Feature_Header')
            self.build(context, token)
            return 1
        if self.match_TagLine(context, token):
            self.start_rule(context, 'Feature_Header')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 2
        if self.match_FeatureLine(context, token):
            self.start_rule(context, 'Feature_Header')
            self.build(context, token)
            return 3
        if self.match_Comment(context, token):
            self.build(context, token)
            return 0
        if self.match_Empty(context, token):
            self.build(context, token)
            return 0
        state_comment = "State: 0 - Start"
        token.detach  # NOTE(review): attribute access only -- 'detach' is never called; likely meant token.detach(). Same pattern in every generated state.
        expected_tokens = ["#Language", "#TagLine", "#FeatureLine", "#Comment", "#Empty"]
        error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
        if (self.stop_at_first_error):
            raise error
        self.add_error(context, error)
        return 0
    # Feature:0>Feature_Header:0>#Language:0
    def match_token_at_1(self, token, context):
        if self.match_TagLine(context, token):
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 2
        if self.match_FeatureLine(context, token):
            self.build(context, token)
            return 3
        if self.match_Comment(context, token):
            self.build(context, token)
            return 1
        if self.match_Empty(context, token):
            self.build(context, token)
            return 1
        state_comment = "State: 1 - Feature:0>Feature_Header:0>#Language:0"
        token.detach  # NOTE(review): no-op attribute access, see match_token_at_0.
        expected_tokens = ["#TagLine", "#FeatureLine", "#Comment", "#Empty"]
        error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
        if (self.stop_at_first_error):
            raise error
        self.add_error(context, error)
        return 1
    # Feature:0>Feature_Header:1>Tags:0>#TagLine:0
    def match_token_at_2(self, token, context):
        if self.match_TagLine(context, token):
            self.build(context, token)
            return 2
        if self.match_FeatureLine(context, token):
            self.end_rule(context, 'Tags')
            self.build(context, token)
            return 3
        if self.match_Comment(context, token):
            self.build(context, token)
            return 2
        if self.match_Empty(context, token):
            self.build(context, token)
            return 2
        state_comment = "State: 2 - Feature:0>Feature_Header:1>Tags:0>#TagLine:0"
        token.detach  # NOTE(review): no-op attribute access, see match_token_at_0.
        expected_tokens = ["#TagLine", "#FeatureLine", "#Comment", "#Empty"]
        error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
        if (self.stop_at_first_error):
            raise error
        self.add_error(context, error)
        return 2
# Feature:0>Feature_Header:2>#FeatureLine:0
def match_token_at_3(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Feature_Header')
self.build(context, token)
return 27
if self.match_Empty(context, token):
self.build(context, token)
return 3
if self.match_Comment(context, token):
self.build(context, token)
return 5
if self.match_BackgroundLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Background')
self.build(context, token)
return 6
if self.match_TagLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.start_rule(context, 'Description')
self.build(context, token)
return 4
state_comment = "State: 3 - Feature:0>Feature_Header:2>#FeatureLine:0"
token.detach
expected_tokens = ["#EOF", "#Empty", "#Comment", "#BackgroundLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 3
# Feature:0>Feature_Header:3>Feature_Description:0>Description_Helper:1>Description:0>#Other:0
def match_token_at_4(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Feature_Header')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.end_rule(context, 'Description')
self.build(context, token)
return 5
if self.match_BackgroundLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Background')
self.build(context, token)
return 6
if self.match_TagLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.build(context, token)
return 4
state_comment = "State: 4 - Feature:0>Feature_Header:3>Feature_Description:0>Description_Helper:1>Description:0>#Other:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#BackgroundLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 4
# Feature:0>Feature_Header:3>Feature_Description:0>Description_Helper:2>#Comment:0
def match_token_at_5(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Feature_Header')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.build(context, token)
return 5
if self.match_BackgroundLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Background')
self.build(context, token)
return 6
if self.match_TagLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Feature_Header')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Empty(context, token):
self.build(context, token)
return 5
state_comment = "State: 5 - Feature:0>Feature_Header:3>Feature_Description:0>Description_Helper:2>#Comment:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#BackgroundLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 5
# Feature:1>Background:0>#BackgroundLine:0
def match_token_at_6(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Background')
self.build(context, token)
return 27
if self.match_Empty(context, token):
self.build(context, token)
return 6
if self.match_Comment(context, token):
self.build(context, token)
return 8
if self.match_StepLine(context, token):
self.start_rule(context, 'Step')
self.build(context, token)
return 9
if self.match_TagLine(context, token):
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.start_rule(context, 'Description')
self.build(context, token)
return 7
state_comment = "State: 6 - Feature:1>Background:0>#BackgroundLine:0"
token.detach
expected_tokens = ["#EOF", "#Empty", "#Comment", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 6
# Feature:1>Background:1>Background_Description:0>Description_Helper:1>Description:0>#Other:0
def match_token_at_7(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Background')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.end_rule(context, 'Description')
self.build(context, token)
return 8
if self.match_StepLine(context, token):
self.end_rule(context, 'Description')
self.start_rule(context, 'Step')
self.build(context, token)
return 9
if self.match_TagLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.build(context, token)
return 7
state_comment = "State: 7 - Feature:1>Background:1>Background_Description:0>Description_Helper:1>Description:0>#Other:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 7
# Feature:1>Background:1>Background_Description:0>Description_Helper:2>#Comment:0
def match_token_at_8(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Background')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.build(context, token)
return 8
if self.match_StepLine(context, token):
self.start_rule(context, 'Step')
self.build(context, token)
return 9
if self.match_TagLine(context, token):
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Empty(context, token):
self.build(context, token)
return 8
state_comment = "State: 8 - Feature:1>Background:1>Background_Description:0>Description_Helper:2>#Comment:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 8
# Feature:1>Background:2>Scenario_Step:0>Step:0>#StepLine:0
def match_token_at_9(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.build(context, token)
return 27
if self.match_TableRow(context, token):
self.start_rule(context, 'DataTable')
self.build(context, token)
return 10
if self.match_DocStringSeparator(context, token):
self.start_rule(context, 'DocString')
self.build(context, token)
return 32
if self.match_StepLine(context, token):
self.end_rule(context, 'Step')
self.start_rule(context, 'Step')
self.build(context, token)
return 9
if self.match_TagLine(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Comment(context, token):
self.build(context, token)
return 9
if self.match_Empty(context, token):
self.build(context, token)
return 9
state_comment = "State: 9 - Feature:1>Background:2>Scenario_Step:0>Step:0>#StepLine:0"
token.detach
expected_tokens = ["#EOF", "#TableRow", "#DocStringSeparator", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 9
# Feature:1>Background:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:0>DataTable:0>#TableRow:0
def match_token_at_10(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.build(context, token)
return 27
if self.match_TableRow(context, token):
self.build(context, token)
return 10
if self.match_StepLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.start_rule(context, 'Step')
self.build(context, token)
return 9
if self.match_TagLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Background')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Comment(context, token):
self.build(context, token)
return 10
if self.match_Empty(context, token):
self.build(context, token)
return 10
state_comment = "State: 10 - Feature:1>Background:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:0>DataTable:0>#TableRow:0"
token.detach
expected_tokens = ["#EOF", "#TableRow", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 10
# Feature:2>Scenario_Definition:0>Tags:0>#TagLine:0
def match_token_at_11(self, token, context):
if self.match_TagLine(context, token):
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Tags')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Tags')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Comment(context, token):
self.build(context, token)
return 11
if self.match_Empty(context, token):
self.build(context, token)
return 11
state_comment = "State: 11 - Feature:2>Scenario_Definition:0>Tags:0>#TagLine:0"
token.detach
expected_tokens = ["#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 11
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:0>#ScenarioLine:0
def match_token_at_12(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_Empty(context, token):
self.build(context, token)
return 12
if self.match_Comment(context, token):
self.build(context, token)
return 14
if self.match_StepLine(context, token):
self.start_rule(context, 'Step')
self.build(context, token)
return 15
if self.match_TagLine(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.start_rule(context, 'Description')
self.build(context, token)
return 13
state_comment = "State: 12 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:0>#ScenarioLine:0"
token.detach
expected_tokens = ["#EOF", "#Empty", "#Comment", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 12
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:1>Scenario_Description:0>Description_Helper:1>Description:0>#Other:0
def match_token_at_13(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.end_rule(context, 'Description')
self.build(context, token)
return 14
if self.match_StepLine(context, token):
self.end_rule(context, 'Description')
self.start_rule(context, 'Step')
self.build(context, token)
return 15
if self.match_TagLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.build(context, token)
return 13
state_comment = "State: 13 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:1>Scenario_Description:0>Description_Helper:1>Description:0>#Other:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 13
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:1>Scenario_Description:0>Description_Helper:2>#Comment:0
def match_token_at_14(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.build(context, token)
return 14
if self.match_StepLine(context, token):
self.start_rule(context, 'Step')
self.build(context, token)
return 15
if self.match_TagLine(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Empty(context, token):
self.build(context, token)
return 14
state_comment = "State: 14 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:1>Scenario_Description:0>Description_Helper:2>#Comment:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 14
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:0>#StepLine:0
def match_token_at_15(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_TableRow(context, token):
self.start_rule(context, 'DataTable')
self.build(context, token)
return 16
if self.match_DocStringSeparator(context, token):
self.start_rule(context, 'DocString')
self.build(context, token)
return 30
if self.match_StepLine(context, token):
self.end_rule(context, 'Step')
self.start_rule(context, 'Step')
self.build(context, token)
return 15
if self.match_TagLine(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Comment(context, token):
self.build(context, token)
return 15
if self.match_Empty(context, token):
self.build(context, token)
return 15
state_comment = "State: 15 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:0>#StepLine:0"
token.detach
expected_tokens = ["#EOF", "#TableRow", "#DocStringSeparator", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 15
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:0>DataTable:0>#TableRow:0
def match_token_at_16(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_TableRow(context, token):
self.build(context, token)
return 16
if self.match_StepLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.start_rule(context, 'Step')
self.build(context, token)
return 15
if self.match_TagLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'DataTable')
self.end_rule(context, 'Step')
self.end_rule(context, 'Scenario')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Comment(context, token):
self.build(context, token)
return 16
if self.match_Empty(context, token):
self.build(context, token)
return 16
state_comment = "State: 16 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:0>DataTable:0>#TableRow:0"
token.detach
expected_tokens = ["#EOF", "#TableRow", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 16
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:0>#ScenarioOutlineLine:0
def match_token_at_17(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_Empty(context, token):
self.build(context, token)
return 17
if self.match_Comment(context, token):
self.build(context, token)
return 19
if self.match_StepLine(context, token):
self.start_rule(context, 'Step')
self.build(context, token)
return 20
if self.match_TagLine(context, token):
if self.lookahead_0(context, token):
self.start_rule(context, 'Examples_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 22
if self.match_TagLine(context, token):
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ExamplesLine(context, token):
self.start_rule(context, 'Examples_Definition')
self.start_rule(context, 'Examples')
self.build(context, token)
return 23
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.start_rule(context, 'Description')
self.build(context, token)
return 18
state_comment = "State: 17 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:0>#ScenarioOutlineLine:0"
token.detach
expected_tokens = ["#EOF", "#Empty", "#Comment", "#StepLine", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 17
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:1>ScenarioOutline_Description:0>Description_Helper:1>Description:0>#Other:0
def match_token_at_18(self, token, context):
if self.match_EOF(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.build(context, token)
return 27
if self.match_Comment(context, token):
self.end_rule(context, 'Description')
self.build(context, token)
return 19
if self.match_StepLine(context, token):
self.end_rule(context, 'Description')
self.start_rule(context, 'Step')
self.build(context, token)
return 20
if self.match_TagLine(context, token):
if self.lookahead_0(context, token):
self.end_rule(context, 'Description')
self.start_rule(context, 'Examples_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 22
if self.match_TagLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Tags')
self.build(context, token)
return 11
if self.match_ExamplesLine(context, token):
self.end_rule(context, 'Description')
self.start_rule(context, 'Examples_Definition')
self.start_rule(context, 'Examples')
self.build(context, token)
return 23
if self.match_ScenarioLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario')
self.build(context, token)
return 12
if self.match_ScenarioOutlineLine(context, token):
self.end_rule(context, 'Description')
self.end_rule(context, 'ScenarioOutline')
self.end_rule(context, 'Scenario_Definition')
self.start_rule(context, 'Scenario_Definition')
self.start_rule(context, 'ScenarioOutline')
self.build(context, token)
return 17
if self.match_Other(context, token):
self.build(context, token)
return 18
state_comment = "State: 18 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:1>ScenarioOutline_Description:0>Description_Helper:1>Description:0>#Other:0"
token.detach
expected_tokens = ["#EOF", "#Comment", "#StepLine", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
if (self.stop_at_first_error):
raise error
self.add_error(context, error)
return 18
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:1>ScenarioOutline_Description:0>Description_Helper:2>#Comment:0
def match_token_at_19(self, token, context):
    """State 19 (comment in a ScenarioOutline description): consume *token*,
    emit start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_Comment(context, token):
        self.build(context, token)
        return 19
    if self.match_StepLine(context, token):
        self.start_rule(context, 'Step')
        self.build(context, token)
        return 20
    if self.match_TagLine(context, token):
        # Tags open an Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Empty(context, token):
        self.build(context, token)
        return 19
    state_comment = "State: 19 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:1>ScenarioOutline_Description:0>Description_Helper:2>#Comment:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#Comment", "#StepLine", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 19
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:0>#StepLine:0
def match_token_at_20(self, token, context):
    """State 20 (#StepLine inside a ScenarioOutline): consume *token*, emit
    start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_TableRow(context, token):
        self.start_rule(context, 'DataTable')
        self.build(context, token)
        return 21
    if self.match_DocStringSeparator(context, token):
        self.start_rule(context, 'DocString')
        self.build(context, token)
        return 28
    if self.match_StepLine(context, token):
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Step')
        self.build(context, token)
        return 20
    if self.match_TagLine(context, token):
        # Tags open an Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'Step')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Comment(context, token):
        self.build(context, token)
        return 20
    if self.match_Empty(context, token):
        self.build(context, token)
        return 20
    state_comment = "State: 20 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:0>#StepLine:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#TableRow", "#DocStringSeparator", "#StepLine", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 20
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:1>Step_Arg:0>__alt1:0>DataTable:0>#TableRow:0
def match_token_at_21(self, token, context):
    """State 21 (#TableRow of a Step DataTable in a ScenarioOutline): consume
    *token*, emit start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'DataTable')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_TableRow(context, token):
        self.build(context, token)
        return 21
    if self.match_StepLine(context, token):
        self.end_rule(context, 'DataTable')
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Step')
        self.build(context, token)
        return 20
    if self.match_TagLine(context, token):
        # Tags open an Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'DataTable')
            self.end_rule(context, 'Step')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'DataTable')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'DataTable')
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'DataTable')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'DataTable')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Comment(context, token):
        self.build(context, token)
        return 21
    if self.match_Empty(context, token):
        self.build(context, token)
        return 21
    state_comment = "State: 21 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:1>Step_Arg:0>__alt1:0>DataTable:0>#TableRow:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#TableRow", "#StepLine", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 21
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:0>Tags:0>#TagLine:0
def match_token_at_22(self, token, context):
    """State 22 (#TagLine opening an Examples definition): consume *token*,
    emit start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_TagLine(context, token):
        self.build(context, token)
        return 22
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'Tags')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_Comment(context, token):
        self.build(context, token)
        return 22
    if self.match_Empty(context, token):
        self.build(context, token)
        return 22
    state_comment = "State: 22 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:0>Tags:0>#TagLine:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#TagLine", "#ExamplesLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 22
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:0>#ExamplesLine:0
def match_token_at_23(self, token, context):
    """State 23 (#ExamplesLine of an Examples definition): consume *token*,
    emit start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_Empty(context, token):
        self.build(context, token)
        return 23
    if self.match_Comment(context, token):
        self.build(context, token)
        return 25
    if self.match_TableRow(context, token):
        self.start_rule(context, 'Examples_Table')
        self.build(context, token)
        return 26
    if self.match_TagLine(context, token):
        # Tags open another Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'Examples')
            self.end_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Other(context, token):
        self.start_rule(context, 'Description')
        self.build(context, token)
        return 24
    state_comment = "State: 23 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:0>#ExamplesLine:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#Empty", "#Comment", "#TableRow", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 23
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:1>Examples_Description:0>Description_Helper:1>Description:0>#Other:0
def match_token_at_24(self, token, context):
    """State 24 (#Other line of an Examples description): consume *token*,
    emit start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'Description')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_Comment(context, token):
        self.end_rule(context, 'Description')
        self.build(context, token)
        return 25
    if self.match_TableRow(context, token):
        self.end_rule(context, 'Description')
        self.start_rule(context, 'Examples_Table')
        self.build(context, token)
        return 26
    if self.match_TagLine(context, token):
        # Tags open another Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'Description')
            self.end_rule(context, 'Examples')
            self.end_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'Description')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'Description')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'Description')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'Description')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Other(context, token):
        self.build(context, token)
        return 24
    state_comment = "State: 24 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:1>Examples_Description:0>Description_Helper:1>Description:0>#Other:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#Comment", "#TableRow", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Other"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 24
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:1>Examples_Description:0>Description_Helper:2>#Comment:0
def match_token_at_25(self, token, context):
    """State 25 (#Comment after an Examples description): consume *token*,
    emit start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_Comment(context, token):
        self.build(context, token)
        return 25
    if self.match_TableRow(context, token):
        self.start_rule(context, 'Examples_Table')
        self.build(context, token)
        return 26
    if self.match_TagLine(context, token):
        # Tags open another Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'Examples')
            self.end_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Empty(context, token):
        self.build(context, token)
        return 25
    state_comment = "State: 25 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:1>Examples_Description:0>Description_Helper:2>#Comment:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#Comment", "#TableRow", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 25
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:2>Examples_Table:0>#TableRow:0
def match_token_at_26(self, token, context):
    """State 26 (#TableRow of an Examples table): consume *token*, emit
    start/end rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'Examples_Table')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_TableRow(context, token):
        self.build(context, token)
        return 26
    if self.match_TagLine(context, token):
        # Tags open another Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'Examples_Table')
            self.end_rule(context, 'Examples')
            self.end_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'Examples_Table')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'Examples_Table')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'Examples_Table')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'Examples_Table')
        self.end_rule(context, 'Examples')
        self.end_rule(context, 'Examples_Definition')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Comment(context, token):
        self.build(context, token)
        return 26
    if self.match_Empty(context, token):
        self.build(context, token)
        return 26
    state_comment = "State: 26 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:3>Examples_Definition:1>Examples:2>Examples_Table:0>#TableRow:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#TableRow", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 26
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:0>#DocStringSeparator:0
def match_token_at_28(self, token, context):
    """State 28 (inside a ScenarioOutline step DocString): consume *token*
    and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_DocStringSeparator(context, token):
        self.build(context, token)
        return 29
    if self.match_Other(context, token):
        self.build(context, token)
        return 28
    state_comment = "State: 28 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:0>#DocStringSeparator:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#DocStringSeparator", "#Other"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 28
# Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:2>#DocStringSeparator:0
def match_token_at_29(self, token, context):
    """State 29 (after the closing DocString separator of a ScenarioOutline
    step): consume *token*, emit rule events, and return the next state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_StepLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Step')
        self.build(context, token)
        return 20
    if self.match_TagLine(context, token):
        # Tags open an Examples block only when an #ExamplesLine follows.
        if self.lookahead_0(context, token):
            self.end_rule(context, 'DocString')
            self.end_rule(context, 'Step')
            self.start_rule(context, 'Examples_Definition')
            self.start_rule(context, 'Tags')
            self.build(context, token)
            return 22
    if self.match_TagLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ExamplesLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Examples_Definition')
        self.start_rule(context, 'Examples')
        self.build(context, token)
        return 23
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'ScenarioOutline')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Comment(context, token):
        self.build(context, token)
        return 29
    if self.match_Empty(context, token):
        self.build(context, token)
        return 29
    state_comment = "State: 29 - Feature:2>Scenario_Definition:1>__alt0:1>ScenarioOutline:2>ScenarioOutline_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:2>#DocStringSeparator:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#StepLine", "#TagLine", "#ExamplesLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 29
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:0>#DocStringSeparator:0
def match_token_at_30(self, token, context):
    """State 30 (inside a Scenario step DocString): consume *token* and
    return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_DocStringSeparator(context, token):
        self.build(context, token)
        return 31
    if self.match_Other(context, token):
        self.build(context, token)
        return 30
    state_comment = "State: 30 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:0>#DocStringSeparator:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#DocStringSeparator", "#Other"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 30
# Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:2>#DocStringSeparator:0
def match_token_at_31(self, token, context):
    """State 31 (after the closing DocString separator of a Scenario step):
    consume *token*, emit rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Scenario')
        self.end_rule(context, 'Scenario_Definition')
        self.build(context, token)
        return 27
    if self.match_StepLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Step')
        self.build(context, token)
        return 15
    if self.match_TagLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Scenario')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Scenario')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Scenario')
        self.end_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Comment(context, token):
        self.build(context, token)
        return 31
    if self.match_Empty(context, token):
        self.build(context, token)
        return 31
    state_comment = "State: 31 - Feature:2>Scenario_Definition:1>__alt0:0>Scenario:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:2>#DocStringSeparator:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 31
# Feature:1>Background:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:0>#DocStringSeparator:0
def match_token_at_32(self, token, context):
    """State 32 (inside a Background step DocString): consume *token* and
    return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_DocStringSeparator(context, token):
        self.build(context, token)
        return 33
    if self.match_Other(context, token):
        self.build(context, token)
        return 32
    state_comment = "State: 32 - Feature:1>Background:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:0>#DocStringSeparator:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#DocStringSeparator", "#Other"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 32
# Feature:1>Background:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:2>#DocStringSeparator:0
def match_token_at_33(self, token, context):
    """State 33 (after the closing DocString separator of a Background step):
    consume *token*, emit rule events, and return the next parser state.

    An unexpected token raises when stop_at_first_error is set; otherwise the
    error is recorded on *context* and the state is left unchanged.
    """
    if self.match_EOF(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Background')
        self.build(context, token)
        return 27
    if self.match_StepLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.start_rule(context, 'Step')
        self.build(context, token)
        return 9
    if self.match_TagLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Background')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Tags')
        self.build(context, token)
        return 11
    if self.match_ScenarioLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Background')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'Scenario')
        self.build(context, token)
        return 12
    if self.match_ScenarioOutlineLine(context, token):
        self.end_rule(context, 'DocString')
        self.end_rule(context, 'Step')
        self.end_rule(context, 'Background')
        self.start_rule(context, 'Scenario_Definition')
        self.start_rule(context, 'ScenarioOutline')
        self.build(context, token)
        return 17
    if self.match_Comment(context, token):
        self.build(context, token)
        return 33
    if self.match_Empty(context, token):
        self.build(context, token)
        return 33
    state_comment = "State: 33 - Feature:1>Background:2>Scenario_Step:0>Step:1>Step_Arg:0>__alt1:1>DocString:2>#DocStringSeparator:0"
    token.detach()  # was a no-op bare attribute access; detach must be called
    expected_tokens = ["#EOF", "#StepLine", "#TagLine", "#ScenarioLine", "#ScenarioOutlineLine", "#Comment", "#Empty"]
    error = UnexpectedEOFException(token, expected_tokens, state_comment) if token.eof() else UnexpectedTokenException(token, expected_tokens, state_comment)
    if self.stop_at_first_error:
        raise error
    self.add_error(context, error)
    return 33
def lookahead_0(self, context, currentToken):
    """Peek ahead in the token stream: return True if an #ExamplesLine is
    reachable through a run of empty/comment/tag lines.

    All tokens consumed during the peek are pushed back onto the context's
    token queue so that normal matching can re-read them.
    """
    currentToken.detach()  # was a no-op bare attribute access; detach must be called
    token = None
    queue = []
    match = False
    while True:
        token = self.read_token(context)
        token.detach()  # same no-op fix as above
        queue.append(token)
        if self.match_ExamplesLine(context, token):
            match = True
            break
        # Keep scanning only through tokens that may precede an Examples block.
        if not (self.match_Empty(context, token) or self.match_Comment(context, token) or self.match_TagLine(context, token)):
            break
    context.token_queue.extend(queue)
    return match
# private
def handle_ast_error(self, context, argument, action):
    """Run *action* on *argument*, recording AST-building parser errors on
    *context* instead of letting them propagate (unless stop_at_first_error).

    Delegates to handle_external_error with a default value of True; the
    result of *action* is intentionally discarded here.
    """
    self.handle_external_error(context, True, argument, action)
def handle_external_error(self, context, default_value, argument, action):
    """Invoke ``action(argument)``, converting parser failures into recorded
    errors.

    When stop_at_first_error is set, the call is made directly and any
    exception propagates to the caller. Otherwise CompositeParserException
    and ParserException are caught, their errors added to *context*, and
    *default_value* is returned in place of the action's result.
    """
    if self.stop_at_first_error:
        return action(argument)
    try:
        return action(argument)
    except (CompositeParserException, ParserException) as exc:
        # A composite exception carries several child errors; record each one.
        if isinstance(exc, CompositeParserException):
            for child in exc.errors:
                self.add_error(context, child)
        else:
            self.add_error(context, exc)
    return default_value
| 48.264721
| 196
| 0.601405
| 9,985
| 94,261
| 5.472609
| 0.015724
| 0.116554
| 0.06945
| 0.113645
| 0.909999
| 0.903832
| 0.900849
| 0.896841
| 0.894371
| 0.887453
| 0
| 0.018619
| 0.298024
| 94,261
| 1,952
| 197
| 48.289447
| 0.807206
| 0.04757
| 0
| 0.859194
| 1
| 0.014909
| 0.148477
| 0.035432
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033131
| false
| 0
| 0.002761
| 0.001104
| 0.205964
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4c34c0371ef9df8e11386e065ce1e5ff700d4004
| 17,836
|
py
|
Python
|
tests/ut/python/parallel/test_arithmetic.py
|
tjulitianyi1997/mindspore
|
c802a8c31fe2b51530d932fdd364824e45264b12
|
[
"Apache-2.0"
] | 2
|
2020-04-28T03:49:10.000Z
|
2020-04-28T03:49:13.000Z
|
tests/ut/python/parallel/test_arithmetic.py
|
tjulitianyi1997/mindspore
|
c802a8c31fe2b51530d932fdd364824e45264b12
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/python/parallel/test_arithmetic.py
|
tjulitianyi1997/mindspore
|
c802a8c31fe2b51530d932fdd364824e45264b12
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import mindspore as ms
from mindspore import Parameter, Tensor, context
import mindspore.nn as nn
from mindspore.ops import operations as P
from mindspore.ops import composite as C
from mindspore.common.api import _executor
from tests.ut.python.ops.test_math_ops import VirtualLoss
class NetWithLoss(nn.Cell):
    """Wrap a network so its three-input output feeds a VirtualLoss."""

    def __init__(self, network):
        super(NetWithLoss, self).__init__()
        self.loss = VirtualLoss()
        self.network = network

    def construct(self, x, y, b):
        return self.loss(self.network(x, y, b))
class GradWrap(nn.Cell):
    """Wrap a network to return gradients w.r.t. all three inputs."""

    def __init__(self, network):
        super(GradWrap, self).__init__()
        self.network = network

    def construct(self, x, y, b):
        grad_fn = C.grad_all(self.network)
        return grad_fn(x, y, b)
def test_matmul_sub():
    """Compile MatMul -> Sub with explicit 8-device shard strategies."""
    class SubNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.sub = P.Sub().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.sub(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(SubNet(((2, 2), (2, 2)), ((4, 2), (4, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_add():
    """Compile MatMul -> TensorAdd with explicit 8-device shard strategies."""
    class AddNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.add = P.TensorAdd().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.add(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(AddNet(((2, 2), (2, 2)), ((4, 2), (4, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_mul():
    """Compile MatMul -> Mul with explicit 8-device shard strategies."""
    class MulNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.mul = P.Mul().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.mul(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(MulNet(((2, 2), (2, 2)), ((4, 2), (4, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_div():
    """Compile MatMul -> Div with explicit 8-device shard strategies."""
    class DivNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.div = P.Div().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.div(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(DivNet(((2, 2), (2, 2)), ((4, 2), (4, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_greater():
    """Compile MatMul -> Greater with explicit 8-device shard strategies."""
    class GreaterNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.greater = P.Greater().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.greater(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(GreaterNet(((2, 2), (2, 2)), ((4, 2), (4, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_add_broadcast():
    """MatMul -> TensorAdd where the second operand is a 1-D broadcast."""
    class AddNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.add = P.TensorAdd().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.add(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(AddNet(((2, 2), (2, 2)), ((4, 2), (2,)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_add_broadcast2():
    """MatMul -> TensorAdd broadcasting a (64, 1) result against (1, 64)."""
    class AddNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.add = P.TensorAdd().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.add(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(AddNet(((2, 4), (4, 1)), ((4, 1), (1, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 1], [1, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_sub_broadcast():
    """MatMul -> Sub where the second operand is a 1-D broadcast."""
    class SubNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.sub = P.Sub().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.sub(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(SubNet(((2, 2), (2, 2)), ((4, 2), (2,)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_sub_broadcast2():
    """MatMul -> Sub broadcasting a (64, 1) result against (1, 64)."""
    class SubNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.sub = P.Sub().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.sub(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(SubNet(((2, 4), (4, 1)), ((4, 1), (1, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 1], [1, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_mul_broadcast():
    """MatMul -> Mul where the second operand is a 1-D broadcast."""
    class MulNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.mul = P.Mul().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.mul(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(MulNet(((2, 2), (2, 2)), ((4, 2), (2,)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_mul_broadcast2():
    """MatMul -> Mul broadcasting a (64, 1) result against (1, 64)."""
    class MulNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.mul = P.Mul().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.mul(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(MulNet(((2, 4), (4, 1)), ((4, 1), (1, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 1], [1, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_div_broadcast():
    """MatMul -> Div where the second operand is a 1-D broadcast."""
    class DivNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.div = P.Div().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.div(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(DivNet(((2, 2), (2, 2)), ((4, 2), (2,)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_div_broadcast2():
    """MatMul -> Div broadcasting a (64, 1) result against (1, 64)."""
    class DivNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.div = P.Div().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.div(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(DivNet(((2, 4), (4, 1)), ((4, 1), (1, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 1], [1, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_greater_broadcast():
    """MatMul -> Greater where the second operand is a 1-D broadcast."""
    class GreaterNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.greater = P.Greater().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.greater(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(GreaterNet(((2, 2), (2, 2)), ((4, 2), (2,)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_greater_broadcast2():
    """MatMul -> Greater broadcasting a (64, 1) result against (1, 64)."""
    class GreaterNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.greater = P.Greater().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.greater(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(GreaterNet(((2, 4), (4, 1)), ((4, 1), (1, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 1], [1, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_floordiv():
    """Compile MatMul -> FloorDiv with explicit 8-device shard strategies."""
    class FloorDivNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.floordiv = P.FloorDiv().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.floordiv(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(FloorDivNet(((2, 2), (2, 2)), ((4, 2), (4, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64, 64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_floordiv_broadcast():
    """MatMul -> FloorDiv where the second operand is a 1-D broadcast."""
    class FloorDivNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.floordiv = P.FloorDiv().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.floordiv(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(FloorDivNet(((2, 2), (2, 2)), ((4, 2), (2,)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 64], [64])]
    _executor.compile(wrapped, *inputs)
def test_matmul_floordiv_broadcast2():
    """MatMul -> FloorDiv broadcasting a (64, 1) result against (1, 64)."""
    class FloorDivNet(nn.Cell):
        def __init__(self, mm_strategy, ew_strategy):
            super().__init__()
            self.matmul = P.MatMul().set_strategy(mm_strategy)
            self.floordiv = P.FloorDiv().set_strategy(ew_strategy)

        def construct(self, x, y, b):
            return self.floordiv(self.matmul(x, y), b)

    context.set_auto_parallel_context(device_num=8, global_rank=0)
    wrapped = GradWrap(NetWithLoss(FloorDivNet(((2, 4), (4, 1)), ((4, 1), (1, 2)))))
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    inputs = [Tensor(np.ones(shape), dtype=ms.float32)
              for shape in ([64, 32], [32, 1], [1, 64])]
    _executor.compile(wrapped, *inputs)
def test_assign_sub():
    """Compile Mul -> AssignSub updating a parameter on a 64-device layout."""
    class AssignSubNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.assign_sub = P.AssignSub()
            self.mul = P.Mul()
            self.mul_weight = Parameter(
                Tensor(np.full([128, 32], 0.5, dtype=np.float32)),
                name="mul_weight")
            self.assignsub_weight = Parameter(
                Tensor(np.full([128, 32], 1.1, dtype=np.float32)),
                name="assignsub_weight")

        def construct(self, x, y, z):
            scaled = self.mul(x, self.mul_weight)
            return self.assign_sub(self.assignsub_weight, scaled)

    context.set_auto_parallel_context(device_num=64, global_rank=15)
    context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
    wrapped = GradWrap(NetWithLoss(AssignSubNet()))
    inputs = [Tensor(np.ones([128, 32]), dtype=ms.float32) for _ in range(3)]
    _executor.compile(wrapped, *inputs)
| 35.743487
| 74
| 0.614319
| 2,448
| 17,836
| 4.274101
| 0.060866
| 0.011469
| 0.065373
| 0.079901
| 0.898308
| 0.894199
| 0.894199
| 0.881009
| 0.867151
| 0.855586
| 0
| 0.049742
| 0.239179
| 17,836
| 498
| 75
| 35.815261
| 0.721297
| 0.031453
| 0
| 0.863049
| 0
| 0
| 0.02132
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157623
| false
| 0
| 0.020672
| 0.002584
| 0.286822
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c47b560982e1a0252dd550f078abe4f377f3729
| 3,689
|
py
|
Python
|
pretrain/pri3d/dataset/collate_fn.py
|
kudo1026/Pri3D
|
8bf8a3ec4393db3da6c0662ff49d5788ea188e20
|
[
"MIT"
] | 103
|
2021-07-06T17:05:03.000Z
|
2022-03-30T06:10:04.000Z
|
pretrain/pri3d/dataset/collate_fn.py
|
kudo1026/Pri3D
|
8bf8a3ec4393db3da6c0662ff49d5788ea188e20
|
[
"MIT"
] | 5
|
2021-10-13T14:35:32.000Z
|
2022-03-31T23:40:44.000Z
|
pretrain/pri3d/dataset/collate_fn.py
|
kudo1026/Pri3D
|
8bf8a3ec4393db3da6c0662ff49d5788ea188e20
|
[
"MIT"
] | 3
|
2021-09-24T09:00:44.000Z
|
2021-10-14T19:17:01.000Z
|
import torch
import numpy as np
from torch.utils.data import Dataset
def checktype(obj):
    """Return True when *obj* is a non-empty collection of strings only."""
    if not obj:
        return False
    for elem in obj:
        if not isinstance(elem, str):
            return False
    return True
class collate_fn_factory:
    """Build a collate function for batching per-sample dicts of arrays.

    Entries whose name appears in *keywords* get a leading batch-id column
    prepended and are concatenated along dim 0 (point-cloud style data);
    lists made entirely of strings pass through as plain lists; 'id'
    entries stay as lists; every other entry is stacked into a new batch
    dimension.

    Args:
        keywords: names of entries to treat as per-point coordinate data.
    """

    def __init__(self, keywords=('xyz',)):
        # Immutable default avoids the shared-mutable-default pitfall;
        # any container supporting `in` is accepted, as before.
        self.keywords = keywords

    def __call__(self, list_data):
        data_dict = {}
        for batch_id, sample in enumerate(list_data):
            for name, data in sample.items():
                if name in self.keywords:
                    # Prepend the batch index as a column so concatenated
                    # points remain attributable to their sample.
                    num_points = data.shape[0]
                    batch_column = torch.ones(num_points, 1) * batch_id
                    data = np.concatenate([batch_column, data], -1)
                data_dict.setdefault(name, []).append(data)
        # Convert accumulated lists to tensors.
        for name in data_dict:
            if checktype(data_dict[name]):
                continue  # list of strings: pass through untouched
            if name in self.keywords:
                data_dict[name] = torch.from_numpy(np.concatenate(data_dict[name], 0))
            elif name != 'id':
                data_dict[name] = torch.from_numpy(np.stack(data_dict[name], 0))
        return data_dict
class collate_fn_factory_triplet:
    """Build a collate function that expands view triplets into pairs.

    Each sample holds three views (suffixes '1', '2', '3') of color,
    depth, id and camera2world data. Every sample contributes the three
    ordered view pairs (1,2), (1,3) and (2,3) to the batch under the
    '...1'/'...2' keys. String-only lists (the ids) pass through as plain
    lists; entries named in *keywords* are concatenated along dim 0;
    everything else is stacked into a new batch dimension.

    Args:
        keywords: names of entries to concatenate instead of stack.
    """

    # Ordered view pairs each sample contributes to the batch.
    _PAIRS = (('1', '2'), ('1', '3'), ('2', '3'))
    _FIELDS = ('color', 'depth', 'id', 'camera2world')

    def __init__(self, keywords=('xyz',)):
        # Immutable default avoids the shared-mutable-default pitfall.
        self.keywords = keywords

    def __call__(self, list_data):
        data_dict = {field + slot: []
                     for field in self._FIELDS for slot in ('1', '2')}
        for sample in list_data:
            for left, right in self._PAIRS:
                for field in self._FIELDS:
                    data_dict[field + '1'].append(sample[field + left])
                    data_dict[field + '2'].append(sample[field + right])
        # Convert accumulated lists to tensors.
        for name in data_dict:
            if checktype(data_dict[name]):
                continue  # list of strings: pass through untouched
            if name in self.keywords:
                data_dict[name] = torch.from_numpy(np.concatenate(data_dict[name], 0))
            elif name != 'id':
                data_dict[name] = torch.from_numpy(np.stack(data_dict[name], 0))
        return data_dict
| 38.030928
| 98
| 0.637842
| 465
| 3,689
| 4.873118
| 0.195699
| 0.165931
| 0.084731
| 0.030009
| 0.84113
| 0.793469
| 0.75684
| 0.75684
| 0.725949
| 0.725949
| 0
| 0.027469
| 0.220385
| 3,689
| 97
| 99
| 38.030928
| 0.760431
| 0.144755
| 0
| 0.633803
| 1
| 0
| 0.129094
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070423
| false
| 0
| 0.042254
| 0.014085
| 0.183099
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c6c0ebf5cdb3e7e16200dadb8f8aa75f6bab464
| 16,049
|
py
|
Python
|
geotrek/core/tests/test_permissions.py
|
pierreloicq/Geotrek-admin
|
00cd29f29843f2cc25e5a3c7372fcccf14956887
|
[
"BSD-2-Clause"
] | 50
|
2016-10-19T23:01:21.000Z
|
2022-03-28T08:28:34.000Z
|
geotrek/core/tests/test_permissions.py
|
pierreloicq/Geotrek-admin
|
00cd29f29843f2cc25e5a3c7372fcccf14956887
|
[
"BSD-2-Clause"
] | 1,422
|
2016-10-27T10:39:40.000Z
|
2022-03-31T13:37:10.000Z
|
geotrek/core/tests/test_permissions.py
|
pierreloicq/Geotrek-admin
|
00cd29f29843f2cc25e5a3c7372fcccf14956887
|
[
"BSD-2-Clause"
] | 46
|
2016-10-27T10:59:10.000Z
|
2022-03-22T15:55:56.000Z
|
from unittest import skipIf
from django.test import TestCase
from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.gis.geos import LineString
from django.urls import reverse
from mapentity.factories import UserFactory
from geotrek.core.factories import PathFactory, ComfortFactory
from geotrek.core.models import Path
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class PermissionDraftPath(TestCase):
def setUp(self):
self.user = UserFactory.create(password='booh')
def get_good_data(self):
return {
'name': '',
'stake': '',
'comfort': ComfortFactory.create().pk,
'trail': '',
'comments': '',
'departure': '',
'arrival': '',
'source': '',
'valid': 'on',
'geom': '{"geom": "LINESTRING (99.0 89.0, 100.0 88.0)", "snap": [null, null]}',
}
def test_permission_view_add_path_with_draft_permission(self):
"""
Check draft checkbox not visible if user have only add_path permission
"""
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/add/')
self.assertEqual(response.status_code, 302)
self.user.user_permissions.add(Permission.objects.get(codename='add_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/add/')
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="draft"')
def test_permission_view_add_path_without_draft_permission(self):
"""
Check draft checkbox not visible if user have only add_path permission
"""
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/add/')
self.assertEqual(response.status_code, 302)
self.user.user_permissions.add(Permission.objects.get(codename='add_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/add/')
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="draft"')
def test_permission_view_add_path_with_2_permissions(self):
"""
Check draft checkbox visible if user have 2 permissions : add_path, add_draft_path
"""
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/add/')
self.assertEqual(response.status_code, 302)
self.user.user_permissions.add(Permission.objects.get(codename='add_path'))
self.user.user_permissions.add(Permission.objects.get(codename='add_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/add/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'name="draft"')
def test_permission_view_change_path_with_draft_permission(self):
"""
Check user can edit a draft path if user has change_draft_path permission only
but can not edit normal path
Check draft checkbox not visible if user have only change_draft_path
"""
self.client.login(username=self.user.username, password='booh')
path = PathFactory(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory(name="PATH_AB", geom=LineString((0, 0), (4, 0)), draft=True)
response = self.client.get('/path/edit/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.get('/path/edit/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.user.user_permissions.add(Permission.objects.get(codename='change_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post('/path/edit/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.get('/path/edit/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="draft"')
def test_permission_view_change_path_without_draft_permission(self):
"""
Check user can not edit a draft path if user has change_path permission only
but can edit normal path
Check draft checkbox not visible if user have only change_path
"""
self.client.login(username=self.user.username, password='booh')
path = PathFactory(name="path", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory(name="draft_path", geom=LineString((0, 0), (4, 0)), draft=True)
response = self.client.get('/path/edit/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.get('/path/edit/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.user.user_permissions.add(Permission.objects.get(codename='change_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/edit/%s/' % path.pk)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="draft"')
response = self.client.get('/path/edit/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
def test_permission_view_change_path_with_2_permissions(self):
"""
Check draft checkbox visible if user have 2 permissions : change_path, change_draft_path
"""
self.client.login(username=self.user.username, password='booh')
path = PathFactory(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory(name="draft_path", geom=LineString((0, 0), (4, 0)), draft=True)
response = self.client.get('/path/edit/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.get('/path/edit/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.user.user_permissions.add(Permission.objects.get(codename='change_path'))
self.user.user_permissions.add(Permission.objects.get(codename='change_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.get('/path/edit/%s/' % path.pk)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="draft"')
response = self.client.get('/path/edit/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'name="draft"')
def test_permission_view_delete_path_with_draft_permission(self):
"""
Check user can not delete a normal path if user has delete_draft_path permission
but can delete draft path
"""
self.client.login(username=self.user.username, password='booh')
path = PathFactory(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory(name="PATH_BC", geom=LineString((0, 2), (4, 2)), draft=True)
response = self.client.post('/path/delete/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.post('/path/delete/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
self.user.user_permissions.add(Permission.objects.get(codename='delete_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post('/path/delete/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
response = self.client.post('/path/delete/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 1)
def test_permission_view_delete_path_without_draft_permission(self):
"""
Check user can delete a normal path and can not delete a draft path if user has :
only delete_path permission
"""
self.client.login(username=self.user.username, password='booh')
path = PathFactory(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory(name="PATH_BC", geom=LineString((0, 2), (4, 2)), draft=True)
response = self.client.post('/path/delete/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.post('/path/delete/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
self.user.user_permissions.add(Permission.objects.get(codename='delete_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post('/path/delete/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
response = self.client.post('/path/delete/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 1)
def test_permission_view_delete_path_with_2_permissions(self):
"""
Check user can delete a normal path and draft path if user has :
delete_draft_path permission and delete_path
"""
self.client.login(username=self.user.username, password='booh')
path = PathFactory(name="PATH_AB", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory(name="PATH_BC", geom=LineString((0, 2), (4, 2)), draft=True)
response = self.client.post('/path/delete/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
response = self.client.post('/path/delete/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
self.user.user_permissions.add(Permission.objects.get(codename='delete_path'))
self.user.user_permissions.add(Permission.objects.get(codename='delete_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post('/path/delete/%s/' % path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 1)
response = self.client.post('/path/delete/%s/' % draft_path.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 0)
def test_delete_multiple_path_draft_withtout_perm(self):
self.client.login(username=self.user.username, password='booh')
path = PathFactory.create(name="path_1", geom=LineString((0, 0), (4, 0)))
draft_path = PathFactory.create(name="path_2", geom=LineString((2, 2), (2, -2)), draft=True)
response = self.client.post(reverse('core:multiple_path_delete', args=['%s,%s' % (path.pk, draft_path.pk)]))
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
self.user.user_permissions.add(Permission.objects.get(codename='delete_path'))
response = self.client.post(reverse('core:multiple_path_delete', args=['%s,%s' % (path.pk, draft_path.pk)]))
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 2)
self.user.user_permissions.add(Permission.objects.get(codename='delete_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post(reverse('core:multiple_path_delete', args=['%s,%s' % (path.pk, draft_path.pk)]))
self.assertEqual(response.status_code, 302)
self.assertEqual(Path.objects.count(), 0)
def test_save_path_with_only_add_draft_path(self):
"""
Check save path without permission add_path save with draft=True
"""
self.user.user_permissions.add(Permission.objects.get(codename='add_draft_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post('/path/add/', self.get_good_data())
self.assertEqual(response.status_code, 302)
self.assertTrue(Path.objects.first().draft)
def test_save_path_with_only_edit_draft_path(self):
"""
Check save path without permission change_path save with draft=True
"""
draft_path = PathFactory(name="draft", geom=LineString((0, 2), (4, 2)), draft=True)
path = PathFactory(name="normal", geom=LineString((0, 2), (4, 2)))
self.user.user_permissions.add(Permission.objects.get(codename='change_draft_path'))
self.client.login(username=self.user.username, password='booh')
data = self.get_good_data()
response = self.client.post('/path/edit/%s/' % draft_path.pk, data)
self.assertEqual(response.status_code, 302)
self.assertTrue(Path.objects.get(pk=draft_path.pk).draft)
response = self.client.post('/path/edit/%s/' % path.pk, data)
self.assertEqual(response.status_code, 302)
self.assertFalse(Path.objects.get(pk=path.pk).draft)
def test_save_path_with_only_add_path(self):
"""
Check save path without permission add_draft_path save with draft=False
"""
self.user.user_permissions.add(Permission.objects.get(codename='add_path'))
self.client.login(username=self.user.username, password='booh')
response = self.client.post('/path/add/', self.get_good_data())
self.assertEqual(response.status_code, 302)
self.assertFalse(Path.objects.first().draft)
def test_save_path_with_only_edit_path(self):
    """With only ``change_path``, edits keep each path's draft flag.

    Check save path without permission change_draft_path save with draft=False.
    """
    published = PathFactory(name="path", geom=LineString((0, 2), (4, 2)))
    in_draft = PathFactory(name="draft", geom=LineString((0, 2), (4, 2)), draft=True)
    self.user.user_permissions.add(
        Permission.objects.get(codename='change_path'))
    self.client.login(username=self.user.username, password='booh')
    payload = self.get_good_data()
    # Editing the published path leaves it published.
    first_response = self.client.post('/path/edit/%s/' % published.pk, payload)
    self.assertEqual(302, first_response.status_code)
    self.assertFalse(Path.objects.first().draft)
    # Editing the draft path leaves it a draft.
    second_response = self.client.post('/path/edit/%s/' % in_draft.pk, payload)
    self.assertEqual(302, second_response.status_code)
    self.assertTrue(Path.objects.get(pk=in_draft.pk).draft)
def test_save_path_with_edit_draft_path_and_edit_path(self):
    """With both change permissions, draft -> normal is a one-way change.

    Check save path without permission change_path save with draft=True.
    """
    edited = PathFactory(name="draft", geom=LineString((0, 2), (4, 2)), draft=True)
    for codename in ('change_path', 'change_draft_path'):
        self.user.user_permissions.add(
            Permission.objects.get(codename=codename))
    self.client.login(username=self.user.username, password='booh')
    payload = self.get_good_data()

    def submit(draft_flag):
        # Re-post the same form with only the draft checkbox toggled.
        payload['draft'] = draft_flag
        return self.client.post('/path/edit/%s/' % edited.pk, payload)

    self.assertEqual(302, submit(True).status_code)
    self.assertTrue(Path.objects.get(pk=edited.pk).draft)
    # You can change a draft path to a normal path.
    self.assertEqual(302, submit(False).status_code)
    self.assertFalse(Path.objects.get(pk=edited.pk).draft)
    # You can't change a normal path back to a draft path.
    self.assertEqual(302, submit(True).status_code)
    self.assertFalse(Path.objects.get(pk=edited.pk).draft)
| 43.493225
| 116
| 0.668141
| 2,072
| 16,049
| 5.034749
| 0.057915
| 0.064225
| 0.072469
| 0.116756
| 0.917657
| 0.908742
| 0.898294
| 0.883723
| 0.844517
| 0.827454
| 0
| 0.018088
| 0.193906
| 16,049
| 368
| 117
| 43.611413
| 0.788282
| 0.089102
| 0
| 0.7713
| 0
| 0.004484
| 0.097057
| 0.005267
| 0
| 0
| 0
| 0
| 0.313901
| 1
| 0.076233
| false
| 0.116592
| 0.040359
| 0.004484
| 0.125561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d5cc1517c99c858ec8f1adc993a28c9f790abb6b
| 88
|
py
|
Python
|
src/helpers/directory_helper.py
|
ZelphirKaltstahl/QuestionsAndAnswers
|
3edb7ef8357706d94fcbc060d966186e933112c8
|
[
"MIT"
] | null | null | null |
src/helpers/directory_helper.py
|
ZelphirKaltstahl/QuestionsAndAnswers
|
3edb7ef8357706d94fcbc060d966186e933112c8
|
[
"MIT"
] | null | null | null |
src/helpers/directory_helper.py
|
ZelphirKaltstahl/QuestionsAndAnswers
|
3edb7ef8357706d94fcbc060d966186e933112c8
|
[
"MIT"
] | null | null | null |
import os
def file_path_in_data_dir(file_name, data_dir='data'):
    """Return the path of *file_name* inside the data directory.

    Args:
        file_name: Base name (or relative path) of the target file.
        data_dir: Directory to prepend; defaults to ``'data'``, which
            preserves the original hard-coded behaviour.

    Returns:
        The joined path string, using the platform path separator.
    """
    return os.path.join(data_dir, file_name)
| 22
| 39
| 0.795455
| 17
| 88
| 3.764706
| 0.647059
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 4
| 39
| 22
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.044944
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
91144a6a9acead9464dabaf74d337feb7b73dac5
| 9,836
|
py
|
Python
|
tests/sub_user_service_test_case.py
|
odeoteknologi/odeo-python-sdk
|
31a0b21a8b23d887fef16b09e1293091e520a23e
|
[
"MIT"
] | null | null | null |
tests/sub_user_service_test_case.py
|
odeoteknologi/odeo-python-sdk
|
31a0b21a8b23d887fef16b09e1293091e520a23e
|
[
"MIT"
] | null | null | null |
tests/sub_user_service_test_case.py
|
odeoteknologi/odeo-python-sdk
|
31a0b21a8b23d887fef16b09e1293091e520a23e
|
[
"MIT"
] | null | null | null |
import json
import unittest
import odeo.client
from odeo.exceptions import GeneralError, InputValidationError
from odeo.models.sub_user import *
from tests.service_test_case import ServiceTestCase
class SubUserServiceTestCase(ServiceTestCase):
    """Tests for the sub-user service endpoints (list / create / update).

    Each test registers a mocked HTTP endpoint on ``self.adapter`` and then
    exercises ``self.client.sub_user``.  The original file repeated the full
    ``register_uri`` scaffolding in every test; the shared structure now
    lives in ``_register`` / ``_register_error``.
    """

    # Canonical sub-user fixture shared by every test.
    _NAME = 'Agus Hartono'
    _PHONE = '081234567890'
    _EMAIL = 'agus@example.com'

    def _register(self, method, path, signature, payload, status_code=200):
        """Mock ``method`` on ``path`` returning ``payload`` as JSON.

        GET requests are matched on an ``Accept`` header, write requests on
        ``Content-Type`` — mirroring what the client sends for each verb.
        """
        header_name = 'Accept' if method == 'GET' else 'Content-Type'
        self.adapter.register_uri(
            method,
            odeo.client.DEVELOPMENT_BASE_URL + path,
            request_headers={
                'Authorization': 'Bearer ' + self.access_token,
                header_name: 'application/json',
                'X-Odeo-Timestamp': '1612137600',
                'X-Odeo-Signature': signature
            },
            status_code=status_code,
            text=json.dumps(payload)
        )

    def _register_error(self, method, path, signature, message, error_code):
        """Mock a 400 response carrying the API's standard error body."""
        self._register(
            method, path, signature,
            {'message': message, 'status_code': 400, 'error_code': error_code},
            status_code=400)

    def _sub_user_payload(self, user_id):
        """JSON body for one sub-user.

        ``user_id`` is an int in list responses but a string in create and
        update responses, so the caller supplies it explicitly.
        """
        return {
            'user_id': user_id,
            'name': self._NAME,
            'phone_number': self._PHONE,
            'email': self._EMAIL
        }

    def _expected_sub_user(self):
        """The model object every successful call should deserialize to."""
        return SubUser(123, self._NAME, self._PHONE, self._EMAIL)

    def test_list_sub_users(self):
        self._register(
            'GET', '/sub-users', 'sHzLllqZvSFwpZTxd70qu1eGfTWHawv3a1nT1IXP3zs=',
            {'sub_users': [self._sub_user_payload(123)],
             'next_page_token': 'abcdef'})
        self.assertEqual(
            SubUsersList(sub_users=[self._expected_sub_user()],
                         next_page_token='abcdef'),
            self.client.sub_user.list_sub_users())

    def test_list_sub_users_empty(self):
        self._register(
            'GET', '/sub-users', 'sHzLllqZvSFwpZTxd70qu1eGfTWHawv3a1nT1IXP3zs=',
            {'sub_users': [], 'next_page_token': 'abcdef'})
        self.assertEqual(
            SubUsersList(sub_users=[], next_page_token='abcdef'),
            self.client.sub_user.list_sub_users())

    def test_list_sub_users_with_page_token(self):
        self._register(
            'GET', '/sub-users?page_token=abcdef',
            'amWP0owDfbvyPoQGyG9jpBCZXYugX3D+zhkH/t4lA3k=',
            {'sub_users': [self._sub_user_payload(123)],
             'next_page_token': 'abcdef'})
        self.assertEqual(
            SubUsersList(sub_users=[self._expected_sub_user()],
                         next_page_token='abcdef'),
            self.client.sub_user.list_sub_users('abcdef'))

    def test_create_sub_user(self):
        self._register(
            'POST', '/sub-users', 'IyMSYxV1An/jebiOlsVaWAz1XPlVQ663J5spsJlxlro=',
            self._sub_user_payload('123'))
        self.assertEqual(
            self._expected_sub_user(),
            self.client.sub_user.create_sub_user(
                self._EMAIL, self._NAME, self._PHONE))

    def test_create_sub_user_failed_email_invalid(self):
        message = 'The email must be a valid email address'
        self._register_error(
            'POST', '/sub-users', 'DIQNg7zkrqYnM1bMFk563xv+Z2ljQOF8ptx5oR5LmZA=',
            message, 10001)
        with self.assertRaises(InputValidationError) as ctx:
            self.client.sub_user.create_sub_user(
                'agus-example.com', self._NAME, self._PHONE)
        self.assertEqual(str(ctx.exception), message)

    def test_create_sub_user_failed_phone_number_already_registered(self):
        message = 'Phone number registered'
        self._register_error(
            'POST', '/sub-users', 'IyMSYxV1An/jebiOlsVaWAz1XPlVQ663J5spsJlxlro=',
            message, 10000)
        with self.assertRaises(GeneralError) as ctx:
            self.client.sub_user.create_sub_user(
                self._EMAIL, self._NAME, self._PHONE)
        self.assertEqual(str(ctx.exception), message)

    def test_update_sub_user(self):
        self._register(
            'PUT', '/sub-users/123', 'tcw7lg3kRTbH4h/80Tv3cidIa+uCqZecHKRKgXtC+3Y=',
            self._sub_user_payload('123'))
        self.assertEqual(
            self._expected_sub_user(),
            self.client.sub_user.update_sub_user(
                123, self._EMAIL, self._NAME, self._PHONE))

    def test_update_sub_user_failed_email_invalid(self):
        message = 'The email must be a valid email address'
        self._register_error(
            'PUT', '/sub-users/123', 'qcZEJpP9vS1KNRVqCqS5qzyxmEUxn8xi8pBgEprySaU=',
            message, 10001)
        with self.assertRaises(InputValidationError) as ctx:
            self.client.sub_user.update_sub_user(
                123, 'agus-example.com', self._NAME, self._PHONE)
        self.assertEqual(str(ctx.exception), message)

    def test_update_sub_user_failed_user_not_found(self):
        message = 'Data not found'
        self._register_error(
            'PUT', '/sub-users/123', 'tcw7lg3kRTbH4h/80Tv3cidIa+uCqZecHKRKgXtC+3Y=',
            message, 10000)
        with self.assertRaises(GeneralError) as ctx:
            self.client.sub_user.update_sub_user(
                123, self._EMAIL, self._NAME, self._PHONE)
        self.assertEqual(str(ctx.exception), message)

    def test_update_sub_user_failed_phone_number_already_registered(self):
        message = 'Phone number registered'
        self._register_error(
            'PUT', '/sub-users/123', 'tcw7lg3kRTbH4h/80Tv3cidIa+uCqZecHKRKgXtC+3Y=',
            message, 10000)
        with self.assertRaises(GeneralError) as ctx:
            self.client.sub_user.update_sub_user(
                123, self._EMAIL, self._NAME, self._PHONE)
        self.assertEqual(str(ctx.exception), message)
# Allow running this test module directly via the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 35.509025
| 93
| 0.528162
| 871
| 9,836
| 5.749713
| 0.125144
| 0.034944
| 0.041933
| 0.04393
| 0.913738
| 0.907348
| 0.903754
| 0.900759
| 0.897963
| 0.887181
| 0
| 0.071011
| 0.357157
| 9,836
| 276
| 94
| 35.637681
| 0.721019
| 0
| 0
| 0.752033
| 0
| 0
| 0.265047
| 0.04758
| 0
| 0
| 0
| 0
| 0.060976
| 1
| 0.04065
| false
| 0
| 0.02439
| 0
| 0.069106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9137b627b69ade336e4656a6319489259f881dd9
| 16,216
|
py
|
Python
|
tests/st/ops/gpu/test_cast_op.py
|
dreamsxin/mindspore
|
b6b254f6e4d07c4eecd8cb4e0a1060da771d6441
|
[
"Apache-2.0"
] | null | null | null |
tests/st/ops/gpu/test_cast_op.py
|
dreamsxin/mindspore
|
b6b254f6e4d07c4eecd8cb4e0a1060da771d6441
|
[
"Apache-2.0"
] | null | null | null |
tests/st/ops/gpu/test_cast_op.py
|
dreamsxin/mindspore
|
b6b254f6e4d07c4eecd8cb4e0a1060da771d6441
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.common.dtype as mstype
import mindspore.context as context
from mindspore.common.tensor import Tensor
from mindspore.nn import Cell
from mindspore.ops import operations as P
class Net(Cell):
    """Casts its two inputs to the two dtypes supplied at construction."""

    def __init__(self, type0, type1):
        super(Net, self).__init__()
        self.Cast = P.Cast()
        # Target dtypes for the first and second input, respectively.
        self.type0 = type0
        self.type1 = type1

    def construct(self, x0, x1):
        first = self.Cast(x0, self.type0)
        second = self.Cast(x1, self.type1)
        return (first, second)
def _check_cast(src_dtype0, dst_type0, expect0, src_dtype1, dst_type1, expect1):
    """Cast two (4, 3, 2) tensors through ``Net`` and check the result dtypes.

    Shared driver for every ``test_cast*`` case below — the original file
    repeated this body 28 times with only the dtypes changed.

    Args:
        src_dtype0/src_dtype1: numpy dtypes the input tensors are built with.
        dst_type0/dst_type1: mindspore target dtypes handed to ``Net``.
        expect0/expect1: expected numpy dtype names of the two outputs.
    """
    x0 = Tensor(np.arange(24).reshape((4, 3, 2)).astype(src_dtype0))
    x1 = Tensor(np.arange(24).reshape((4, 3, 2)).astype(src_dtype1))
    context.set_context(mode=context.GRAPH_MODE, device_target='GPU')
    net = Net(dst_type0, dst_type1)
    output = net(x0, x1)
    assert output[0].asnumpy().dtype == expect0
    assert output[1].asnumpy().dtype == expect1


# NOTE: the original tests used the ``np.bool`` alias, which was removed in
# NumPy 1.24 — ``np.bool_`` is the supported spelling and keeps behaviour.

@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast():
    _check_cast(np.float32, mstype.float16, 'float16',
                np.float16, mstype.float32, 'float32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast1():
    _check_cast(np.int32, mstype.float32, 'float32',
                np.bool_, mstype.float32, 'float32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast2():
    _check_cast(np.float16, mstype.int32, 'int32',
                np.float16, mstype.float64, 'float64')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast3():
    _check_cast(np.int64, mstype.int32, 'int32',
                np.float32, mstype.int32, 'int32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast4():
    _check_cast(np.int32, mstype.float16, 'float16',
                np.int32, mstype.int8, 'int8')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast5():
    _check_cast(np.int32, mstype.uint8, 'uint8',
                np.int32, mstype.bool_, 'bool')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast6():
    _check_cast(np.int8, mstype.float64, 'float64',
                np.int8, mstype.float32, 'float32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast7():
    _check_cast(np.int8, mstype.float32, 'float32',
                np.int8, mstype.float16, 'float16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast8():
    _check_cast(np.int8, mstype.int32, 'int32',
                np.int8, mstype.int16, 'int16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast9():
    _check_cast(np.int8, mstype.int64, 'int64',
                np.bool_, mstype.float16, 'float16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast10():
    _check_cast(np.bool_, mstype.int8, 'int8',
                np.bool_, mstype.float64, 'float64')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast11():
    _check_cast(np.bool_, mstype.int16, 'int16',
                np.bool_, mstype.int32, 'int32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast12():
    _check_cast(np.bool_, mstype.int64, 'int64',
                np.uint8, mstype.float32, 'float32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast13():
    _check_cast(np.uint8, mstype.int32, 'int32',
                np.uint8, mstype.float16, 'float16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast14():
    _check_cast(np.int16, mstype.float64, 'float64',
                np.int16, mstype.float32, 'float32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast15():
    _check_cast(np.int16, mstype.float16, 'float16',
                np.int16, mstype.int32, 'int32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast16():
    _check_cast(np.int16, mstype.float16, 'float16',
                np.int64, mstype.float64, 'float64')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast17():
    _check_cast(np.int16, mstype.float32, 'float32',
                np.int16, mstype.float16, 'float16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast18():
    _check_cast(np.int64, mstype.float32, 'float32',
                np.int64, mstype.float16, 'float16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast19():
    _check_cast(np.int8, mstype.bool_, 'bool',
                np.int16, mstype.bool_, 'bool')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast20():
    _check_cast(np.int64, mstype.bool_, 'bool',
                np.float16, mstype.bool_, 'bool')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast21():
    _check_cast(np.float32, mstype.bool_, 'bool',
                np.float64, mstype.bool_, 'bool')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast22():
    _check_cast(np.uint8, mstype.bool_, 'bool',
                np.int32, mstype.bool_, 'bool')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast23():
    _check_cast(np.float64, mstype.float32, 'float32',
                np.float64, mstype.float16, 'float16')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast24():
    _check_cast(np.float64, mstype.int64, 'int64',
                np.float64, mstype.int32, 'int32')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast25():
    _check_cast(np.float64, mstype.int16, 'int16',
                np.float64, mstype.int8, 'int8')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast26():
    _check_cast(np.int32, mstype.int64, 'int64',
                np.int32, mstype.float64, 'float64')


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast27():
    _check_cast(np.float32, mstype.float64, 'float64',
                np.float64, mstype.float32, 'float32')
| 31.487379
| 78
| 0.664406
| 2,407
| 16,216
| 4.375987
| 0.064811
| 0.079749
| 0.074433
| 0.085066
| 0.90582
| 0.90582
| 0.90582
| 0.90582
| 0.90582
| 0.90582
| 0
| 0.080932
| 0.171744
| 16,216
| 514
| 79
| 31.548638
| 0.703298
| 0.039344
| 0
| 0.878719
| 0
| 0
| 0.026216
| 0
| 0
| 0
| 0
| 0
| 0.128146
| 1
| 0.06865
| false
| 0
| 0.016018
| 0
| 0.089245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e696b074d2b4dfff7ee9b0a0989c46199ab205ce
| 2,373
|
py
|
Python
|
util/visualize.py
|
mrsfgl/geometric_tensor_learning
|
74281c68fa67c4d7e813fdbbf5515ce849cf22ad
|
[
"MIT"
] | null | null | null |
util/visualize.py
|
mrsfgl/geometric_tensor_learning
|
74281c68fa67c4d7e813fdbbf5515ce849cf22ad
|
[
"MIT"
] | null | null | null |
util/visualize.py
|
mrsfgl/geometric_tensor_learning
|
74281c68fa67c4d7e813fdbbf5515ce849cf22ad
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
def plot_synthetic(params, md):
    """Plot synthetic-experiment RSE curves for every (gamma, theta) pair.

    For each combination of the geoTL hyper-parameters ``gamma`` and
    ``theta``, plots one curve per method in ``md`` against the noise level
    and saves the figure under ``synthetic_results/``.

    Fixes in this revision:
      * a fresh figure is created per (gamma, theta) combination, so curves
        from earlier iterations no longer accumulate on the same axes;
      * ``savefig`` now runs before ``show`` — calling it after ``show``
        saves a blank image with most matplotlib backends;
      * the figure is closed afterwards to release memory.

    Args:
        params: Experiment configuration; reads ``model.geoTL.theta``,
            ``model.geoTL.gamma``, ``noise.SNR`` and ``noise.noise_type``.
        md: Result dict (loadmat-style); method name -> RSE array.
            NOTE(review): geoTL entries are assumed indexed as
            [0, snr, gamma, theta] and others as [0, snr] — confirm
            against the producer of these .mat files.
    """
    len_theta = len(params.model.geoTL.theta)
    len_gamma = len(params.model.geoTL.gamma)
    # Bookkeeping keys emitted by loadmat plus the stored config entry.
    skipped = ('__header__', '__version__', '__globals__', 'params')
    for i_gam in range(len_gamma):
        for i_theta in range(len_theta):
            fig = plt.figure()
            for key, value in md.items():
                if key in skipped:
                    continue
                elif key == 'geoTL':
                    plt.plot(params.noise.SNR,
                             value[0, :, i_gam, i_theta].squeeze(),
                             '--*', label=key)
                else:
                    plt.plot(params.noise.SNR, value[0, :].squeeze(),
                             '--*', label=key)
            if params.noise.noise_type == 'AWGN':
                plt.xlabel('SNR values of noise.')
            elif params.noise.noise_type == 'gross':
                plt.xlabel('Percentage of corrupted entries.')
            plt.ylabel('RSE values of the output')
            plt.legend()
            fname = 'synthetic_results/gamma_ind_{}_theta_ind_{}.png'.format(
                i_gam, i_theta)
            # Save before show: show() hands the figure to the GUI backend
            # and a later savefig may write an empty canvas.
            plt.savefig(fname)
            plt.show()
            plt.close(fig)
def plot_real(params, md):
    """Plot real-data RSE curves for every (gamma, theta) pair.

    Same structure as the synthetic plotter, but the result arrays have no
    leading experiment axis: geoTL entries are indexed as
    [snr, gamma, theta] and the others as a flat [snr] vector.
    Figures are saved under ``real_results/``.

    Fixes in this revision:
      * a fresh figure is created per (gamma, theta) combination, so curves
        from earlier iterations no longer accumulate on the same axes;
      * ``savefig`` now runs before ``show`` — calling it after ``show``
        saves a blank image with most matplotlib backends;
      * the figure is closed afterwards to release memory.

    Args:
        params: Experiment configuration; reads ``model.geoTL.theta``,
            ``model.geoTL.gamma``, ``noise.SNR`` and ``noise.noise_type``.
        md: Result dict (loadmat-style); method name -> RSE array.
    """
    len_theta = len(params.model.geoTL.theta)
    len_gamma = len(params.model.geoTL.gamma)
    # Bookkeeping keys emitted by loadmat plus the stored config entry.
    skipped = ('__header__', '__version__', '__globals__', 'params')
    for i_gam in range(len_gamma):
        for i_theta in range(len_theta):
            fig = plt.figure()
            for key, value in md.items():
                if key in skipped:
                    continue
                elif key == 'geoTL':
                    plt.plot(params.noise.SNR,
                             value[:, i_gam, i_theta].squeeze(),
                             '--*', label=key)
                else:
                    plt.plot(params.noise.SNR, value[:].squeeze(),
                             '--*', label=key)
            if params.noise.noise_type == 'AWGN':
                plt.xlabel('SNR values of noise.')
            elif params.noise.noise_type == 'gross':
                plt.xlabel('Percentage of corrupted entries.')
            plt.ylabel('RSE values of the output')
            plt.legend()
            fname = 'real_results/gamma_ind_{}_theta_ind_{}.png'.format(
                i_gam, i_theta)
            # Save before show: show() hands the figure to the GUI backend
            # and a later savefig may write an empty canvas.
            plt.savefig(fname)
            plt.show()
            plt.close(fig)
| 38.901639
| 77
| 0.485883
| 255
| 2,373
| 4.278431
| 0.223529
| 0.08066
| 0.051329
| 0.069661
| 0.938588
| 0.938588
| 0.938588
| 0.936755
| 0.936755
| 0.936755
| 0
| 0.001389
| 0.393173
| 2,373
| 60
| 78
| 39.55
| 0.75625
| 0
| 0
| 0.830189
| 0
| 0
| 0.150506
| 0.037521
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.018868
| 0
| 0.056604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc052d37ff94f3d18eb956597d7e8ea3df108603
| 5,999
|
py
|
Python
|
src/visuanalytics/tests/analytics/transform/types/test_transform_compare.py
|
mxsph/Data-Analytics
|
c82ff54b78f50b6660d7640bfee96ea68bef598f
|
[
"MIT"
] | 3
|
2020-08-24T19:02:09.000Z
|
2021-05-27T20:22:41.000Z
|
src/visuanalytics/tests/analytics/transform/types/test_transform_compare.py
|
mxsph/Data-Analytics
|
c82ff54b78f50b6660d7640bfee96ea68bef598f
|
[
"MIT"
] | 342
|
2020-08-13T10:24:23.000Z
|
2021-08-12T14:01:52.000Z
|
src/visuanalytics/tests/analytics/transform/types/test_transform_compare.py
|
visuanalytics/visuanalytics
|
f9cce7bc9e3227568939648ddd1dd6df02eac752
|
[
"MIT"
] | 8
|
2020-09-01T07:11:18.000Z
|
2021-04-09T09:02:11.000Z
|
import unittest
from visuanalytics.tests.analytics.transform.transform_test_helper import prepare_test
class TestTransformCompare(unittest.TestCase):
    """Tests for the "compare" transform step (equal / not-equal / higher / lower branches)."""

    def setUp(self):
        # Fresh input for every test; the compare step's fired branch
        # rewrites "text1" from "result" to the branch-specific text.
        self.data = {
            "value1": 5,
            "value2": 5,
            "value3": 30,
            "text1": "result"
        }

    @staticmethod
    def _replace_action(new_value):
        # One "replace" action list, as used by every compare branch.
        return [{
            "type": "replace",
            "keys": ["_req|text1"],
            "old_value": "result",
            "new_value": new_value
        }]

    def _run_compare(self, left, right, expected_text,
                     equal_branch="on_equal", equal_value="equal"):
        # Build a single compare step, run it through prepare_test and
        # return (expected, actual) for the caller to assert on.
        values = [{
            "type": "compare",
            "value_left": left,
            "value_right": right,
            equal_branch: self._replace_action(equal_value),
            "on_higher": self._replace_action("value_left is higher"),
            "on_lower": self._replace_action("value_left is lower")
        }]
        expected_data = {
            "_req": {
                "value1": 5,
                "value2": 5,
                "value3": 30,
                "text1": expected_text
            }
        }
        return prepare_test(values, self.data, expected_data)

    def test_transform_compare_equal(self):
        # value1 == value2 -> "on_equal" branch fires.
        exp, out = self._run_compare("_req|value1", "_req|value2", "equal")
        self.assertDictEqual(exp, out, "compare equal Failed")

    def test_transform_compare_not_equal(self):
        # value1 != value3 -> "on_not_equal" branch fires.
        exp, out = self._run_compare("_req|value1", "_req|value3", "not equal",
                                     equal_branch="on_not_equal",
                                     equal_value="not equal")
        self.assertDictEqual(exp, out, "compare not equal Failed")

    def test_transform_compare_higher(self):
        # value3 > value2 -> "on_higher" branch fires.
        exp, out = self._run_compare("_req|value3", "_req|value2", "value_left is higher")
        self.assertDictEqual(exp, out, "compare higher Failed")

    def test_transform_compare_lower(self):
        # value2 < value3 -> "on_lower" branch fires.
        exp, out = self._run_compare("_req|value2", "_req|value3", "value_left is lower")
        self.assertDictEqual(exp, out, "compare lower Failed")
| 30.92268
| 86
| 0.331222
| 397
| 5,999
| 4.735516
| 0.115869
| 0.067021
| 0.095745
| 0.114894
| 0.869149
| 0.839362
| 0.803191
| 0.753723
| 0.753723
| 0.753723
| 0
| 0.022297
| 0.551425
| 5,999
| 193
| 87
| 31.082902
| 0.676329
| 0
| 0
| 0.601124
| 0
| 0
| 0.227205
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 1
| 0.02809
| false
| 0
| 0.011236
| 0
| 0.044944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc2ac56f458819f1bc05c9b2c8a40560b545f0da
| 23,561
|
py
|
Python
|
tests/dhcpv6/ddns/test_ddns_tsig_request.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 22
|
2015-02-27T11:51:05.000Z
|
2022-02-28T12:39:29.000Z
|
tests/dhcpv6/ddns/test_ddns_tsig_request.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 16
|
2018-10-30T15:00:12.000Z
|
2019-01-11T17:55:13.000Z
|
tests/dhcpv6/ddns/test_ddns_tsig_request.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 11
|
2015-02-27T11:51:36.000Z
|
2021-03-30T08:33:54.000Z
|
"""DDNS without TSIG"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_control
import misc
import srv_msg
def _run_ddns6_tsig_case(forward_key, reverse_key, keys, dns_set):
    """Run one TSIG-secured DHCPv6 DDNS forward+reverse update scenario.

    The seven tests below were identical except for the TSIG key material
    and the DNS configuration set, so the shared sequence lives here.

    :param forward_key: TSIG key name protecting the forward zone update.
    :param reverse_key: TSIG key name protecting the reverse zone update.
    :param keys: iterable of (name, algorithm, secret) TSIG keys to configure.
    :param dns_set: DNS server configuration set number to start.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::50-2001:db8:1::50')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'six')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('six.example.com.', forward_key)
    srv_control.add_reverse_ddns('1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.', reverse_key)
    for name, algorithm, secret in keys:
        srv_control.add_keys(name, algorithm, secret)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(dns_set)
    srv_control.start_srv('DNS', 'started')

    # The forward (AAAA) record must not exist before the lease is assigned.
    misc.test_procedure()
    srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    # SOLICIT / ADVERTISE exchange.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)

    # REQUEST carrying the FQDN option with the S flag (server does the update).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:ff:ff:ff:ff:ff:01')
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'sth6.six.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_include_option(2)
    srv_msg.response_check_include_option(39)
    srv_msg.response_check_option_content(39, 'flags', 'S')
    srv_msg.response_check_option_content(39, 'fqdn', 'sth6.six.example.com.')

    # The forward record now resolves to the leased address.
    misc.test_procedure()
    srv_msg.dns_question_record('sth6.six.example.com', 'AAAA', 'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '2001:db8:1::50')

    # The reverse (PTR) record points back at the FQDN.
    misc.test_procedure()
    srv_msg.dns_question_record('0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.',
                                'PTR',
                                'IN')
    srv_msg.client_send_dns_query()
    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'sth6.six.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname',
                               '0.5.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa.')


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_sha1_forw_and_rev():
    _run_ddns6_tsig_case('forge.sha1.key', 'forge.sha1.key',
                         [('forge.sha1.key', 'HMAC-SHA1', 'PN4xKZ/jDobCMlo4rpr70w==')],
                         3)


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_sha224_forw_and_rev():
    _run_ddns6_tsig_case('forge.sha224.key', 'forge.sha224.key',
                         [('forge.sha224.key', 'HMAC-SHA224', 'TxAiO5TRKkFyHSCa4erQZQ==')],
                         4)


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_sha256_forw_and_rev():
    _run_ddns6_tsig_case('forge.sha256.key', 'forge.sha256.key',
                         [('forge.sha256.key', 'HMAC-SHA256', '5AYMijv0rhZJyQqK/caV7g==')],
                         5)


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_sha384_forw_and_rev():
    _run_ddns6_tsig_case('forge.sha384.key', 'forge.sha384.key',
                         [('forge.sha384.key', 'HMAC-SHA384', '21upyvp7zcG0S2PB4+kuQQ==')],
                         6)


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_sha512_forw_and_rev():
    _run_ddns6_tsig_case('forge.sha512.key', 'forge.sha512.key',
                         [('forge.sha512.key', 'HMAC-SHA512', 'jBng5D6QL4f8cfLUUwE7OQ==')],
                         7)


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_md5_forw_and_rev():
    _run_ddns6_tsig_case('forge.md5.key', 'forge.md5.key',
                         [('forge.md5.key', 'HMAC-MD5', 'bX3Hs+fG/tThidQPuhK1mA==')],
                         8)


@pytest.mark.v6
@pytest.mark.ddns
@pytest.mark.kea_only
@pytest.mark.tsig
@pytest.mark.forward_reverse_add
def test_ddns6_tsig_multi_key_forw_and_rev():
    # Forward zone signed with the MD5 key, reverse zone with the SHA512 key;
    # both keys configured, SHA512 first to match the original call order.
    _run_ddns6_tsig_case('forge.md5.key', 'forge.sha512.key',
                         [('forge.sha512.key', 'HMAC-SHA512', 'jBng5D6QL4f8cfLUUwE7OQ=='),
                          ('forge.md5.key', 'HMAC-MD5', 'bX3Hs+fG/tThidQPuhK1mA==')],
                         9)
| 40.344178
| 108
| 0.699801
| 3,895
| 23,561
| 3.901155
| 0.036457
| 0.04054
| 0.048371
| 0.055281
| 0.980059
| 0.980059
| 0.969793
| 0.969793
| 0.969793
| 0.969793
| 0
| 0.058379
| 0.135563
| 23,561
| 583
| 109
| 40.413379
| 0.687681
| 0.002589
| 0
| 0.933472
| 0
| 0.043659
| 0.252405
| 0.106197
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014553
| true
| 0.072765
| 0.008316
| 0
| 0.022869
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
fc61cc6227c922f820d7c90eee34a6c08b98d51b
| 29,006
|
py
|
Python
|
nbgrader/tests/test_validator.py
|
aliniknejad/nbgrader
|
124095e48a840ac2af6e3178eab7ed32089f3cd2
|
[
"BSD-3-Clause"
] | 2
|
2017-07-10T10:44:28.000Z
|
2017-11-28T18:30:35.000Z
|
nbgrader/tests/test_validator.py
|
jld23/nbgrader
|
07a38cd8ed12ab33870bdd42f0bf35aa1252b0db
|
[
"BSD-3-Clause"
] | 5
|
2020-03-03T02:48:10.000Z
|
2020-05-22T04:23:29.000Z
|
nbgrader/tests/test_validator.py
|
jld23/nbgrader
|
07a38cd8ed12ab33870bdd42f0bf35aa1252b0db
|
[
"BSD-3-Clause"
] | 2
|
2020-06-23T10:45:09.000Z
|
2020-11-25T16:02:44.000Z
|
import pytest
import io
import os
from textwrap import dedent
from nbformat.v4 import new_output
from ..validator import Validator
from . import (
create_code_cell, create_text_cell)
from io import StringIO
from nbgrader.validator import Validator
@pytest.fixture
def validator() -> Validator:
    """Return a fresh ``Validator`` instance for each test."""
    return Validator()
@pytest.fixture
def stream() -> StringIO:
    """Return an empty in-memory text stream used to capture validator output."""
    return io.StringIO()
class TestValidator(object):
def _add_error(self, cell):
    """Append a synthetic error output to *cell* and return the cell."""
    error_output = new_output(
        "error",
        ename="Error",
        evalue="oh noes, an error occurred!",
        traceback=["oh noes, an error occurred!"]
    )
    cell.outputs.append(error_output)
    return cell
def test_indent(self, validator):
    """Check Validator._indent: indenting, width truncation, ANSI stripping."""
    # test normal indenting
    assert validator._indent("Hello, world!") == " Hello, world!"
    assert validator._indent("Hello,\n world!") == " Hello,\n world!"
    # test truncation
    validator.width = 10
    assert validator._indent("Hello, world!") == " Hel..."
    assert validator._indent("Hello,\n world!") == " Hel...\n wo..."
    # test that ansi escape sequences are removed and not counted towards
    # the line width
    assert validator._indent("\x1b[30mHello, world!\x1b[0m") == " Hel..."
    assert validator._indent("\x1b[30mHello,\n world!\x1b[0m") == " Hel...\n wo..."
def test_print_type_changed(self, validator, stream):
    """The type-changed warning shows the (truncated) source of the cell."""
    cell = create_code_cell()
    validator.stream = stream
    # Narrow width forces the "..." truncation seen in the expected text.
    validator.width = 20
    validator._print_type_changed("markdown", "code", cell.source.strip())
    expected = dedent(
        """
====================
The following markdown cell has changed to a code cell:
print("someth...
### BEGIN SOL...
print("hello"...
### END SOLUT...
"""
    )
    assert stream.getvalue() == expected
def test_print_changed(self, validator, stream):
    """The changed-cell warning shows the (truncated) source of the cell."""
    cell = create_code_cell()
    validator.stream = stream
    validator.width = 20
    validator._print_changed(cell.source.strip())
    expected = dedent(
        """
====================
The following cell has changed:
print("someth...
### BEGIN SOL...
print("hello"...
### END SOLUT...
"""
    )
    assert stream.getvalue() == expected
def test_print_error_code_cell(self, validator, stream):
    """A failed code cell with no error output gets the default message."""
    cell = create_code_cell()
    validator.stream = stream
    validator.width = 20
    validator._print_error(cell.source.strip(), validator._extract_error(cell))
    expected = dedent(
        """
====================
The following cell failed:
print("someth...
### BEGIN SOL...
print("hello"...
### END SOLUT...
The error was:
You did not p...
"""
    )
    assert stream.getvalue() == expected
def test_print_error_code_cell_error(self, validator, stream):
    """A failed code cell with an error output shows that error's text."""
    cell = self._add_error(create_code_cell())
    validator.stream = stream
    validator.width = 20
    validator._print_error(cell.source.strip(), validator._extract_error(cell))
    expected = dedent(
        """
====================
The following cell failed:
print("someth...
### BEGIN SOL...
print("hello"...
### END SOLUT...
The error was:
oh noes, an e...
"""
    )
    assert stream.getvalue() == expected
def test_print_error_markdown_cell(self, validator, stream):
    """A failed markdown cell is reported with the default message."""
    cell = create_text_cell()
    validator.stream = stream
    validator.width = 20
    validator._print_error(cell.source.strip(), validator._extract_error(cell))
    expected = dedent(
        """
====================
The following cell failed:
this is the a...
The error was:
You did not p...
"""
    )
    assert stream.getvalue() == expected
def test_print_pass(self, validator, stream):
    """A passing cell is reported with its (truncated) source."""
    cell = create_code_cell()
    validator.stream = stream
    validator.width = 20
    validator._print_pass(cell.source.strip())
    expected = dedent(
        """
====================
The following cell passed:
print("someth...
### BEGIN SOL...
print("hello"...
### END SOLUT...
"""
    )
    assert stream.getvalue() == expected
def test_print_num_type_changed_0(self, validator: Validator, stream: StringIO) -> None:
    """Zero type changes -> nothing is printed."""
    validator.stream = stream
    validator._print_num_type_changed(0)
    assert stream.getvalue() == ""
def test_print_num_type_changed_1(self, validator: Validator, stream: StringIO) -> None:
    """One type change -> loud warning banner."""
    validator.stream = stream
    validator._print_num_type_changed(1)
    assert stream.getvalue().startswith("THE TYPES OF 1 CELL(S) HAVE CHANGED!")
def test_print_num_changed_0(self, validator: Validator, stream: StringIO) -> None:
    """Zero changed test cells -> nothing is printed."""
    validator.stream = stream
    validator._print_num_changed(0)
    assert stream.getvalue() == ""
def test_print_num_changed_1(self, validator: Validator, stream: StringIO) -> None:
    """One changed test cell -> loud warning banner."""
    validator.stream = stream
    validator._print_num_changed(1)
    assert stream.getvalue().startswith("THE CONTENTS OF 1 TEST CELL(S) HAVE CHANGED!")
def test_print_num_failed(self, validator: Validator, stream: StringIO) -> None:
    """Zero failures -> success message."""
    validator.stream = stream
    validator._print_num_failed(0)
    assert stream.getvalue() == "Success! Your notebook passes all the tests.\n"
def test_print_num_failed_1(self, validator: Validator, stream: StringIO) -> None:
    """One failure -> validation-failed banner."""
    validator.stream = stream
    validator._print_num_failed(1)
    assert stream.getvalue().startswith("VALIDATION FAILED ON 1 CELL(S)!")
def test_print_num_passed(self, validator: Validator, stream: StringIO) -> None:
    """Zero passes (inverted mode) -> success message."""
    validator.stream = stream
    validator._print_num_passed(0)
    assert stream.getvalue() == "Success! The notebook does not pass any tests.\n"
def test_print_num_passed_1(self, validator: Validator, stream: StringIO) -> None:
    """One pass (inverted mode) -> notebook-passed banner."""
    validator.stream = stream
    validator._print_num_passed(1)
    assert stream.getvalue().startswith("NOTEBOOK PASSED ON 1 CELL(S)!")
def test_submitted_unchanged(self, validator, stream):
"""Does the validation fail on an unchanged notebook?"""
filename = os.path.join(os.path.dirname(__file__), "preprocessors", "files", "submitted-unchanged.ipynb")
validator.stream = stream
validator.validate_and_print(filename)
assert stream.getvalue().split("\n")[0] == "VALIDATION FAILED ON 3 CELL(S)! If you submit your assignment as it is, you WILL NOT"
def test_submitted_changed(self, validator, stream):
"""Does the validation pass on an changed notebook?"""
filename = os.path.join(os.path.dirname(__file__), "preprocessors", "files", "submitted-changed.ipynb")
validator.stream = stream
validator.validate_and_print(filename)
assert stream.getvalue() == "Success! Your notebook passes all the tests.\n"
def test_invert_submitted_unchanged(self, validator, stream):
"""Does the inverted validation pass on an unchanged notebook?"""
filename = os.path.join(os.path.dirname(__file__), "preprocessors", "files", "submitted-unchanged.ipynb")
validator.stream = stream
validator.invert = True
validator.validate_and_print(filename)
assert stream.getvalue().split("\n")[0] == "NOTEBOOK PASSED ON 1 CELL(S)!"
def test_invert_submitted_changed(self, validator, stream):
"""Does the inverted validation fail on a changed notebook?"""
filename = os.path.join(os.path.dirname(__file__), "preprocessors", "files", "submitted-changed.ipynb")
validator.stream = stream
validator.invert = True
validator.validate_and_print(filename)
assert stream.getvalue().split("\n")[0] == "NOTEBOOK PASSED ON 2 CELL(S)!"
def test_grade_cell_changed(self, validator, stream):
"""Does the validate fail if a grade cell has changed?"""
filename = os.path.join(os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
validator.stream = stream
validator.validate_and_print(filename)
assert stream.getvalue().split("\n")[0] == "THE CONTENTS OF 1 TEST CELL(S) HAVE CHANGED! This might mean that even though the tests"
def test_grade_cell_changed_ignore_checksums(self, validator, stream):
    """A changed grade cell passes when checksums are ignored."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    validator.stream = stream
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == "Success! Your notebook passes all the tests."
def test_invert_grade_cell_changed(self, validator, stream):
    """A changed grade cell still fails validation even with --invert."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE CONTENTS OF 1 TEST CELL(S) HAVE CHANGED! This might mean that even though the tests")
def test_invert_grade_cell_changed_ignore_checksums(self, validator, stream):
    """With --invert and ignored checksums, a changed grade cell is reported as passing."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == "NOTEBOOK PASSED ON 2 CELL(S)!"
def test_submitted_unchanged_ignore_checksums(self, validator, stream):
    """An unmodified submission still fails when checksums are ignored."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-unchanged.ipynb")
    validator.stream = stream
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "VALIDATION FAILED ON 1 CELL(S)! If you submit your assignment as it is, you WILL NOT")
def test_locked_cell_changed(self, validator, stream):
    """Validation must fail when locked cells were modified."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.stream = stream
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE CONTENTS OF 2 TEST CELL(S) HAVE CHANGED! This might mean that even though the tests")
def test_locked_cell_changed_ignore_checksums(self, validator: Validator, stream: StringIO) -> None:
    """With checksums ignored, a changed locked cell no longer blocks, but the failing test still does."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.stream = stream
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "VALIDATION FAILED ON 1 CELL(S)! If you submit your assignment as it is, you WILL NOT")
def test_invert_locked_cell_changed(self, validator, stream):
    """A changed locked cell still fails validation even with --invert."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE CONTENTS OF 2 TEST CELL(S) HAVE CHANGED! This might mean that even though the tests")
def test_invert_locked_cell_changed_ignore_checksums(self, validator, stream):
    """With --invert and ignored checksums, only the genuinely passing cell is reported."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == "NOTEBOOK PASSED ON 1 CELL(S)!"
def test_submitted_unchanged_json(self, validator):
    """The JSON report for an unmodified submission lists three failed cells."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-unchanged.ipynb")
    report = validator.validate(nb)
    assert list(report) == ["failed"]
    failed = report["failed"]
    assert len(failed) == 3
    assert failed[0]["source"] == "assert a == 1"
    assert failed[1]["source"] == "YOUR ANSWER HERE"
    assert failed[1]["error"] == "You did not provide a response."
    assert failed[2]["source"] == "# YOUR CODE HERE\nraise NotImplementedError()"
def test_submitted_changed_json(self, validator):
    """The JSON report for a correctly changed submission is empty."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-changed.ipynb")
    report = validator.validate(nb)
    assert list(report) == []
def test_invert_submitted_unchanged_json(self, validator):
    """The inverted JSON report for an unmodified submission lists one passing cell."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-unchanged.ipynb")
    validator.invert = True
    report = validator.validate(nb)
    assert list(report) == ["passed"]
    passed = report["passed"]
    assert len(passed) == 1
    assert passed[0]["source"] == 'print("Success!")'
def test_invert_submitted_changed_json(self, validator):
    """The inverted JSON report for a changed submission lists two passing cells."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-changed.ipynb")
    validator.invert = True
    report = validator.validate(nb)
    assert list(report) == ["passed"]
    passed = report["passed"]
    assert len(passed) == 2
    assert passed[0]["source"] == 'print("Success!")'
    assert passed[1]["source"] == 'assert a == 1'
def test_grade_cell_changed_json(self, validator):
    """The JSON report flags the modified grade cell as changed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    report = validator.validate(nb)
    assert list(report) == ["changed"]
    changed = report["changed"]
    assert len(changed) == 1
    assert changed[0]["source"] == '#assert a == 1'
def test_grade_cell_changed_ignore_checksums_json(self, validator):
    """With checksums ignored, the changed grade cell produces an empty report."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == []
def test_invert_grade_cell_changed_json(self, validator):
    """Even with --invert, the JSON report still flags the changed grade cell."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    validator.invert = True
    report = validator.validate(nb)
    assert list(report) == ["changed"]
    changed = report["changed"]
    assert len(changed) == 1
    assert changed[0]["source"] == '#assert a == 1'
def test_invert_grade_cell_changed_ignore_checksums_json(self, validator):
    """With --invert and ignored checksums, both cells show up as passing."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-changed.ipynb")
    validator.invert = True
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["passed"]
    passed = report["passed"]
    assert len(passed) == 2
    assert passed[0]["source"] == 'print("Success!")'
    assert passed[1]["source"] == '#assert a == 1'
def test_submitted_unchanged_ignore_checksums_json(self, validator):
    """With checksums ignored, the unmodified submission still has one failing cell."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-unchanged.ipynb")
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["failed"]
    failed = report["failed"]
    assert len(failed) == 1
    assert failed[0]["source"] == 'assert a == 1'
def test_locked_cell_changed_json(self, validator):
    """The JSON report flags both modified locked cells as changed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    report = validator.validate(nb)
    assert list(report) == ["changed"]
    changed = report["changed"]
    assert len(changed) == 2
    assert changed[0]["source"] == '#print("Don\'t change this cell!")'
    assert changed[1]["source"] == "This cell shouldn't \nbe changed."
def test_locked_cell_changed_ignore_checksums_json(self, validator):
    """With checksums ignored, changed locked cells drop out but the failing test remains."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["failed"]
    failed = report["failed"]
    assert len(failed) == 1
    assert failed[0]["source"] == 'assert a == 1'
def test_invert_locked_cell_changed_json(self, validator):
    """Even with --invert, the JSON report still flags both changed locked cells."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.invert = True
    report = validator.validate(nb)
    assert list(report) == ["changed"]
    changed = report["changed"]
    assert len(changed) == 2
    assert changed[0]["source"] == '#print("Don\'t change this cell!")'
    assert changed[1]["source"] == "This cell shouldn't \nbe changed."
def test_invert_locked_cell_changed_ignore_checksums_json(self, validator):
    """With --invert and ignored checksums, only the passing cell is listed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-locked-cell-changed.ipynb")
    validator.invert = True
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["passed"]
    passed = report["passed"]
    assert len(passed) == 1
    assert passed[0]["source"] == 'print("Success!")'
def test_answer_cell_type_changed(self, validator: Validator, stream: StringIO) -> None:
    """Validation must fail when an answer cell's type has changed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.stream = stream
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_answer_cell_type_changed_ignore_checksums(self, validator, stream):
    """A type change on an answer cell fails even when checksums are ignored."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.stream = stream
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_invert_answer_cell_type_changed(self, validator, stream):
    """A type change on an answer cell fails even with --invert."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_invert_answer_cell_type_changed_ignore_checksums(self, validator, stream):
    """A type change on an answer cell fails even with --invert and ignored checksums."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_answer_cell_type_changed_json(self, validator):
    """The JSON report flags the answer cell whose type changed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "# YOUR CODE HERE\nraise NotImplementedError()"
def test_answer_cell_type_changed_ignore_checksums_json(self, validator):
    """A type change is still reported when checksums are ignored."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "# YOUR CODE HERE\nraise NotImplementedError()"
def test_invert_answer_cell_type_changed_json(self, validator):
    """A type change is still reported with --invert."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.invert = True
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "# YOUR CODE HERE\nraise NotImplementedError()"
def test_invert_answer_cell_type_changed_ignore_checksums_json(self, validator):
    """A type change is still reported with --invert and ignored checksums."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-answer-cell-type-changed.ipynb")
    validator.invert = True
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "# YOUR CODE HERE\nraise NotImplementedError()"
def test_grade_cell_type_changed(self, validator, stream):
    """Validation must fail when a grade cell's type has changed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.stream = stream
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_grade_cell_type_changed_ignore_checksums(self, validator, stream):
    """A type change on a grade cell fails even when checksums are ignored."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.stream = stream
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_invert_grade_cell_type_changed(self, validator, stream):
    """A type change on a grade cell fails even with --invert."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_invert_grade_cell_type_changed_ignore_checksums(self, validator, stream):
    """A type change on a grade cell fails even with --invert and ignored checksums."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.stream = stream
    validator.invert = True
    validator.ignore_checksums = True
    validator.validate_and_print(nb)
    first = stream.getvalue().split("\n")[0]
    assert first == (
        "THE TYPES OF 1 CELL(S) HAVE CHANGED! This might mean that even though the tests are")
def test_grade_cell_type_changed_json(self, validator):
    """The JSON report flags the grade cell whose type changed."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "assert a == 1"
def test_grade_cell_type_changed_ignore_checksums_json(self, validator):
    """A grade-cell type change is still reported when checksums are ignored."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "assert a == 1"
def test_invert_grade_cell_type_changed_json(self, validator):
    """A grade-cell type change is still reported with --invert."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.invert = True
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "assert a == 1"
def test_invert_grade_cell_type_changed_ignore_checksums_json(self, validator):
    """A grade-cell type change is still reported with --invert and ignored checksums."""
    nb = os.path.join(
        os.path.dirname(__file__), "preprocessors", "files", "submitted-grade-cell-type-changed.ipynb")
    validator.invert = True
    validator.ignore_checksums = True
    report = validator.validate(nb)
    assert list(report) == ["type_changed"]
    type_changed = report["type_changed"]
    assert len(type_changed) == 1
    assert type_changed[0]["source"] == "assert a == 1"
| 49.924269
| 140
| 0.652106
| 3,516
| 29,006
| 5.212742
| 0.047213
| 0.027499
| 0.032082
| 0.041248
| 0.950022
| 0.932508
| 0.91734
| 0.893005
| 0.875873
| 0.865343
| 0
| 0.006092
| 0.219058
| 29,006
| 580
| 141
| 50.010345
| 0.80302
| 0.112563
| 0
| 0.688312
| 0
| 0.005195
| 0.228342
| 0.063643
| 0
| 0
| 0
| 0
| 0.277922
| 1
| 0.155844
| false
| 0.075325
| 0.023377
| 0.005195
| 0.18961
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
fca75597ee7c011121814c01602a3f9008ca7175
| 8,489
|
py
|
Python
|
dfirtrack_main/tests/analystmemo/test_analystmemo_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 4
|
2020-03-06T17:37:09.000Z
|
2020-03-17T07:50:55.000Z
|
dfirtrack_main/tests/analystmemo/test_analystmemo_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | null | null | null |
dfirtrack_main/tests/analystmemo/test_analystmemo_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 1
|
2020-03-06T20:54:52.000Z
|
2020-03-06T20:54:52.000Z
|
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils import timezone
from dfirtrack_main.models import Analystmemo, System, Systemstatus
import urllib.parse
class AnalystmemoViewTestCase(TestCase):
    """View tests for the analystmemo list/detail/add/edit pages."""

    @classmethod
    def setUpTestData(cls):
        # One user, one system, and one memo shared by every test below.
        user = User.objects.create_user(
            username='testuser_analystmemo', password='M4d878CFQiHcJQrZr4iN')
        status = Systemstatus.objects.create(systemstatus_name='systemstatus_1')
        system = System.objects.create(
            system_name='system_1',
            systemstatus=status,
            system_modify_time=timezone.now(),
            system_created_by_user_id=user,
            system_modified_by_user_id=user,
        )
        Analystmemo.objects.create(
            analystmemo_note='lorem ipsum',
            system=system,
            analystmemo_created_by_user_id=user,
            analystmemo_modified_by_user_id=user,
        )

    # --- helpers -----------------------------------------------------

    def _login(self):
        """Log the shared test user in via the test client."""
        self.client.login(
            username='testuser_analystmemo', password='M4d878CFQiHcJQrZr4iN')

    @staticmethod
    def _memo():
        """Return the memo created in setUpTestData."""
        return Analystmemo.objects.get(analystmemo_note='lorem ipsum')

    def _assert_login_redirect(self, url):
        """GET *url* anonymously and expect a redirect to the login page."""
        destination = '/login/?next=' + urllib.parse.quote(url, safe='')
        response = self.client.get(url, follow=True)
        self.assertRedirects(
            response, destination, status_code=302, target_status_code=200)

    # --- list view ---------------------------------------------------

    def test_analystmemos_list_not_logged_in(self):
        """test list view"""
        self._assert_login_redirect('/analystmemos/')

    def test_analystmemos_list_logged_in(self):
        """test list view"""
        self._login()
        response = self.client.get('/analystmemos/')
        self.assertEqual(response.status_code, 200)

    def test_analystmemos_list_template(self):
        """test list view"""
        self._login()
        response = self.client.get('/analystmemos/')
        self.assertTemplateUsed(
            response, 'dfirtrack_main/analystmemo/analystmemos_list.html')

    def test_analystmemos_list_get_user_context(self):
        """test list view"""
        self._login()
        response = self.client.get('/analystmemos/')
        self.assertEqual(str(response.context['user']), 'testuser_analystmemo')

    # --- detail view -------------------------------------------------

    def test_analystmemos_detail_not_logged_in(self):
        """test detail view"""
        memo = self._memo()
        self._assert_login_redirect('/analystmemos/' + str(memo.analystmemo_id))

    def test_analystmemos_detail_logged_in(self):
        """test detail view"""
        memo = self._memo()
        self._login()
        response = self.client.get('/analystmemos/' + str(memo.analystmemo_id))
        self.assertEqual(response.status_code, 200)

    def test_analystmemos_detail_template(self):
        """test detail view"""
        memo = self._memo()
        self._login()
        response = self.client.get('/analystmemos/' + str(memo.analystmemo_id))
        self.assertTemplateUsed(
            response, 'dfirtrack_main/analystmemo/analystmemos_detail.html')

    def test_analystmemos_detail_get_user_context(self):
        """test detail view"""
        memo = self._memo()
        self._login()
        response = self.client.get('/analystmemos/' + str(memo.analystmemo_id))
        self.assertEqual(str(response.context['user']), 'testuser_analystmemo')

    # --- add view ----------------------------------------------------

    def test_analystmemos_add_not_logged_in(self):
        """test add view"""
        self._assert_login_redirect('/analystmemos/add/')

    def test_analystmemos_add_logged_in(self):
        """test add view"""
        self._login()
        response = self.client.get('/analystmemos/add/')
        self.assertEqual(response.status_code, 200)

    def test_analystmemos_add_template(self):
        """test add view"""
        self._login()
        response = self.client.get('/analystmemos/add/')
        self.assertTemplateUsed(
            response, 'dfirtrack_main/analystmemo/analystmemos_add.html')

    def test_analystmemos_add_get_user_context(self):
        """test add view"""
        self._login()
        response = self.client.get('/analystmemos/add/')
        self.assertEqual(str(response.context['user']), 'testuser_analystmemo')

    # --- edit view ---------------------------------------------------

    def test_analystmemos_edit_not_logged_in(self):
        """test edit view"""
        memo = self._memo()
        self._assert_login_redirect(
            '/analystmemos/' + str(memo.analystmemo_id) + '/edit/')

    def test_analystmemos_edit_logged_in(self):
        """test edit view"""
        memo = self._memo()
        self._login()
        response = self.client.get(
            '/analystmemos/' + str(memo.analystmemo_id) + '/edit/')
        self.assertEqual(response.status_code, 200)

    def test_analystmemos_edit_template(self):
        """test edit view"""
        memo = self._memo()
        self._login()
        response = self.client.get(
            '/analystmemos/' + str(memo.analystmemo_id) + '/edit/')
        self.assertTemplateUsed(
            response, 'dfirtrack_main/analystmemo/analystmemos_edit.html')

    def test_analystmemos_edit_get_user_context(self):
        """test edit view"""
        memo = self._memo()
        self._login()
        response = self.client.get(
            '/analystmemos/' + str(memo.analystmemo_id) + '/edit/')
        self.assertEqual(str(response.context['user']), 'testuser_analystmemo')
| 40.232227
| 132
| 0.667216
| 877
| 8,489
| 6.254276
| 0.08894
| 0.051048
| 0.075479
| 0.067092
| 0.856882
| 0.834458
| 0.807293
| 0.800365
| 0.737101
| 0.729626
| 0
| 0.018957
| 0.22323
| 8,489
| 210
| 133
| 40.42381
| 0.81286
| 0.11615
| 0
| 0.463158
| 0
| 0
| 0.179267
| 0.026836
| 0
| 0
| 0
| 0
| 0.168421
| 1
| 0.178947
| false
| 0.136842
| 0.052632
| 0
| 0.242105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
fca80831d929e23bfbfb60baf9d42d3b150c9da1
| 2,443
|
py
|
Python
|
tests/test_utils.py
|
Tedyst/hikvisionapi
|
376d8e7243d2eaf97c3e65705b6992fac0dafd86
|
[
"MIT"
] | 2
|
2021-07-29T17:41:36.000Z
|
2022-03-01T08:44:17.000Z
|
tests/test_utils.py
|
Tedyst/hikvisionapi
|
376d8e7243d2eaf97c3e65705b6992fac0dafd86
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
Tedyst/hikvisionapi
|
376d8e7243d2eaf97c3e65705b6992fac0dafd86
|
[
"MIT"
] | 3
|
2020-02-07T00:44:45.000Z
|
2022-03-08T16:05:57.000Z
|
import hikvisionapi.utils
from collections import OrderedDict
def test_xml2dict():
    """xml2dict should turn a DeviceInfo XML document into nested dicts,
    collecting element attributes under the '@attrs' key."""
    raw = b"""<?xml version = "1.0" encoding = "UTF-8" ?>
<DeviceInfo version = "1.0" asd = "2.0"
xmlns = "http://www.hikvision.com/ver20/XMLSchema">
<deviceName>DVR</deviceName>
<deviceID>someID</deviceID>
<model>DS-7208HUHI-F2/N/A</model>
<serialNumber>someID</serialNumber>
<macAddress>aa:bb:cc:dd:ee:ff</macAddress>
<firmwareVersion>V3.4.88</firmwareVersion>
<firmwareReleasedDate>build180228</firmwareReleasedDate>
<encoderVersion>V5.0</encoderVersion>
<encoderReleasedDate>build180126</encoderReleasedDate>
<deviceType>IPC</deviceType>
<telecontrolID>1</telecontrolID>
</DeviceInfo>
"""
    expected = {
        'DeviceInfo': OrderedDict([
            ('deviceName', 'DVR'),
            ('deviceID', 'someID'),
            ('model', 'DS-7208HUHI-F2/N/A'),
            ('serialNumber', 'someID'),
            ('macAddress', 'aa:bb:cc:dd:ee:ff'),
            ('firmwareVersion', 'V3.4.88'),
            ('firmwareReleasedDate', 'build180228'),
            ('encoderVersion', 'V5.0'),
            ('encoderReleasedDate', 'build180126'),
            ('deviceType', 'IPC'),
            ('telecontrolID', '1'),
            ('@attrs', {'version': '1.0', 'asd': '2.0'}),
        ])
    }
    assert hikvisionapi.utils.xml2dict(raw) == expected
def test_dict2xml():
    """dict2xml should serialize the nested dict back to the canonical
    single-line XML document, emitting '@attrs' entries as attributes."""
    source = {
        'DeviceInfo': OrderedDict([
            ('deviceName', 'DVR'),
            ('deviceID', 'someID'),
            ('model', 'DS-7208HUHI-F2/N/A'),
            ('serialNumber', 'someID'),
            ('macAddress', 'aa:bb:cc:dd:ee:ff'),
            ('firmwareVersion', 'V3.4.88'),
            ('firmwareReleasedDate', 'build180228'),
            ('encoderVersion', 'V5.0'),
            ('encoderReleasedDate', 'build180126'),
            ('deviceType', 'IPC'),
            ('telecontrolID', '1'),
            ('@attrs', {'version': '1.0', 'asd': '2.0'}),
        ])
    }
    expected = """<?xml version = "1.0" encoding = "UTF-8" ?><DeviceInfo asd="2.0" version="1.0" xmlns="http://www.hikvision.com/ver20/XMLSchema"><deviceName>DVR</deviceName><deviceID>someID</deviceID><model>DS-7208HUHI-F2/N/A</model><serialNumber>someID</serialNumber><macAddress>aa:bb:cc:dd:ee:ff</macAddress><firmwareVersion>V3.4.88</firmwareVersion><firmwareReleasedDate>build180228</firmwareReleasedDate><encoderVersion>V5.0</encoderVersion><encoderReleasedDate>build180126</encoderReleasedDate><deviceType>IPC</deviceType><telecontrolID>1</telecontrolID></DeviceInfo>"""
    rendered = hikvisionapi.utils.dict2xml(source)
    print(rendered)
    assert str(expected) == str(rendered)
| 69.8
| 575
| 0.66844
| 258
| 2,443
| 6.321705
| 0.25969
| 0.02943
| 0.033109
| 0.041692
| 0.843041
| 0.843041
| 0.835071
| 0.835071
| 0.793378
| 0.793378
| 0
| 0.058879
| 0.124028
| 2,443
| 34
| 576
| 71.852941
| 0.703271
| 0
| 0
| 0.066667
| 0
| 0.033333
| 0.690954
| 0.169464
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.133333
| 0.033333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5dc3593c048633395389b5d9f5389f970a201d2f
| 18,453
|
py
|
Python
|
tiered_disclosure/views.py
|
danielbankocfpb/tiered-disclosure
|
9631c1e1c1c297c275436577cdcfa8fe63301cd9
|
[
"CC0-1.0"
] | 2
|
2019-02-25T21:50:37.000Z
|
2019-02-25T21:50:37.000Z
|
tiered_disclosure/views.py
|
danielbankocfpb/tiered-disclosure
|
9631c1e1c1c297c275436577cdcfa8fe63301cd9
|
[
"CC0-1.0"
] | null | null | null |
tiered_disclosure/views.py
|
danielbankocfpb/tiered-disclosure
|
9631c1e1c1c297c275436577cdcfa8fe63301cd9
|
[
"CC0-1.0"
] | 3
|
2019-02-25T21:50:27.000Z
|
2021-02-20T10:52:03.000Z
|
from otree.api import Currency as c, currency_range
from . import models
from ._builtin import Page, WaitPage
from .models import Constants, Player, Subsession, Group, BaseSubsession
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from . import export
import string
class InstructionsBasics(Page):
    """Basic instructions page, shown only when base instructions are enabled."""

    def is_displayed(self):
        return self.subsession.show_instructions_base

    def vars_for_template(self):
        # Conversion rate: tokens per dollar, derived from the session config.
        rate = float(self.session.config["real_world_currency_per_point"])
        return {
            'tokens_per_dollar': int(1. / rate),
            'showup': self.session.config['participation_fee'],
        }
class InstructionsBasicsQuiz(Page):
    """Comprehension quiz on the basic instructions."""

    form_model = models.Player
    form_fields = ['basics_q1']

    def is_displayed(self):
        return self.subsession.show_instructions_base

    def vars_for_template(self):
        rate = float(self.session.config["real_world_currency_per_point"])
        return {
            'tokens_per_dollar': int(1. / rate),
        }
class InstructionsFullInformation(Page):
    """Full-information treatment instructions; gated on the base-instructions flag."""

    def is_displayed(self):
        return self.subsession.show_instructions_base
class PracticeBegin(Page):
    """Announces the start of the practice rounds for the current block."""

    def is_displayed(self):
        return self.subsession.show_instructions_practice

    def vars_for_template(self):
        # Blocks are 1-based; Constants lists are 0-based.
        block_idx = self.subsession.block - 1
        return {
            "practicerounds": Constants.num_rounds_practice[block_idx],
        }
class PracticeEnd(Page):
    """Marks the switch from practice to paid rounds for the current block."""

    def is_displayed(self):
        return self.subsession.show_instructions_real

    def vars_for_template(self):
        # Block numbers are 1-based; Constants lists are 0-based.
        block_idx = self.subsession.block - 1
        return {"treatmentrounds": Constants.num_rounds_treatment[block_idx]}
class ChoiceFullInformation(Page):
    """Product-choice page for the full-information treatment.

    Builds the template context with the round's product/preference matrices
    and round-counting bookkeeping. Fixes applied versus the previous version:
    the context dict repeated the key "treatmentrounds" three times (only the
    last survived), ``numpracticerounds`` was computed twice identically, the
    first ``numtreatrounds`` computation was dead (immediately overwritten),
    ``maxproduct`` was unused, and the key ``"practice_round:"`` contained a
    stray colon that made it unreachable from Django templates (a colon-free
    key is added; the old key is kept for strict backward compatibility).
    """
    form_model = models.Player
    form_fields = ['product_selected']

    def is_displayed(self):
        # Shown only in non-ASL treatments.
        return self.subsession.is_asl == 0

    def vars_for_template(self):
        rnd = str(self.round_number)
        block_idx = self.subsession.block - 1  # 1-based block -> 0-based index

        products_total = Constants.num_products[block_idx]
        productdims_total = Constants.productdims_total[block_idx]
        productdims_shown = Constants.productdims_shown[block_idx]
        productdimvals = self.session.vars["productdims_round" + rnd]
        productdimvals_transposed = list(map(list, zip(*productdimvals)))
        preferencedims = Constants.num_prefdims[block_idx]
        preferencedimvals = self.session.vars["preferencedims_round" + rnd]

        # Treatment order is a 1-based, comma-separated list in the session
        # config; convert to 0-based indices into the Constants lists.
        treatmentorder = [int(i) - 1 for i in self.session.config['treatmentorder'].split(',')]
        num_rounds_practice = [Constants.num_rounds_practice[i] for i in treatmentorder]
        numpracticerounds = sum(num_rounds_practice[:self.subsession.block])
        # Treatment rounds completed in earlier blocks (Constants order).
        numtreatrounds = sum(Constants.num_rounds_treatment[:self.subsession.block - 1])
        treatmentroundnum = self.subsession.round_number - numpracticerounds
        roundnum = self.subsession.round_number - numpracticerounds - numtreatrounds

        # Column/row labels: 1 -> "A", 2 -> "B", ...
        letters = dict(enumerate(string.ascii_uppercase, 1))
        prefdim_fns = [letters[i] for i in range(1, max(Constants.num_prefdims) + 1)]
        proddim_fns = [letters[j] for j in range(1, max(Constants.productdims_total) + 1)]

        return {
            "products_total": products_total,
            "productdims_total": productdims_total,
            "productdims_shown": productdims_shown,
            "productdimvals": productdimvals,
            "productdimvals_transposed": productdimvals_transposed,
            "preferencedimvals": preferencedimvals,
            "products_list": list(range(1, products_total + 1)),
            "productdims_list": list(range(1, productdims_shown + 1)),
            "preferences_list": list(range(1, preferencedims + 1)),
            "treatmentorder": treatmentorder,
            "treatmentrounds": Constants.num_rounds_treatment[block_idx],
            "numpracticerounds": numpracticerounds,
            "numtreatrounds": numtreatrounds,
            "treatmentroundnum": treatmentroundnum,
            "round_number": self.round_number,
            "preferencedims": preferencedims,
            "practicerounds": Constants.num_rounds_practice[block_idx],
            "practice_round": self.subsession.practiceround,
            # Legacy key with the stray colon, kept so nothing that might
            # introspect the raw context breaks; templates cannot read it.
            "practice_round:": self.subsession.practiceround,
            "prefdim_fns": prefdim_fns,
            "proddim_fns": proddim_fns,
            "round": roundnum,
            "block": self.subsession.block,
        }

    def before_next_page(self):
        # Intentionally empty: recording the selected product's dimensions in
        # participant.vars is planned but not implemented yet.
        pass
class ChoiceTruncation(Page):
    """Product-choice page for the truncation treatment (only a subset of
    product dimensions is shown).

    Same fixes as ChoiceFullInformation: the context dict repeated the key
    "treatmentrounds" three times, ``numpracticerounds`` was computed twice,
    the first ``numtreatrounds`` computation was dead, ``maxproduct`` was
    unused, and the key ``"practice_round:"`` had a stray colon making it
    unreachable from templates (a colon-free key is added; the old key is
    kept for backward compatibility).
    """
    form_model = models.Player
    form_fields = ['product_selected']

    def is_displayed(self):
        # Shown only in non-ASL treatments.
        return self.subsession.is_asl == 0

    def vars_for_template(self):
        rnd = str(self.round_number)
        block_idx = self.subsession.block - 1  # 1-based block -> 0-based index

        products_total = Constants.num_products[block_idx]
        productdims_total = Constants.productdims_total[block_idx]
        productdims_shown = Constants.productdims_shown[block_idx]
        productdimvals = self.session.vars["productdims_round" + rnd]
        productdimvals_shown = self.session.vars["productdims_shown_round" + rnd]
        # Transpose of the *shown* dimensions only (truncated view).
        productdimvals_transposed = list(map(list, zip(*productdimvals_shown)))
        preferencedims = Constants.num_prefdims[block_idx]
        preferencedimvals = self.session.vars["preferencedims_round" + rnd]

        # Treatment order is a 1-based, comma-separated list in the session
        # config; convert to 0-based indices into the Constants lists.
        treatmentorder = [int(i) - 1 for i in self.session.config['treatmentorder'].split(',')]
        num_rounds_practice = [Constants.num_rounds_practice[i] for i in treatmentorder]
        numpracticerounds = sum(num_rounds_practice[:self.subsession.block])
        # Treatment rounds completed in earlier blocks (Constants order).
        numtreatrounds = sum(Constants.num_rounds_treatment[:self.subsession.block - 1])
        treatmentroundnum = self.subsession.round_number - numpracticerounds
        roundnum = self.subsession.round_number - numpracticerounds - numtreatrounds

        # Column/row labels: 1 -> "A", 2 -> "B", ...
        letters = dict(enumerate(string.ascii_uppercase, 1))
        prefdim_fns = [letters[i] for i in range(1, max(Constants.num_prefdims) + 1)]
        proddim_fns = [letters[j] for j in range(1, max(Constants.productdims_total) + 1)]

        return {
            "products_total": products_total,
            "productdims_total": productdims_total,
            "productdims_shown": productdims_shown,
            "productdimvals": productdimvals,
            "productdimvals_shown": productdimvals_shown,  # truncated dimension values
            "productdimvals_transposed": productdimvals_transposed,
            "preferencedimvals": preferencedimvals,
            "products_list": list(range(1, products_total + 1)),
            "productdims_list": list(range(1, productdims_shown + 1)),
            "preferences_list": list(range(1, preferencedims + 1)),
            "treatmentorder": treatmentorder,
            "treatmentrounds": Constants.num_rounds_treatment[block_idx],
            "numpracticerounds": numpracticerounds,
            "numtreatrounds": numtreatrounds,
            "treatmentroundnum": treatmentroundnum,
            "round_number": self.round_number,
            "preferencedims": preferencedims,
            "practicerounds": Constants.num_rounds_practice[block_idx],
            "practice_round": self.subsession.practiceround,
            # Legacy key with the stray colon, kept for backward compatibility;
            # templates cannot read it.
            "practice_round:": self.subsession.practiceround,
            "prefdim_fns": prefdim_fns,
            "proddim_fns": proddim_fns,
            "round": roundnum,
            "block": self.subsession.block,
        }

    def before_next_page(self):
        # Intentionally empty: recording the selected product's dimensions in
        # participant.vars is planned but not implemented yet.
        pass
class ChoiceASL(Page):
    """Product-choice page for the ASL (representative) treatment.

    Cleanup versus the previous version: ``treatmentorder``,
    ``products_total`` and ``productdims_total`` were each computed twice, and
    ``productdimvals`` plus a reordered ``num_rounds_treatment`` list were
    computed but never used — the duplicates and dead lookups are removed.
    """
    form_model = models.Player
    form_fields = ['product_selected']

    def is_displayed(self):
        # Shown only in ASL treatments.
        return self.subsession.is_asl

    def vars_for_template(self):
        rnd = str(self.round_number)
        block_idx = self.subsession.block - 1  # 1-based block -> 0-based index

        products_total = Constants.num_products[block_idx]
        productdims_total = Constants.productdims_total[block_idx]
        num_representatives = Constants.num_representatives[block_idx]
        num_preferences = Constants.num_prefdims[block_idx]
        representativedimvals = self.session.vars["representativedims_round" + rnd]
        # NOTE: unlike the other choice pages, "preferencedims" here holds the
        # round's preference *values*, not the dimension count.
        preferencedims = self.session.vars["preferencedims_round" + rnd]
        utility_dims = self.session.vars["reputility_round" + rnd]

        # Treatment order is a 1-based, comma-separated list in the session
        # config; convert to 0-based indices into the Constants lists.
        treatmentorder = [int(i) - 1 for i in self.session.config['treatmentorder'].split(',')]
        num_rounds_practice = [Constants.num_rounds_practice[i] for i in treatmentorder]
        numpracticerounds = sum(num_rounds_practice[:self.subsession.block])
        # Treatment rounds completed in earlier blocks (Constants order).
        numtreatrounds = sum(Constants.num_rounds_treatment[:self.subsession.block - 1])
        roundnum = self.subsession.round_number - numpracticerounds - numtreatrounds

        return {
            "num_representatives": num_representatives,
            "representativedimvals": representativedimvals,
            "utility_dims": utility_dims,
            "products_total": products_total,
            "productdims_total": productdims_total,
            "products_list": list(range(1, products_total + 1)),
            "representativedimvals_transposed": list(map(list, zip(*representativedimvals))),
            "representatives_list": list(range(1, num_representatives + 1)),
            "preferences_list": list(range(1, num_preferences + 1)),
            "preferencedims": preferencedims,
            "block": self.subsession.block,
            "round": roundnum,
            "treatmentrounds": Constants.num_rounds_treatment[block_idx]
        }

    def before_next_page(self):
        # No post-submit processing needed for this page.
        pass
class RoundResults(Page):
    """Shows the round outcome: the chosen product versus the best product,
    and whether the choice was a mistake. Also records is_mistake and
    product_best on the player.

    BUG FIX: ``preferencedims``, ``products_total``, ``productdims_total``,
    ``productdimvals_transposed`` and ``products_list`` were previously
    computed only in the non-ASL branch but referenced unconditionally in the
    returned context, so ASL sessions crashed with NameError. None of those
    values depends on the treatment, so they are now computed unconditionally;
    the representative values computed for ASL (previously discarded) are now
    added to the context as well.
    """

    def vars_for_template(self):
        rnd = str(self.round_number)
        block_idx = self.subsession.block - 1  # 1-based block -> 0-based index

        product_selected = self.player.product_selected
        product_best = self.session.vars["bestproduct_round" + rnd]
        is_mistake = 0 if product_selected == product_best else 1

        num_preferences = Constants.num_prefdims[block_idx]
        products_total = Constants.num_products[block_idx]
        productdims_total = Constants.productdims_total[block_idx]
        productdimvals = self.session.vars["productdims_round" + rnd]
        productdimvals_selected = productdimvals[product_selected - 1]
        productdimvals_best = productdimvals[product_best - 1]
        productdimvals_transposed = list(map(list, zip(*productdimvals)))
        utilities_list = self.session.vars["productutilities_round" + rnd]
        selected_utility = utilities_list[product_selected - 1]
        best_utility = utilities_list[product_best - 1]
        preferencedims = self.session.vars["preferencedims_round" + rnd]

        # Persist the outcome on the player record.
        self.player.is_mistake = is_mistake
        self.player.product_best = product_best

        context = {
            "is_asl": self.subsession.is_asl,
            "is_mistake": is_mistake,
            "preferencedims": preferencedims,
            "productdimvals_best": productdimvals_best,
            "product_best": product_best,
            "product_selected": product_selected,
            "productdims_total": productdims_total,
            "productdimvals": productdimvals,
            "productdimvals_selected": productdimvals_selected,
            "productdimvals_transposed": productdimvals_transposed,
            "products_list": list(range(1, products_total + 1)),
            "products_total": products_total,
            "preferences_list": list(range(1, num_preferences + 1)),
            "utility_best": best_utility,
            "utilities_list": utilities_list,
            "utility_selected": selected_utility,
            "practice_round": self.subsession.practiceround,
            # Legacy key with the stray colon, kept for backward compatibility;
            # templates cannot read it.
            "practice_round:": self.subsession.practiceround,
        }

        if self.subsession.is_asl == 1:
            num_representatives = Constants.num_representatives[block_idx]
            # NOTE(review): the original read the "productdims_round..." key
            # here; "representativedims_round..." may have been intended
            # (cf. ChoiceASL) — confirm before changing the key.
            representativedimvals = self.session.vars["productdims_round" + rnd]
            context.update({
                "num_representatives": num_representatives,
                "representatives_list": list(range(1, num_representatives + 1)),
                "representativedimvals": representativedimvals,
                "representativedimvals_transposed": list(map(list, zip(*representativedimvals))),
            })
        return context
class Splash(Page):
    """Placeholder page bound to the Player model with no form inputs."""
    form_model = models.Player
    form_fields = []
# @login_required
def DataDownload(request):
    """Django view: export all contract data and return it as a CSV download.

    NOTE(review): @login_required is commented out, so this endpoint is
    reachable without authentication — confirm that is intentional.
    """
    headers, body = export.export_contracts()
    return export.export_csv("Data", headers, body)
# Order in which oTree serves pages each round; each page's is_displayed()
# decides whether it actually appears for a given subsession.
# NOTE(review): ChoiceFullInformation is defined above but absent from this
# sequence — confirm whether that is intentional.
page_sequence = [
    InstructionsBasics,
    InstructionsBasicsQuiz,
    InstructionsFullInformation,
    # InstructionsTruncation,
    # InstructionsTruncationQuiz,
    # InstructionsASL,
    # InstructionsASLQuiz,
    # InstructionsNewTreatment,
    # InstructionsRoundResultsQuiz,
    PracticeBegin,
    PracticeEnd,
    ChoiceTruncation,
    ChoiceASL,
    RoundResults,
]
# Alternative minimal sequence (debugging aid), kept commented out:
# page_sequence = [
#     ChoiceTruncation,
#     ChoiceASL,
#     RoundResults,
# ]
| 42.616628
| 173
| 0.741993
| 2,091
| 18,453
| 6.324247
| 0.090866
| 0.102692
| 0.066092
| 0.051422
| 0.810799
| 0.785768
| 0.781307
| 0.751739
| 0.726709
| 0.664171
| 0
| 0.00644
| 0.150111
| 18,453
| 433
| 174
| 42.616628
| 0.836766
| 0.207283
| 0
| 0.708772
| 0
| 0
| 0.127229
| 0.024413
| 0
| 0
| 0
| 0.002309
| 0
| 1
| 0.070175
| false
| 0.017544
| 0.02807
| 0.035088
| 0.22807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5df6f3e2f8570e1b1fc508af7408ff3b74406dae
| 126
|
py
|
Python
|
pos_loyalty/models/__init__.py
|
kenysmile/test_facebook
|
844a3ddd53abd319c0115de86909118a37106c67
|
[
"Apache-2.0"
] | null | null | null |
pos_loyalty/models/__init__.py
|
kenysmile/test_facebook
|
844a3ddd53abd319c0115de86909118a37106c67
|
[
"Apache-2.0"
] | null | null | null |
pos_loyalty/models/__init__.py
|
kenysmile/test_facebook
|
844a3ddd53abd319c0115de86909118a37106c67
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import pos_loyalty
from . import pos_config
from . import res_partner
from . import pos_order
| 18
| 25
| 0.722222
| 19
| 126
| 4.578947
| 0.578947
| 0.45977
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.174603
| 126
| 6
| 26
| 21
| 0.826923
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b909d98ce6af074aceede38b66f40c1b0f29345d
| 3,824
|
py
|
Python
|
src/endpoints/bots/forms.py
|
devsetgo/pynote_2
|
e5a086a06ed19774f569eb59486d1d0960e78c06
|
[
"MIT"
] | null | null | null |
src/endpoints/bots/forms.py
|
devsetgo/pynote_2
|
e5a086a06ed19774f569eb59486d1d0960e78c06
|
[
"MIT"
] | 47
|
2021-04-10T14:47:38.000Z
|
2022-03-05T02:40:59.000Z
|
src/endpoints/bots/forms.py
|
devsetgo/pynote_2
|
e5a086a06ed19774f569eb59486d1d0960e78c06
|
[
"MIT"
] | null | null | null |
# # -*- coding: utf-8 -*-
# from starlette_wtf import StarletteForm
# from wtforms import BooleanField
# from wtforms import FileField
# from wtforms import TextAreaField
# from wtforms import TextField
# from wtforms.validators import DataRequired
# from wtforms.validators import Length
# from wtforms.widgets import FileInput
# class LyricsForm(StarletteForm):
# """Lyrics File Upload"""
# lyric_file = FileField(
# "Lyrics CSV",
# widget=FileInput(multiple=False),
# validators=[DataRequired("File is required"),],
# )
# class BotActivationForm(StarletteForm):
# """ Activate or Deactivate Bot """
# activate_bot = BooleanField()
# class BotDeletionForm(StarletteForm):
# """ Delete Bot """
# twitter_name = TextField(
# "Twiter Name", validators=[DataRequired("Please enter the twitter account"),],
# )
# class UpdateBotForm(StarletteForm):
# twitter_name = TextField(
# "Twiter Name",
# validators=[
# DataRequired("Please enter the twitter account"),
# Length(min=4, max=30, message="length min 4, max 30"),
# ],
# )
# consumer_key = TextField(
# "Consumer Key",
# validators=[
# DataRequired("Please enter the consumer key"),
# Length(min=4, max=300),
# ],
# )
# consumer_secret = TextField(
# "Consumer Secret",
# validators=[
# DataRequired("Please enter the consumer secret "),
# Length(min=4, max=300),
# ],
# )
# access_token = TextField(
# "Access Token",
# validators=[
# DataRequired("Please enter the access token"),
# Length(min=4, max=300),
# ],
# )
# access_token_secret = TextField(
# "Access Token Secret",
# validators=[
# DataRequired("Please enter the access token secret"),
# Length(min=4, max=300),
# ],
# )
# description = TextAreaField(
# "Description",
# validators=[
# DataRequired("Please enter a description"),
# Length(min=10, max=500),
# ],
# )
# bot_image = FileField(
# "Bot Image",
# widget=FileInput(multiple=False),
# # validators=[DataRequired("File is required")],
# )
# class NewBotForm(StarletteForm):
# twitter_name = TextField(
# "Twiter Name",
# validators=[
# DataRequired("Please enter the twitter account"),
# Length(min=4, max=30, message="length min 4, max 30"),
# ],
# )
# consumer_key = TextField(
# "Consumer Key",
# validators=[
# DataRequired("Please enter the consumer key"),
# Length(min=4, max=300),
# ],
# )
# consumer_secret = TextField(
# "Consumer Secret",
# validators=[
# DataRequired("Please enter the consumer secret "),
# Length(min=4, max=300),
# ],
# )
# access_token = TextField(
# "Access Token",
# validators=[
# DataRequired("Please enter the access token"),
# Length(min=4, max=300),
# ],
# )
# access_token_secret = TextField(
# "Access Token Secret",
# validators=[
# DataRequired("Please enter the access token secret"),
# Length(min=4, max=300),
# ],
# )
# description = TextAreaField(
# "Description",
# validators=[
# DataRequired("Please enter a description"),
# Length(min=10, max=500),
# ],
# )
# bot_image = FileField(
# "Bot Image",
# widget=FileInput(multiple=False),
# validators=[DataRequired("File is required")],
# )
| 28.117647
| 88
| 0.546025
| 336
| 3,824
| 6.160714
| 0.184524
| 0.170048
| 0.175845
| 0.207246
| 0.769082
| 0.769082
| 0.769082
| 0.769082
| 0.769082
| 0.769082
| 0
| 0.021219
| 0.322176
| 3,824
| 135
| 89
| 28.325926
| 0.777392
| 0.93227
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f8f6d0ef53cdf767d8b3e90c40e0af4604df337b
| 195
|
py
|
Python
|
med_ai_site/anouncements/admin.py
|
com-med-ai/med_ai_site
|
91794f43a9bc6925bd95d2daa55244e1de89892a
|
[
"Apache-2.0"
] | 2
|
2021-09-22T13:16:59.000Z
|
2021-09-22T13:19:55.000Z
|
med_ai_site/anouncements/admin.py
|
com-med-ai/med_ai_site
|
91794f43a9bc6925bd95d2daa55244e1de89892a
|
[
"Apache-2.0"
] | null | null | null |
med_ai_site/anouncements/admin.py
|
com-med-ai/med_ai_site
|
91794f43a9bc6925bd95d2daa55244e1de89892a
|
[
"Apache-2.0"
] | 1
|
2021-09-22T13:20:00.000Z
|
2021-09-22T13:20:00.000Z
|
from django.contrib import admin
"""from .models import Anouncements, AnonuncemntPhotos
admin.site.register(AnonuncemntPhotos)
admin.site.register(Anouncements)"""
# Register your models here.
| 24.375
| 54
| 0.810256
| 22
| 195
| 7.181818
| 0.545455
| 0.278481
| 0.329114
| 0.43038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 195
| 7
| 55
| 27.857143
| 0.892655
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5d29e1ff3d7768ebf4c429381f55bbb7c6bbe800
| 162,789
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_crypto_ssh_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_crypto_ssh_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_crypto_ssh_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_crypto_ssh_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR crypto\-ssh package operational data.
This module contains definitions
for the following management objects\:
ssh1\: Crypto Secure Shell(SSH) data
ssh\: ssh
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Authen(Enum):
    """
    Authen (Enum Class)

    SSH session authentication types

    .. data:: password = 0

        Password

    .. data:: rsa_public_key = 1

        RSA public key encryption type

    .. data:: keyboard_interactive = 2

        Keyboard interactive

    """

    # NOTE(review): this file appears to be machine-generated (ydk-gen style);
    # prefer regenerating from the YANG model over hand-editing.
    # Members map Python names to the YANG identifier strings.
    password = Enum.YLeaf(0, "password")

    rsa_public_key = Enum.YLeaf(1, "rsa-public-key")

    keyboard_interactive = Enum.YLeaf(2, "keyboard-interactive")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Authen']
class Cipher(Enum):
    """
    Cipher (Enum Class)

    SSH session in and out cipher standards

    .. data:: cipher_not_applicable = -1

        unknown

    .. data:: aes128_cbc = 0

        Advanced Encryption Standard(AES) 128 bits
        cipher block chaining(CBC)

    .. data:: aes192_cbc = 1

        Advanced Encryption Standard(AES) 192 bits
        cipher block chaining(CBC)

    .. data:: aes256_cbc = 2

        Advanced Encryption Standard(AES) 256 bits
        cipher block chaining(CBC)

    .. data:: triple_des_cbc = 3

        Triple Data Encryption Standard(DES) cipher
        block chaining(CBC)

    .. data:: aes128_ctr = 4

        Advanced Encryption Standard(AES) 128 bits
        counter mode (CTR)

    .. data:: aes192_ctr = 5

        Advanced Encryption Standard(AES) 192 bits
        counter mode (CTR)

    .. data:: aes256_ctr = 6

        Advanced Encryption Standard(AES) 256 bits
        counter mode (CTR)

    .. data:: aes128_gcm = 7

        Advanced Encryption Standard(AES) 128 bits GCM
        mode (GCM)

    .. data:: aes256_gcm = 8

        Advanced Encryption Standard(AES) 256 bits GCM
        mode (GCM)

    """

    # Members map Python names to the YANG identifier strings; -1 is the
    # generated "unknown/not applicable" sentinel.
    cipher_not_applicable = Enum.YLeaf(-1, "cipher-not-applicable")

    aes128_cbc = Enum.YLeaf(0, "aes128-cbc")

    aes192_cbc = Enum.YLeaf(1, "aes192-cbc")

    aes256_cbc = Enum.YLeaf(2, "aes256-cbc")

    triple_des_cbc = Enum.YLeaf(3, "triple-des-cbc")

    aes128_ctr = Enum.YLeaf(4, "aes128-ctr")

    aes192_ctr = Enum.YLeaf(5, "aes192-ctr")

    aes256_ctr = Enum.YLeaf(6, "aes256-ctr")

    aes128_gcm = Enum.YLeaf(7, "aes128-gcm")

    aes256_gcm = Enum.YLeaf(8, "aes256-gcm")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Cipher']
class Connection(Enum):
    """
    Connection (Enum Class)

    SSH channel connection types

    .. data:: undefined = 0

        connection type not yet known

    .. data:: shell = 1

        Interactive Shell

    .. data:: exec_ = 2

        Remote Command Execution

    .. data:: scp = 3

        Secure Copy

    .. data:: sftp_subsystem = 4

        Secure File Transfer

    .. data:: netconf_subsystem = 5

        Netconf Subsystem

    .. data:: tl1_subsystem = 6

        TL1 Subsystem

    .. data:: netconf_xml_subsystem = 7

        Netconf XML Subsystem

    """

    # Members map Python names to the YANG identifier strings; "exec_" has a
    # trailing underscore because "exec" is reserved in Python 2.
    undefined = Enum.YLeaf(0, "undefined")

    shell = Enum.YLeaf(1, "shell")

    exec_ = Enum.YLeaf(2, "exec")

    scp = Enum.YLeaf(3, "scp")

    sftp_subsystem = Enum.YLeaf(4, "sftp-subsystem")

    netconf_subsystem = Enum.YLeaf(5, "netconf-subsystem")

    tl1_subsystem = Enum.YLeaf(6, "tl1-subsystem")

    netconf_xml_subsystem = Enum.YLeaf(7, "netconf-xml-subsystem")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Connection']
class Hostkey(Enum):
    """
    Hostkey (Enum Class)

    SSH session authentication types

    .. data:: host_key_not_applicable = -1

        unknown

    .. data:: ssh_dss = 0

        Algorithm type DSS

    .. data:: ssh_rsa = 1

        Algorithm type RSA

    .. data:: ecdsa_sha2_nistp521 = 2

        Algorithm type ECDSA NISTP521

    .. data:: ecdsa_sha2_nistp384 = 3

        Algorithm type ECDSA NISTP384

    .. data:: ecdsa_sha2_nistp256 = 4

        Algorithm type ECDSA NISTP256

    """

    # Members map Python names to the YANG identifier strings; -1 is the
    # generated "unknown/not applicable" sentinel.
    host_key_not_applicable = Enum.YLeaf(-1, "host-key-not-applicable")

    ssh_dss = Enum.YLeaf(0, "ssh-dss")

    ssh_rsa = Enum.YLeaf(1, "ssh-rsa")

    ecdsa_sha2_nistp521 = Enum.YLeaf(2, "ecdsa-sha2-nistp521")

    ecdsa_sha2_nistp384 = Enum.YLeaf(3, "ecdsa-sha2-nistp384")

    ecdsa_sha2_nistp256 = Enum.YLeaf(4, "ecdsa-sha2-nistp256")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Hostkey']
class KexName(Enum):
    """
    KexName (Enum Class)

    Different key\-exchange(kex) algorithms

    .. data:: kex_not_applicable = -1

        unknown

    .. data:: diffie_hellman_group1 = 0

        Diffie-Hellman group 1 key exchange algorithm

    .. data:: diffie_hellman_group14 = 1

        Diffie-Hellman group 14 key exchange algorithm

    .. data:: diffie_hellman_group15 = 2

        Diffie-Hellman group 15 key exchange algorithm

    .. data:: diffie_hellman_group16 = 3

        Diffie-Hellman group 16 key exchange algorithm

    .. data:: diffie_hellman_group17 = 4

        Diffie-Hellman group 17 key exchange algorithm

    .. data:: diffie_hellman_group18 = 5

        Diffie-Hellman key group 18 exchange algorithm

    .. data:: ecdh_nistp256 = 6

        Elliptical curve Diffie-Hellman prime 256 key
        exchange algorithm

    .. data:: ecdh_nistp384 = 7

        Elliptical curve Diffie-Hellman prime 384 key
        exchange algorithm

    .. data:: ecdh_nistp521 = 8

        Elliptical curve Diffie-Hellman prime 521
        exchange algorithm

    .. data:: password_authenticated = 9

        Password authenticated key agreement algorithm

    """

    # Members map Python names to the YANG identifier strings; -1 is the
    # generated "unknown/not applicable" sentinel. (Docstring for group15
    # previously said "group 14" — corrected to match the member name.)
    kex_not_applicable = Enum.YLeaf(-1, "kex-not-applicable")

    diffie_hellman_group1 = Enum.YLeaf(0, "diffie-hellman-group1")

    diffie_hellman_group14 = Enum.YLeaf(1, "diffie-hellman-group14")

    diffie_hellman_group15 = Enum.YLeaf(2, "diffie-hellman-group15")

    diffie_hellman_group16 = Enum.YLeaf(3, "diffie-hellman-group16")

    diffie_hellman_group17 = Enum.YLeaf(4, "diffie-hellman-group17")

    diffie_hellman_group18 = Enum.YLeaf(5, "diffie-hellman-group18")

    ecdh_nistp256 = Enum.YLeaf(6, "ecdh-nistp256")

    ecdh_nistp384 = Enum.YLeaf(7, "ecdh-nistp384")

    ecdh_nistp521 = Enum.YLeaf(8, "ecdh-nistp521")

    password_authenticated = Enum.YLeaf(9, "password-authenticated")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['KexName']
class Mac(Enum):
    """
    Mac (Enum Class)

    Different Message Authentication Code(MAC)
    functions

    .. data:: mac_not_applicable = -1

        unknown

    .. data:: hmac_md5 = 0

        Hash-based Message Authentication Code(HMAC)
        MD5 algorithm

    .. data:: hmac_sha1 = 1

        Hash-based Message Authentication Code(HMAC)
        SHA1 algorithm

    .. data:: hmac_sha2_256 = 2

        Hash-based Message Authentication Code(HMAC)
        SHA2-256 algorithm

    .. data:: hmac_sha2_512 = 3

        Hash-based Message Authentication Code(HMAC)
        SHA2-512 algorithm

    .. data:: aes_gcm = 4

        AES GCM based Authentication Tag as MAC
        algorithm

    """

    # Members map Python names to the YANG identifier strings; -1 is the
    # generated "unknown/not applicable" sentinel.
    mac_not_applicable = Enum.YLeaf(-1, "mac-not-applicable")

    hmac_md5 = Enum.YLeaf(0, "hmac-md5")

    hmac_sha1 = Enum.YLeaf(1, "hmac-sha1")

    hmac_sha2_256 = Enum.YLeaf(2, "hmac-sha2-256")

    hmac_sha2_512 = Enum.YLeaf(3, "hmac-sha2-512")

    aes_gcm = Enum.YLeaf(4, "aes-gcm")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Mac']
class States(Enum):
    """
    States (Enum Class)

    SSH session states

    .. data:: open = 1

        SSH Open

    .. data:: version_ok = 2

        SSH version OK

    .. data:: key_exchange_initialize = 3

        Key exchange(KEX) init message exchanged

    .. data:: key_exchange_dh = 4

        Diffie-Hellman(DH) secret is generated

    .. data:: new_keys = 5

        New keys are received

    .. data:: authenticate_information = 6

        Need more information to authenticate

    .. data:: authenticated = 7

        The client successfully authenticated

    .. data:: channel_open = 8

        Channel has been successfully opened

    .. data:: pty_open = 9

        Allocated PTY

    .. data:: session_open = 10

        Opened an exec shell

    .. data:: rekey = 11

        Received rekey request

    .. data:: suspended = 12

        Session is suspended

    .. data:: session_closed = 13

        Session has been closed

    """

    # Members map Python names to the YANG identifier strings; values follow
    # the SSH session lifecycle order (open -> ... -> session_closed).
    open = Enum.YLeaf(1, "open")

    version_ok = Enum.YLeaf(2, "version-ok")

    key_exchange_initialize = Enum.YLeaf(3, "key-exchange-initialize")

    key_exchange_dh = Enum.YLeaf(4, "key-exchange-dh")

    new_keys = Enum.YLeaf(5, "new-keys")

    authenticate_information = Enum.YLeaf(6, "authenticate-information")

    authenticated = Enum.YLeaf(7, "authenticated")

    channel_open = Enum.YLeaf(8, "channel-open")

    pty_open = Enum.YLeaf(9, "pty-open")

    session_open = Enum.YLeaf(10, "session-open")

    rekey = Enum.YLeaf(11, "rekey")

    suspended = Enum.YLeaf(12, "suspended")

    session_closed = Enum.YLeaf(13, "session-closed")

    @staticmethod
    def _meta_info():
        # Deferred import: the _meta tables are large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['States']
class Version(Enum):
    """
    Version (Enum Class)

    SSH state versions.

    .. data:: v2 = 0
        Version V2
    .. data:: v1 = 1
        Version V1
    """

    # Auto-generated from the YANG model: numeric value + YANG identifier.
    v2 = Enum.YLeaf(0, "v2")

    v1 = Enum.YLeaf(1, "v1")

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the generated meta module eagerly.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Version']
class Ssh1(_Entity_):
    """
    Crypto Secure Shell(SSH) data.

    .. attribute:: kex

        key exchange method data

        **type**\: :py:class:`Kex <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex>`

        **config**\: False
    """

    _prefix = 'crypto-ssh-oper'
    _revision = '2017-08-25'

    def __init__(self):
        # Python 2/3 compatible super() call (generated compatibility shim).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ssh1, self).__init__()
        self._top_entity = None

        self.yang_name = "ssh1"
        self.yang_parent_name = "Cisco-IOS-XR-crypto-ssh-oper"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Maps YANG child node name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("kex", ("kex", Ssh1.Kex))])
        self._leafs = OrderedDict()

        self.kex = Ssh1.Kex()
        self.kex.parent = self
        self._children_name_map["kex"] = "kex"
        self._segment_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh1"
        # Once frozen, __setattr__ below restricts further attribute changes.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegates to the YDK entity machinery; the empty list means this
        # class has no YANG list keys to protect.
        self._perform_setattr(Ssh1, [], name, value)


    class Kex(_Entity_):
        """
        key exchange method data.

        .. attribute:: nodes

            Node\-specific ssh session details

            **type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes>`

            **config**\: False
        """

        _prefix = 'crypto-ssh-oper'
        _revision = '2017-08-25'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ssh1.Kex, self).__init__()

            self.yang_name = "kex"
            self.yang_parent_name = "ssh1"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("nodes", ("nodes", Ssh1.Kex.Nodes))])
            self._leafs = OrderedDict()

            self.nodes = Ssh1.Kex.Nodes()
            self.nodes.parent = self
            self._children_name_map["nodes"] = "nodes"
            self._segment_path = lambda: "kex"
            self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh1/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ssh1.Kex, [], name, value)


        class Nodes(_Entity_):
            """
            Node\-specific ssh session details.

            .. attribute:: node

                SSH session details for a particular node

                **type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node>`

                **config**\: False
            """

            _prefix = 'crypto-ssh-oper'
            _revision = '2017-08-25'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Ssh1.Kex.Nodes, self).__init__()

                self.yang_name = "nodes"
                self.yang_parent_name = "kex"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("node", ("node", Ssh1.Kex.Nodes.Node))])
                self._leafs = OrderedDict()

                # YANG list: one Node entry per device node.
                self.node = YList(self)
                self._segment_path = lambda: "nodes"
                self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh1/kex/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Ssh1.Kex.Nodes, [], name, value)


            class Node(_Entity_):
                """
                SSH session details for a particular node.

                .. attribute:: node_name  (key)

                    Node name

                    **type**\: str

                    **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)

                    **config**\: False

                .. attribute:: incoming_sessions

                    List of incoming sessions

                    **type**\: :py:class:`IncomingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node.IncomingSessions>`

                    **config**\: False

                .. attribute:: outgoing_connections

                    List of outgoing connections

                    **type**\: :py:class:`OutgoingConnections <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node.OutgoingConnections>`

                    **config**\: False
                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh1.Kex.Nodes.Node, self).__init__()

                    self.yang_name = "node"
                    self.yang_parent_name = "nodes"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    # node_name is the YANG list key; _perform_setattr guards it.
                    self.ylist_key_names = ['node_name']
                    self._child_classes = OrderedDict([("incoming-sessions", ("incoming_sessions", Ssh1.Kex.Nodes.Node.IncomingSessions)), ("outgoing-connections", ("outgoing_connections", Ssh1.Kex.Nodes.Node.OutgoingConnections))])
                    self._leafs = OrderedDict([
                        ('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
                    ])
                    self.node_name = None

                    self.incoming_sessions = Ssh1.Kex.Nodes.Node.IncomingSessions()
                    self.incoming_sessions.parent = self
                    self._children_name_map["incoming_sessions"] = "incoming-sessions"

                    self.outgoing_connections = Ssh1.Kex.Nodes.Node.OutgoingConnections()
                    self.outgoing_connections.parent = self
                    self._children_name_map["outgoing_connections"] = "outgoing-connections"
                    # Keyed list entry: path embeds the node-name key predicate.
                    self._segment_path = lambda: "node" + "[node-name='" + str(self.node_name) + "']"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh1/kex/nodes/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Ssh1.Kex.Nodes.Node, ['node_name'], name, value)


                class IncomingSessions(_Entity_):
                    """
                    List of incoming sessions.

                    .. attribute:: session_detail_info

                        session detail info

                        **type**\: list of :py:class:`SessionDetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo>`

                        **config**\: False
                    """

                    _prefix = 'crypto-ssh-oper'
                    _revision = '2017-08-25'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Ssh1.Kex.Nodes.Node.IncomingSessions, self).__init__()

                        self.yang_name = "incoming-sessions"
                        self.yang_parent_name = "node"
                        self.is_top_level_class = False
                        # True because the ancestor Node is a keyed YANG list,
                        # so no absolute path can be formed without its key.
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("session-detail-info", ("session_detail_info", Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo))])
                        self._leafs = OrderedDict()

                        self.session_detail_info = YList(self)
                        self._segment_path = lambda: "incoming-sessions"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ssh1.Kex.Nodes.Node.IncomingSessions, [], name, value)


                    class SessionDetailInfo(_Entity_):
                        """
                        session detail info.

                        .. attribute:: next_session

                            next session

                            **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo.NextSession>`

                            **config**\: False

                        .. attribute:: session_id

                            Session ID

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                        .. attribute:: key_exchange

                            Key exchange name

                            **type**\: :py:class:`KexName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.KexName>`

                            **config**\: False

                        .. attribute:: public_key

                            Host key algorithm

                            **type**\: :py:class:`Hostkey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Hostkey>`

                            **config**\: False

                        .. attribute:: in_cipher

                            In cipher algorithm

                            **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                            **config**\: False

                        .. attribute:: out_cipher

                            Out cipher algorithm

                            **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                            **config**\: False

                        .. attribute:: in_mac

                            In MAC

                            **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                            **config**\: False

                        .. attribute:: out_mac

                            Out MAC

                            **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                            **config**\: False

                        .. attribute:: start_time

                            session start time

                            **type**\: str

                            **config**\: False

                        .. attribute:: end_time

                            session end time

                            **type**\: str

                            **config**\: False
                        """

                        _prefix = 'crypto-ssh-oper'
                        _revision = '2017-08-25'

                        def __init__(self):
                            if sys.version_info > (3,):
                                super().__init__()
                            else:
                                super(Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo, self).__init__()

                            self.yang_name = "session-detail-info"
                            self.yang_parent_name = "incoming-sessions"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("next-session", ("next_session", Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo.NextSession))])
                            # Leaf map: python name -> (YLeaf descriptor, accepted python types).
                            self._leafs = OrderedDict([
                                ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                                ('key_exchange', (YLeaf(YType.enumeration, 'key-exchange'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'KexName', '')])),
                                ('public_key', (YLeaf(YType.enumeration, 'public-key'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Hostkey', '')])),
                                ('in_cipher', (YLeaf(YType.enumeration, 'in-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                                ('out_cipher', (YLeaf(YType.enumeration, 'out-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                                ('in_mac', (YLeaf(YType.enumeration, 'in-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                                ('out_mac', (YLeaf(YType.enumeration, 'out-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                                ('start_time', (YLeaf(YType.str, 'start-time'), ['str'])),
                                ('end_time', (YLeaf(YType.str, 'end-time'), ['str'])),
                            ])
                            self.session_id = None
                            self.key_exchange = None
                            self.public_key = None
                            self.in_cipher = None
                            self.out_cipher = None
                            self.in_mac = None
                            self.out_mac = None
                            self.start_time = None
                            self.end_time = None

                            self.next_session = Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo.NextSession()
                            self.next_session.parent = self
                            self._children_name_map["next_session"] = "next-session"
                            self._segment_path = lambda: "session-detail-info"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo, ['session_id', 'key_exchange', 'public_key', 'in_cipher', 'out_cipher', 'in_mac', 'out_mac', 'start_time', 'end_time'], name, value)


                        class NextSession(_Entity_):
                            """
                            next session.

                            Empty presence container: it carries no leafs or
                            children of its own.
                            """

                            _prefix = 'crypto-ssh-oper'
                            _revision = '2017-08-25'

                            def __init__(self):
                                if sys.version_info > (3,):
                                    super().__init__()
                                else:
                                    super(Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo.NextSession, self).__init__()

                                self.yang_name = "next-session"
                                self.yang_parent_name = "session-detail-info"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict()
                                self._segment_path = lambda: "next-session"
                                self._is_frozen = True

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                                return meta._meta_table['Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo.NextSession']['meta_info']

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                            return meta._meta_table['Ssh1.Kex.Nodes.Node.IncomingSessions.SessionDetailInfo']['meta_info']

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                        return meta._meta_table['Ssh1.Kex.Nodes.Node.IncomingSessions']['meta_info']


                class OutgoingConnections(_Entity_):
                    """
                    List of outgoing connections.

                    .. attribute:: session_detail_info

                        session detail info

                        **type**\: list of :py:class:`SessionDetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo>`

                        **config**\: False
                    """

                    _prefix = 'crypto-ssh-oper'
                    _revision = '2017-08-25'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Ssh1.Kex.Nodes.Node.OutgoingConnections, self).__init__()

                        self.yang_name = "outgoing-connections"
                        self.yang_parent_name = "node"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("session-detail-info", ("session_detail_info", Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo))])
                        self._leafs = OrderedDict()

                        self.session_detail_info = YList(self)
                        self._segment_path = lambda: "outgoing-connections"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ssh1.Kex.Nodes.Node.OutgoingConnections, [], name, value)


                    class SessionDetailInfo(_Entity_):
                        """
                        session detail info.

                        Mirrors IncomingSessions.SessionDetailInfo but lives
                        under the outgoing\-connections list.

                        .. attribute:: next_session

                            next session

                            **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo.NextSession>`

                            **config**\: False

                        .. attribute:: session_id

                            Session ID

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                        .. attribute:: key_exchange

                            Key exchange name

                            **type**\: :py:class:`KexName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.KexName>`

                            **config**\: False

                        .. attribute:: public_key

                            Host key algorithm

                            **type**\: :py:class:`Hostkey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Hostkey>`

                            **config**\: False

                        .. attribute:: in_cipher

                            In cipher algorithm

                            **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                            **config**\: False

                        .. attribute:: out_cipher

                            Out cipher algorithm

                            **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                            **config**\: False

                        .. attribute:: in_mac

                            In MAC

                            **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                            **config**\: False

                        .. attribute:: out_mac

                            Out MAC

                            **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                            **config**\: False

                        .. attribute:: start_time

                            session start time

                            **type**\: str

                            **config**\: False

                        .. attribute:: end_time

                            session end time

                            **type**\: str

                            **config**\: False
                        """

                        _prefix = 'crypto-ssh-oper'
                        _revision = '2017-08-25'

                        def __init__(self):
                            if sys.version_info > (3,):
                                super().__init__()
                            else:
                                super(Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo, self).__init__()

                            self.yang_name = "session-detail-info"
                            self.yang_parent_name = "outgoing-connections"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("next-session", ("next_session", Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo.NextSession))])
                            self._leafs = OrderedDict([
                                ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                                ('key_exchange', (YLeaf(YType.enumeration, 'key-exchange'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'KexName', '')])),
                                ('public_key', (YLeaf(YType.enumeration, 'public-key'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Hostkey', '')])),
                                ('in_cipher', (YLeaf(YType.enumeration, 'in-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                                ('out_cipher', (YLeaf(YType.enumeration, 'out-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                                ('in_mac', (YLeaf(YType.enumeration, 'in-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                                ('out_mac', (YLeaf(YType.enumeration, 'out-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                                ('start_time', (YLeaf(YType.str, 'start-time'), ['str'])),
                                ('end_time', (YLeaf(YType.str, 'end-time'), ['str'])),
                            ])
                            self.session_id = None
                            self.key_exchange = None
                            self.public_key = None
                            self.in_cipher = None
                            self.out_cipher = None
                            self.in_mac = None
                            self.out_mac = None
                            self.start_time = None
                            self.end_time = None

                            self.next_session = Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo.NextSession()
                            self.next_session.parent = self
                            self._children_name_map["next_session"] = "next-session"
                            self._segment_path = lambda: "session-detail-info"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo, ['session_id', 'key_exchange', 'public_key', 'in_cipher', 'out_cipher', 'in_mac', 'out_mac', 'start_time', 'end_time'], name, value)


                        class NextSession(_Entity_):
                            """
                            next session.

                            Empty presence container: it carries no leafs or
                            children of its own.
                            """

                            _prefix = 'crypto-ssh-oper'
                            _revision = '2017-08-25'

                            def __init__(self):
                                if sys.version_info > (3,):
                                    super().__init__()
                                else:
                                    super(Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo.NextSession, self).__init__()

                                self.yang_name = "next-session"
                                self.yang_parent_name = "session-detail-info"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict()
                                self._segment_path = lambda: "next-session"
                                self._is_frozen = True

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                                return meta._meta_table['Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo.NextSession']['meta_info']

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                            return meta._meta_table['Ssh1.Kex.Nodes.Node.OutgoingConnections.SessionDetailInfo']['meta_info']

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                        return meta._meta_table['Ssh1.Kex.Nodes.Node.OutgoingConnections']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh1.Kex.Nodes.Node']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                return meta._meta_table['Ssh1.Kex.Nodes']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
            return meta._meta_table['Ssh1.Kex']['meta_info']

    def clone_ptr(self):
        # Returns a fresh top-level entity; used by YDK service machinery.
        self._top_entity = Ssh1()
        return self._top_entity

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Ssh1']['meta_info']
class Ssh(_Entity_):
"""
ssh
.. attribute:: session
Crypto SSH session
**type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session>`
**config**\: False
.. attribute:: server
SSH server parameters
**type**\: :py:class:`Server <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Server>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh, self).__init__()
self._top_entity = None
self.yang_name = "ssh"
self.yang_parent_name = "Cisco-IOS-XR-crypto-ssh-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("session", ("session", Ssh.Session)), ("server", ("server", Ssh.Server))])
self._leafs = OrderedDict()
self.session = Ssh.Session()
self.session.parent = self
self._children_name_map["session"] = "session"
self.server = Ssh.Server()
self.server.parent = self
self._children_name_map["server"] = "server"
self._segment_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh, [], name, value)
class Session(_Entity_):
"""
Crypto SSH session
.. attribute:: rekey
SSH session rekey information
**type**\: :py:class:`Rekey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey>`
**config**\: False
.. attribute:: history_detail
SSH session history detail information
**type**\: :py:class:`HistoryDetail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail>`
**config**\: False
.. attribute:: brief
SSH session brief information
**type**\: :py:class:`Brief <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief>`
**config**\: False
.. attribute:: history
SSH session history information
**type**\: :py:class:`History <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.History>`
**config**\: False
.. attribute:: detail
SSH session detail information
**type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session, self).__init__()
self.yang_name = "session"
self.yang_parent_name = "ssh"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("rekey", ("rekey", Ssh.Session.Rekey)), ("history-detail", ("history_detail", Ssh.Session.HistoryDetail)), ("brief", ("brief", Ssh.Session.Brief)), ("history", ("history", Ssh.Session.History)), ("detail", ("detail", Ssh.Session.Detail))])
self._leafs = OrderedDict()
self.rekey = Ssh.Session.Rekey()
self.rekey.parent = self
self._children_name_map["rekey"] = "rekey"
self.history_detail = Ssh.Session.HistoryDetail()
self.history_detail.parent = self
self._children_name_map["history_detail"] = "history-detail"
self.brief = Ssh.Session.Brief()
self.brief.parent = self
self._children_name_map["brief"] = "brief"
self.history = Ssh.Session.History()
self.history.parent = self
self._children_name_map["history"] = "history"
self.detail = Ssh.Session.Detail()
self.detail.parent = self
self._children_name_map["detail"] = "detail"
self._segment_path = lambda: "session"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session, [], name, value)
class Rekey(_Entity_):
"""
SSH session rekey information
.. attribute:: incoming_sessions
List of incoming sessions
**type**\: :py:class:`IncomingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey.IncomingSessions>`
**config**\: False
.. attribute:: outgoing_connections
List of outgoing connections
**type**\: :py:class:`OutgoingConnections <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey.OutgoingConnections>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey, self).__init__()
self.yang_name = "rekey"
self.yang_parent_name = "session"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("incoming-sessions", ("incoming_sessions", Ssh.Session.Rekey.IncomingSessions)), ("outgoing-connections", ("outgoing_connections", Ssh.Session.Rekey.OutgoingConnections))])
self._leafs = OrderedDict()
self.incoming_sessions = Ssh.Session.Rekey.IncomingSessions()
self.incoming_sessions.parent = self
self._children_name_map["incoming_sessions"] = "incoming-sessions"
self.outgoing_connections = Ssh.Session.Rekey.OutgoingConnections()
self.outgoing_connections.parent = self
self._children_name_map["outgoing_connections"] = "outgoing-connections"
self._segment_path = lambda: "rekey"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Rekey, [], name, value)
class IncomingSessions(_Entity_):
"""
List of incoming sessions
.. attribute:: session_rekey_info
session rekey info
**type**\: list of :py:class:`SessionRekeyInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey.IncomingSessions, self).__init__()
self.yang_name = "incoming-sessions"
self.yang_parent_name = "rekey"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("session-rekey-info", ("session_rekey_info", Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo))])
self._leafs = OrderedDict()
self.session_rekey_info = YList(self)
self._segment_path = lambda: "incoming-sessions"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/rekey/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Rekey.IncomingSessions, [], name, value)
class SessionRekeyInfo(_Entity_):
"""
session rekey info
.. attribute:: next_session
next session
**type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo.NextSession>`
**config**\: False
.. attribute:: session_id
Session ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: session_rekey_count
Session Rekey Count
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: time_to_rekey
Time To Rekey
**type**\: str
**config**\: False
.. attribute:: volume_to_rekey
Volume To Rekey
**type**\: str
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo, self).__init__()
self.yang_name = "session-rekey-info"
self.yang_parent_name = "incoming-sessions"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo.NextSession))])
self._leafs = OrderedDict([
('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
('session_rekey_count', (YLeaf(YType.uint32, 'session-rekey-count'), ['int'])),
('time_to_rekey', (YLeaf(YType.str, 'time-to-rekey'), ['str'])),
('volume_to_rekey', (YLeaf(YType.str, 'volume-to-rekey'), ['str'])),
])
self.session_id = None
self.session_rekey_count = None
self.time_to_rekey = None
self.volume_to_rekey = None
self.next_session = Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo.NextSession()
self.next_session.parent = self
self._children_name_map["next_session"] = "next-session"
self._segment_path = lambda: "session-rekey-info"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/rekey/incoming-sessions/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo, ['session_id', 'session_rekey_count', 'time_to_rekey', 'volume_to_rekey'], name, value)
class NextSession(_Entity_):
"""
next session
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo.NextSession, self).__init__()
self.yang_name = "next-session"
self.yang_parent_name = "session-rekey-info"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "next-session"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/rekey/incoming-sessions/session-rekey-info/%s" % self._segment_path()
self._is_frozen = True
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo.NextSession']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Rekey.IncomingSessions.SessionRekeyInfo']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Rekey.IncomingSessions']['meta_info']
class OutgoingConnections(_Entity_):
"""
List of outgoing connections
.. attribute:: session_rekey_info
session rekey info
**type**\: list of :py:class:`SessionRekeyInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey.OutgoingConnections, self).__init__()
self.yang_name = "outgoing-connections"
self.yang_parent_name = "rekey"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("session-rekey-info", ("session_rekey_info", Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo))])
self._leafs = OrderedDict()
self.session_rekey_info = YList(self)
self._segment_path = lambda: "outgoing-connections"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/rekey/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Rekey.OutgoingConnections, [], name, value)
class SessionRekeyInfo(_Entity_):
"""
session rekey info
.. attribute:: next_session
next session
**type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo.NextSession>`
**config**\: False
.. attribute:: session_id
Session ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: session_rekey_count
Session Rekey Count
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: time_to_rekey
Time To Rekey
**type**\: str
**config**\: False
.. attribute:: volume_to_rekey
Volume To Rekey
**type**\: str
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo, self).__init__()
self.yang_name = "session-rekey-info"
self.yang_parent_name = "outgoing-connections"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo.NextSession))])
self._leafs = OrderedDict([
('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
('session_rekey_count', (YLeaf(YType.uint32, 'session-rekey-count'), ['int'])),
('time_to_rekey', (YLeaf(YType.str, 'time-to-rekey'), ['str'])),
('volume_to_rekey', (YLeaf(YType.str, 'volume-to-rekey'), ['str'])),
])
self.session_id = None
self.session_rekey_count = None
self.time_to_rekey = None
self.volume_to_rekey = None
self.next_session = Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo.NextSession()
self.next_session.parent = self
self._children_name_map["next_session"] = "next-session"
self._segment_path = lambda: "session-rekey-info"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/rekey/outgoing-connections/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo, ['session_id', 'session_rekey_count', 'time_to_rekey', 'volume_to_rekey'], name, value)
class NextSession(_Entity_):
"""
next session
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo.NextSession, self).__init__()
self.yang_name = "next-session"
self.yang_parent_name = "session-rekey-info"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "next-session"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/rekey/outgoing-connections/session-rekey-info/%s" % self._segment_path()
self._is_frozen = True
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo.NextSession']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Rekey.OutgoingConnections.SessionRekeyInfo']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for OutgoingConnections."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh.Session.Rekey.OutgoingConnections']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for Rekey."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh.Session.Rekey']
    return entry['meta_info']
class HistoryDetail(_Entity_):
    """
    SSH session history detail information

    .. attribute:: incoming_sessions

        List of incoming sessions

        **type**\: :py:class:`IncomingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail.IncomingSessions>`

        **config**\: False

    .. attribute:: outgoing_connections

        List of outgoing connections

        **type**\: :py:class:`OutgoingConnections <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail.OutgoingConnections>`

        **config**\: False

    """

    _prefix = 'crypto-ssh-oper'
    _revision = '2017-08-25'

    def __init__(self):
        # Generator emits a dual Python 2/3 super() invocation.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ssh.Session.HistoryDetail, self).__init__()

        self.yang_name = "history-detail"
        self.yang_parent_name = "session"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # YANG child-container name -> (python attribute name, class).
        self._child_classes = OrderedDict([("incoming-sessions", ("incoming_sessions", Ssh.Session.HistoryDetail.IncomingSessions)), ("outgoing-connections", ("outgoing_connections", Ssh.Session.HistoryDetail.OutgoingConnections))])
        self._leafs = OrderedDict()

        # Instantiate and wire both child containers before freezing.
        self.incoming_sessions = Ssh.Session.HistoryDetail.IncomingSessions()
        self.incoming_sessions.parent = self
        self._children_name_map["incoming_sessions"] = "incoming-sessions"

        self.outgoing_connections = Ssh.Session.HistoryDetail.OutgoingConnections()
        self.outgoing_connections.parent = self
        self._children_name_map["outgoing_connections"] = "outgoing-connections"
        self._segment_path = lambda: "history-detail"
        self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/%s" % self._segment_path()
        # Set last: construction is complete and the entity is now frozen.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leaves on this container; validation list is empty.
        self._perform_setattr(Ssh.Session.HistoryDetail, [], name, value)


    class IncomingSessions(_Entity_):
        """
        List of incoming sessions

        .. attribute:: session_detail_info

            session detail info

            **type**\: list of :py:class:`SessionDetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo>`

            **config**\: False

        """

        _prefix = 'crypto-ssh-oper'
        _revision = '2017-08-25'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ssh.Session.HistoryDetail.IncomingSessions, self).__init__()

            self.yang_name = "incoming-sessions"
            self.yang_parent_name = "history-detail"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("session-detail-info", ("session_detail_info", Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo))])
            self._leafs = OrderedDict()

            # YANG list: dynamically populated entries, not a fixed child.
            self.session_detail_info = YList(self)
            self._segment_path = lambda: "incoming-sessions"
            self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history-detail/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ssh.Session.HistoryDetail.IncomingSessions, [], name, value)


        class SessionDetailInfo(_Entity_):
            """
            session detail info

            .. attribute:: next_session

                next session

                **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo.NextSession>`

                **config**\: False

            .. attribute:: session_id

                Session ID

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: key_exchange

                Key exchange name

                **type**\: :py:class:`KexName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.KexName>`

                **config**\: False

            .. attribute:: public_key

                Host key algorithm

                **type**\: :py:class:`Hostkey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Hostkey>`

                **config**\: False

            .. attribute:: in_cipher

                In cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: out_cipher

                Out cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: in_mac

                In MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: out_mac

                Out MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: start_time

                session start time

                **type**\: str

                **config**\: False

            .. attribute:: end_time

                session end time

                **type**\: str

                **config**\: False

            """

            _prefix = 'crypto-ssh-oper'
            _revision = '2017-08-25'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo, self).__init__()

                self.yang_name = "session-detail-info"
                self.yang_parent_name = "incoming-sessions"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo.NextSession))])
                # Leaf attribute -> (YLeaf descriptor, accepted Python types).
                self._leafs = OrderedDict([
                    ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                    ('key_exchange', (YLeaf(YType.enumeration, 'key-exchange'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'KexName', '')])),
                    ('public_key', (YLeaf(YType.enumeration, 'public-key'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Hostkey', '')])),
                    ('in_cipher', (YLeaf(YType.enumeration, 'in-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('out_cipher', (YLeaf(YType.enumeration, 'out-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('in_mac', (YLeaf(YType.enumeration, 'in-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('out_mac', (YLeaf(YType.enumeration, 'out-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('start_time', (YLeaf(YType.str, 'start-time'), ['str'])),
                    ('end_time', (YLeaf(YType.str, 'end-time'), ['str'])),
                ])
                self.session_id = None
                self.key_exchange = None
                self.public_key = None
                self.in_cipher = None
                self.out_cipher = None
                self.in_mac = None
                self.out_mac = None
                self.start_time = None
                self.end_time = None

                self.next_session = Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo.NextSession()
                self.next_session.parent = self
                self._children_name_map["next_session"] = "next-session"
                self._segment_path = lambda: "session-detail-info"
                self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history-detail/incoming-sessions/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo, ['session_id', 'key_exchange', 'public_key', 'in_cipher', 'out_cipher', 'in_mac', 'out_mac', 'start_time', 'end_time'], name, value)


            class NextSession(_Entity_):
                """
                next session

                Empty operational container: no leaves and no child
                entities are modeled under this node.
                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo.NextSession, self).__init__()

                    self.yang_name = "next-session"
                    self.yang_parent_name = "session-detail-info"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    self._segment_path = lambda: "next-session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history-detail/incoming-sessions/session-detail-info/%s" % self._segment_path()
                    self._is_frozen = True

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo.NextSession']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                return meta._meta_table['Ssh.Session.HistoryDetail.IncomingSessions.SessionDetailInfo']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
            return meta._meta_table['Ssh.Session.HistoryDetail.IncomingSessions']['meta_info']


    class OutgoingConnections(_Entity_):
        """
        List of outgoing connections

        .. attribute:: session_detail_info

            session detail info

            **type**\: list of :py:class:`SessionDetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo>`

            **config**\: False

        """

        _prefix = 'crypto-ssh-oper'
        _revision = '2017-08-25'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ssh.Session.HistoryDetail.OutgoingConnections, self).__init__()

            self.yang_name = "outgoing-connections"
            self.yang_parent_name = "history-detail"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("session-detail-info", ("session_detail_info", Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo))])
            self._leafs = OrderedDict()

            # YANG list: dynamically populated entries, not a fixed child.
            self.session_detail_info = YList(self)
            self._segment_path = lambda: "outgoing-connections"
            self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history-detail/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ssh.Session.HistoryDetail.OutgoingConnections, [], name, value)


        class SessionDetailInfo(_Entity_):
            """
            session detail info

            .. attribute:: next_session

                next session

                **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo.NextSession>`

                **config**\: False

            .. attribute:: session_id

                Session ID

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: key_exchange

                Key exchange name

                **type**\: :py:class:`KexName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.KexName>`

                **config**\: False

            .. attribute:: public_key

                Host key algorithm

                **type**\: :py:class:`Hostkey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Hostkey>`

                **config**\: False

            .. attribute:: in_cipher

                In cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: out_cipher

                Out cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: in_mac

                In MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: out_mac

                Out MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: start_time

                session start time

                **type**\: str

                **config**\: False

            .. attribute:: end_time

                session end time

                **type**\: str

                **config**\: False

            """

            _prefix = 'crypto-ssh-oper'
            _revision = '2017-08-25'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo, self).__init__()

                self.yang_name = "session-detail-info"
                self.yang_parent_name = "outgoing-connections"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo.NextSession))])
                # Leaf attribute -> (YLeaf descriptor, accepted Python types).
                self._leafs = OrderedDict([
                    ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                    ('key_exchange', (YLeaf(YType.enumeration, 'key-exchange'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'KexName', '')])),
                    ('public_key', (YLeaf(YType.enumeration, 'public-key'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Hostkey', '')])),
                    ('in_cipher', (YLeaf(YType.enumeration, 'in-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('out_cipher', (YLeaf(YType.enumeration, 'out-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('in_mac', (YLeaf(YType.enumeration, 'in-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('out_mac', (YLeaf(YType.enumeration, 'out-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('start_time', (YLeaf(YType.str, 'start-time'), ['str'])),
                    ('end_time', (YLeaf(YType.str, 'end-time'), ['str'])),
                ])
                self.session_id = None
                self.key_exchange = None
                self.public_key = None
                self.in_cipher = None
                self.out_cipher = None
                self.in_mac = None
                self.out_mac = None
                self.start_time = None
                self.end_time = None

                self.next_session = Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo.NextSession()
                self.next_session.parent = self
                self._children_name_map["next_session"] = "next-session"
                self._segment_path = lambda: "session-detail-info"
                self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history-detail/outgoing-connections/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo, ['session_id', 'key_exchange', 'public_key', 'in_cipher', 'out_cipher', 'in_mac', 'out_mac', 'start_time', 'end_time'], name, value)


            class NextSession(_Entity_):
                """
                next session

                Empty operational container: no leaves and no child
                entities are modeled under this node.
                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo.NextSession, self).__init__()

                    self.yang_name = "next-session"
                    self.yang_parent_name = "session-detail-info"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    self._segment_path = lambda: "next-session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history-detail/outgoing-connections/session-detail-info/%s" % self._segment_path()
                    self._is_frozen = True

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo.NextSession']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                return meta._meta_table['Ssh.Session.HistoryDetail.OutgoingConnections.SessionDetailInfo']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
            return meta._meta_table['Ssh.Session.HistoryDetail.OutgoingConnections']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Ssh.Session.HistoryDetail']['meta_info']
class Brief(_Entity_):
"""
SSH session brief information
.. attribute:: incoming_sessions
List of incoming sessions
**type**\: :py:class:`IncomingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.IncomingSessions>`
**config**\: False
.. attribute:: outgoing_sessions
List of outgoing sessions
**type**\: :py:class:`OutgoingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.OutgoingSessions>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief, self).__init__()
self.yang_name = "brief"
self.yang_parent_name = "session"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("incoming-sessions", ("incoming_sessions", Ssh.Session.Brief.IncomingSessions)), ("outgoing-sessions", ("outgoing_sessions", Ssh.Session.Brief.OutgoingSessions))])
self._leafs = OrderedDict()
self.incoming_sessions = Ssh.Session.Brief.IncomingSessions()
self.incoming_sessions.parent = self
self._children_name_map["incoming_sessions"] = "incoming-sessions"
self.outgoing_sessions = Ssh.Session.Brief.OutgoingSessions()
self.outgoing_sessions.parent = self
self._children_name_map["outgoing_sessions"] = "outgoing-sessions"
self._segment_path = lambda: "brief"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief, [], name, value)
class IncomingSessions(_Entity_):
"""
List of incoming sessions
.. attribute:: session_brief_info
session brief info
**type**\: list of :py:class:`SessionBriefInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.IncomingSessions.SessionBriefInfo>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.IncomingSessions, self).__init__()
self.yang_name = "incoming-sessions"
self.yang_parent_name = "brief"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("session-brief-info", ("session_brief_info", Ssh.Session.Brief.IncomingSessions.SessionBriefInfo))])
self._leafs = OrderedDict()
self.session_brief_info = YList(self)
self._segment_path = lambda: "incoming-sessions"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief.IncomingSessions, [], name, value)
class SessionBriefInfo(_Entity_):
"""
session brief info
.. attribute:: next_session
next session
**type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.NextSession>`
**config**\: False
.. attribute:: session_id
Session ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: node_name
Node name
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
**config**\: False
.. attribute:: session_state
SSH session state
**type**\: :py:class:`States <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.States>`
**config**\: False
.. attribute:: user_id
User ID
**type**\: str
**config**\: False
.. attribute:: host_address
Host address
**type**\: str
**config**\: False
.. attribute:: version
SSH state version
**type**\: :py:class:`Version <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Version>`
**config**\: False
.. attribute:: authentication_type
Authentication method
**type**\: :py:class:`Authen <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Authen>`
**config**\: False
.. attribute:: mc_info
List of channel info
**type**\: list of :py:class:`McInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.McInfo>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.IncomingSessions.SessionBriefInfo, self).__init__()
self.yang_name = "session-brief-info"
self.yang_parent_name = "incoming-sessions"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.NextSession)), ("mc-info", ("mc_info", Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.McInfo))])
self._leafs = OrderedDict([
('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
('session_state', (YLeaf(YType.enumeration, 'session-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'States', '')])),
('user_id', (YLeaf(YType.str, 'user-id'), ['str'])),
('host_address', (YLeaf(YType.str, 'host-address'), ['str'])),
('version', (YLeaf(YType.enumeration, 'version'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Version', '')])),
('authentication_type', (YLeaf(YType.enumeration, 'authentication-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Authen', '')])),
])
self.session_id = None
self.node_name = None
self.session_state = None
self.user_id = None
self.host_address = None
self.version = None
self.authentication_type = None
self.next_session = Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.NextSession()
self.next_session.parent = self
self._children_name_map["next_session"] = "next-session"
self.mc_info = YList(self)
self._segment_path = lambda: "session-brief-info"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/incoming-sessions/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief.IncomingSessions.SessionBriefInfo, ['session_id', 'node_name', 'session_state', 'user_id', 'host_address', 'version', 'authentication_type'], name, value)
class NextSession(_Entity_):
"""
next session
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.NextSession, self).__init__()
self.yang_name = "next-session"
self.yang_parent_name = "session-brief-info"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "next-session"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/incoming-sessions/session-brief-info/%s" % self._segment_path()
self._is_frozen = True
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.NextSession']['meta_info']
class McInfo(_Entity_):
"""
List of channel info
.. attribute:: channel_id
Channel ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: connection_type
Channel Connection Type
**type**\: :py:class:`Connection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Connection>`
**config**\: False
.. attribute:: vty_line_number
VTY line number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: vty_assigned
Boolean indicating whether line VTY line number is valid
**type**\: bool
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.McInfo, self).__init__()
self.yang_name = "mc-info"
self.yang_parent_name = "session-brief-info"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('channel_id', (YLeaf(YType.uint32, 'channel-id'), ['int'])),
('connection_type', (YLeaf(YType.enumeration, 'connection-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Connection', '')])),
('vty_line_number', (YLeaf(YType.uint32, 'vty-line-number'), ['int'])),
('vty_assigned', (YLeaf(YType.boolean, 'vty-assigned'), ['bool'])),
])
self.channel_id = None
self.connection_type = None
self.vty_line_number = None
self.vty_assigned = None
self._segment_path = lambda: "mc-info"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/incoming-sessions/session-brief-info/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.McInfo, ['channel_id', 'connection_type', 'vty_line_number', 'vty_assigned'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Brief.IncomingSessions.SessionBriefInfo.McInfo']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Brief.IncomingSessions.SessionBriefInfo']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Brief.IncomingSessions']['meta_info']
class OutgoingSessions(_Entity_):
"""
List of outgoing sessions
.. attribute:: session_brief_info
session brief info
**type**\: list of :py:class:`SessionBriefInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.OutgoingSessions, self).__init__()
self.yang_name = "outgoing-sessions"
self.yang_parent_name = "brief"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("session-brief-info", ("session_brief_info", Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo))])
self._leafs = OrderedDict()
self.session_brief_info = YList(self)
self._segment_path = lambda: "outgoing-sessions"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief.OutgoingSessions, [], name, value)
class SessionBriefInfo(_Entity_):
"""
session brief info
.. attribute:: next_session
next session
**type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.NextSession>`
**config**\: False
.. attribute:: session_id
Session ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: node_name
Node name
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
**config**\: False
.. attribute:: session_state
SSH session state
**type**\: :py:class:`States <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.States>`
**config**\: False
.. attribute:: user_id
User ID
**type**\: str
**config**\: False
.. attribute:: host_address
Host address
**type**\: str
**config**\: False
.. attribute:: version
SSH state version
**type**\: :py:class:`Version <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Version>`
**config**\: False
.. attribute:: authentication_type
Authentication method
**type**\: :py:class:`Authen <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Authen>`
**config**\: False
.. attribute:: mc_info
List of channel info
**type**\: list of :py:class:`McInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.McInfo>`
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo, self).__init__()
self.yang_name = "session-brief-info"
self.yang_parent_name = "outgoing-sessions"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.NextSession)), ("mc-info", ("mc_info", Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.McInfo))])
self._leafs = OrderedDict([
('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
('session_state', (YLeaf(YType.enumeration, 'session-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'States', '')])),
('user_id', (YLeaf(YType.str, 'user-id'), ['str'])),
('host_address', (YLeaf(YType.str, 'host-address'), ['str'])),
('version', (YLeaf(YType.enumeration, 'version'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Version', '')])),
('authentication_type', (YLeaf(YType.enumeration, 'authentication-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Authen', '')])),
])
self.session_id = None
self.node_name = None
self.session_state = None
self.user_id = None
self.host_address = None
self.version = None
self.authentication_type = None
self.next_session = Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.NextSession()
self.next_session.parent = self
self._children_name_map["next_session"] = "next-session"
self.mc_info = YList(self)
self._segment_path = lambda: "session-brief-info"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/outgoing-sessions/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo, ['session_id', 'node_name', 'session_state', 'user_id', 'host_address', 'version', 'authentication_type'], name, value)
class NextSession(_Entity_):
"""
next session
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.NextSession, self).__init__()
self.yang_name = "next-session"
self.yang_parent_name = "session-brief-info"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "next-session"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/outgoing-sessions/session-brief-info/%s" % self._segment_path()
self._is_frozen = True
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.NextSession']['meta_info']
class McInfo(_Entity_):
"""
List of channel info
.. attribute:: channel_id
Channel ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: connection_type
Channel Connection Type
**type**\: :py:class:`Connection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Connection>`
**config**\: False
.. attribute:: vty_line_number
VTY line number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: vty_assigned
Boolean indicating whether line VTY line number is valid
**type**\: bool
**config**\: False
"""
_prefix = 'crypto-ssh-oper'
_revision = '2017-08-25'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.McInfo, self).__init__()
self.yang_name = "mc-info"
self.yang_parent_name = "session-brief-info"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('channel_id', (YLeaf(YType.uint32, 'channel-id'), ['int'])),
('connection_type', (YLeaf(YType.enumeration, 'connection-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Connection', '')])),
('vty_line_number', (YLeaf(YType.uint32, 'vty-line-number'), ['int'])),
('vty_assigned', (YLeaf(YType.boolean, 'vty-assigned'), ['bool'])),
])
self.channel_id = None
self.connection_type = None
self.vty_line_number = None
self.vty_assigned = None
self._segment_path = lambda: "mc-info"
self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/brief/outgoing-sessions/session-brief-info/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.McInfo, ['channel_id', 'connection_type', 'vty_line_number', 'vty_assigned'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
return meta._meta_table['Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo.McInfo']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta information record for SessionBriefInfo."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh.Session.Brief.OutgoingSessions.SessionBriefInfo']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta information record for OutgoingSessions."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh.Session.Brief.OutgoingSessions']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta information record for Brief."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh.Session.Brief']
    return entry['meta_info']
class History(_Entity_):
    """
    SSH session history information

    .. attribute:: incoming_sessions

        List of incoming sessions

        **type**\: :py:class:`IncomingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.History.IncomingSessions>`

        **config**\: False

    """

    _prefix = 'crypto-ssh-oper'
    _revision = '2017-08-25'

    def __init__(self):
        # Generated YDK initializer: registers YANG metadata, creates child
        # containers, and freezes the attribute set (_is_frozen) so that
        # only the declared leafs/children can be assigned afterwards.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ssh.Session.History, self).__init__()

        self.yang_name = "history"
        self.yang_parent_name = "session"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers: yang-name -> (python attribute name, class).
        self._child_classes = OrderedDict([("incoming-sessions", ("incoming_sessions", Ssh.Session.History.IncomingSessions))])
        self._leafs = OrderedDict()

        self.incoming_sessions = Ssh.Session.History.IncomingSessions()
        self.incoming_sessions.parent = self
        self._children_name_map["incoming_sessions"] = "incoming-sessions"
        self._segment_path = lambda: "history"
        self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the YDK frozen/leaf validation.
        self._perform_setattr(Ssh.Session.History, [], name, value)


    class IncomingSessions(_Entity_):
        """
        List of incoming sessions

        .. attribute:: session_history_info

            session history info

            **type**\: list of :py:class:`SessionHistoryInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.History.IncomingSessions.SessionHistoryInfo>`

            **config**\: False

        """

        _prefix = 'crypto-ssh-oper'
        _revision = '2017-08-25'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ssh.Session.History.IncomingSessions, self).__init__()

            self.yang_name = "incoming-sessions"
            self.yang_parent_name = "history"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("session-history-info", ("session_history_info", Ssh.Session.History.IncomingSessions.SessionHistoryInfo))])
            self._leafs = OrderedDict()

            # YANG list: one SessionHistoryInfo entry per recorded session.
            self.session_history_info = YList(self)
            self._segment_path = lambda: "incoming-sessions"
            self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ssh.Session.History.IncomingSessions, [], name, value)


        class SessionHistoryInfo(_Entity_):
            """
            session history info

            .. attribute:: next_session

                next session

                **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.History.IncomingSessions.SessionHistoryInfo.NextSession>`

                **config**\: False

            .. attribute:: session_id

                Session ID

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: node_name

                Node name

                **type**\: str

                **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)

                **config**\: False

            .. attribute:: user_id

                User ID

                **type**\: str

                **config**\: False

            .. attribute:: host_address

                Host address

                **type**\: str

                **config**\: False

            .. attribute:: version

                SSH state version

                **type**\: :py:class:`Version <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Version>`

                **config**\: False

            .. attribute:: authentication_type

                Authentication method

                **type**\: :py:class:`Authen <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Authen>`

                **config**\: False

            .. attribute:: mc_info

                List of channel info

                **type**\: list of :py:class:`McInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.History.IncomingSessions.SessionHistoryInfo.McInfo>`

                **config**\: False

            """

            _prefix = 'crypto-ssh-oper'
            _revision = '2017-08-25'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Ssh.Session.History.IncomingSessions.SessionHistoryInfo, self).__init__()

                self.yang_name = "session-history-info"
                self.yang_parent_name = "incoming-sessions"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.History.IncomingSessions.SessionHistoryInfo.NextSession)), ("mc-info", ("mc_info", Ssh.Session.History.IncomingSessions.SessionHistoryInfo.McInfo))])
                # Leaf metadata: python attr -> (YLeaf(yang type, yang name), [python types]).
                self._leafs = OrderedDict([
                    ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                    ('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
                    ('user_id', (YLeaf(YType.str, 'user-id'), ['str'])),
                    ('host_address', (YLeaf(YType.str, 'host-address'), ['str'])),
                    ('version', (YLeaf(YType.enumeration, 'version'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Version', '')])),
                    ('authentication_type', (YLeaf(YType.enumeration, 'authentication-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Authen', '')])),
                ])
                self.session_id = None
                self.node_name = None
                self.user_id = None
                self.host_address = None
                self.version = None
                self.authentication_type = None

                self.next_session = Ssh.Session.History.IncomingSessions.SessionHistoryInfo.NextSession()
                self.next_session.parent = self
                self._children_name_map["next_session"] = "next-session"

                self.mc_info = YList(self)
                self._segment_path = lambda: "session-history-info"
                self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history/incoming-sessions/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Ssh.Session.History.IncomingSessions.SessionHistoryInfo, ['session_id', 'node_name', 'user_id', 'host_address', 'version', 'authentication_type'], name, value)


            class NextSession(_Entity_):
                """
                next session

                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    # Empty presence container: no leafs, no children.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh.Session.History.IncomingSessions.SessionHistoryInfo.NextSession, self).__init__()

                    self.yang_name = "next-session"
                    self.yang_parent_name = "session-history-info"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    self._segment_path = lambda: "next-session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history/incoming-sessions/session-history-info/%s" % self._segment_path()
                    self._is_frozen = True

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh.Session.History.IncomingSessions.SessionHistoryInfo.NextSession']['meta_info']


            class McInfo(_Entity_):
                """
                List of channel info

                .. attribute:: channel_id

                    Channel ID

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                .. attribute:: connection_type

                    Channel Connection Type

                    **type**\: :py:class:`Connection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Connection>`

                    **config**\: False

                .. attribute:: vty_line_number

                    VTY line number

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                .. attribute:: vty_assigned

                    Boolean indicating whether line VTY line number is valid

                    **type**\: bool

                    **config**\: False

                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh.Session.History.IncomingSessions.SessionHistoryInfo.McInfo, self).__init__()

                    self.yang_name = "mc-info"
                    self.yang_parent_name = "session-history-info"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    # Leaf metadata: python attr -> (YLeaf(yang type, yang name), [python types]).
                    self._leafs = OrderedDict([
                        ('channel_id', (YLeaf(YType.uint32, 'channel-id'), ['int'])),
                        ('connection_type', (YLeaf(YType.enumeration, 'connection-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Connection', '')])),
                        ('vty_line_number', (YLeaf(YType.uint32, 'vty-line-number'), ['int'])),
                        ('vty_assigned', (YLeaf(YType.boolean, 'vty-assigned'), ['bool'])),
                    ])
                    self.channel_id = None
                    self.connection_type = None
                    self.vty_line_number = None
                    self.vty_assigned = None
                    self._segment_path = lambda: "mc-info"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/history/incoming-sessions/session-history-info/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Ssh.Session.History.IncomingSessions.SessionHistoryInfo.McInfo, ['channel_id', 'connection_type', 'vty_line_number', 'vty_assigned'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh.Session.History.IncomingSessions.SessionHistoryInfo.McInfo']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                return meta._meta_table['Ssh.Session.History.IncomingSessions.SessionHistoryInfo']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
            return meta._meta_table['Ssh.Session.History.IncomingSessions']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Ssh.Session.History']['meta_info']
class Detail(_Entity_):
    """
    SSH session detail information

    .. attribute:: incoming_sessions

        List of incoming sessions

        **type**\: :py:class:`IncomingSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail.IncomingSessions>`

        **config**\: False

    .. attribute:: outgoing_connections

        List of outgoing connections

        **type**\: :py:class:`OutgoingConnections <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail.OutgoingConnections>`

        **config**\: False

    """

    _prefix = 'crypto-ssh-oper'
    _revision = '2017-08-25'

    def __init__(self):
        # Generated YDK initializer: registers YANG metadata, creates child
        # containers, and freezes the attribute set (_is_frozen).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ssh.Session.Detail, self).__init__()

        self.yang_name = "detail"
        self.yang_parent_name = "session"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers: yang-name -> (python attribute name, class).
        self._child_classes = OrderedDict([("incoming-sessions", ("incoming_sessions", Ssh.Session.Detail.IncomingSessions)), ("outgoing-connections", ("outgoing_connections", Ssh.Session.Detail.OutgoingConnections))])
        self._leafs = OrderedDict()

        self.incoming_sessions = Ssh.Session.Detail.IncomingSessions()
        self.incoming_sessions.parent = self
        self._children_name_map["incoming_sessions"] = "incoming-sessions"

        self.outgoing_connections = Ssh.Session.Detail.OutgoingConnections()
        self.outgoing_connections.parent = self
        self._children_name_map["outgoing_connections"] = "outgoing-connections"
        self._segment_path = lambda: "detail"
        self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the YDK frozen/leaf validation.
        self._perform_setattr(Ssh.Session.Detail, [], name, value)


    class IncomingSessions(_Entity_):
        """
        List of incoming sessions

        .. attribute:: session_detail_info

            session detail info

            **type**\: list of :py:class:`SessionDetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail.IncomingSessions.SessionDetailInfo>`

            **config**\: False

        """

        _prefix = 'crypto-ssh-oper'
        _revision = '2017-08-25'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ssh.Session.Detail.IncomingSessions, self).__init__()

            self.yang_name = "incoming-sessions"
            self.yang_parent_name = "detail"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("session-detail-info", ("session_detail_info", Ssh.Session.Detail.IncomingSessions.SessionDetailInfo))])
            self._leafs = OrderedDict()

            # YANG list: one SessionDetailInfo entry per incoming session.
            self.session_detail_info = YList(self)
            self._segment_path = lambda: "incoming-sessions"
            self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/detail/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ssh.Session.Detail.IncomingSessions, [], name, value)


        class SessionDetailInfo(_Entity_):
            """
            session detail info

            .. attribute:: next_session

                next session

                **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail.IncomingSessions.SessionDetailInfo.NextSession>`

                **config**\: False

            .. attribute:: session_id

                Session ID

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: key_exchange

                Key exchange name

                **type**\: :py:class:`KexName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.KexName>`

                **config**\: False

            .. attribute:: public_key

                Host key algorithm

                **type**\: :py:class:`Hostkey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Hostkey>`

                **config**\: False

            .. attribute:: in_cipher

                In cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: out_cipher

                Out cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: in_mac

                In MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: out_mac

                Out MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: start_time

                session start time

                **type**\: str

                **config**\: False

            .. attribute:: end_time

                session end time

                **type**\: str

                **config**\: False

            """

            _prefix = 'crypto-ssh-oper'
            _revision = '2017-08-25'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Ssh.Session.Detail.IncomingSessions.SessionDetailInfo, self).__init__()

                self.yang_name = "session-detail-info"
                self.yang_parent_name = "incoming-sessions"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.Detail.IncomingSessions.SessionDetailInfo.NextSession))])
                # Leaf metadata: python attr -> (YLeaf(yang type, yang name), [python types]).
                self._leafs = OrderedDict([
                    ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                    ('key_exchange', (YLeaf(YType.enumeration, 'key-exchange'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'KexName', '')])),
                    ('public_key', (YLeaf(YType.enumeration, 'public-key'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Hostkey', '')])),
                    ('in_cipher', (YLeaf(YType.enumeration, 'in-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('out_cipher', (YLeaf(YType.enumeration, 'out-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('in_mac', (YLeaf(YType.enumeration, 'in-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('out_mac', (YLeaf(YType.enumeration, 'out-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('start_time', (YLeaf(YType.str, 'start-time'), ['str'])),
                    ('end_time', (YLeaf(YType.str, 'end-time'), ['str'])),
                ])
                self.session_id = None
                self.key_exchange = None
                self.public_key = None
                self.in_cipher = None
                self.out_cipher = None
                self.in_mac = None
                self.out_mac = None
                self.start_time = None
                self.end_time = None

                self.next_session = Ssh.Session.Detail.IncomingSessions.SessionDetailInfo.NextSession()
                self.next_session.parent = self
                self._children_name_map["next_session"] = "next-session"
                self._segment_path = lambda: "session-detail-info"
                self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/detail/incoming-sessions/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Ssh.Session.Detail.IncomingSessions.SessionDetailInfo, ['session_id', 'key_exchange', 'public_key', 'in_cipher', 'out_cipher', 'in_mac', 'out_mac', 'start_time', 'end_time'], name, value)


            class NextSession(_Entity_):
                """
                next session

                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    # Empty presence container: no leafs, no children.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh.Session.Detail.IncomingSessions.SessionDetailInfo.NextSession, self).__init__()

                    self.yang_name = "next-session"
                    self.yang_parent_name = "session-detail-info"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    self._segment_path = lambda: "next-session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/detail/incoming-sessions/session-detail-info/%s" % self._segment_path()
                    self._is_frozen = True

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh.Session.Detail.IncomingSessions.SessionDetailInfo.NextSession']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                return meta._meta_table['Ssh.Session.Detail.IncomingSessions.SessionDetailInfo']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
            return meta._meta_table['Ssh.Session.Detail.IncomingSessions']['meta_info']


    class OutgoingConnections(_Entity_):
        """
        List of outgoing connections

        .. attribute:: session_detail_info

            session detail info

            **type**\: list of :py:class:`SessionDetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo>`

            **config**\: False

        """

        _prefix = 'crypto-ssh-oper'
        _revision = '2017-08-25'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ssh.Session.Detail.OutgoingConnections, self).__init__()

            self.yang_name = "outgoing-connections"
            self.yang_parent_name = "detail"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("session-detail-info", ("session_detail_info", Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo))])
            self._leafs = OrderedDict()

            # YANG list: one SessionDetailInfo entry per outgoing connection.
            self.session_detail_info = YList(self)
            self._segment_path = lambda: "outgoing-connections"
            self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/detail/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ssh.Session.Detail.OutgoingConnections, [], name, value)


        class SessionDetailInfo(_Entity_):
            """
            session detail info

            .. attribute:: next_session

                next session

                **type**\: :py:class:`NextSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo.NextSession>`

                **config**\: False

            .. attribute:: session_id

                Session ID

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: key_exchange

                Key exchange name

                **type**\: :py:class:`KexName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.KexName>`

                **config**\: False

            .. attribute:: public_key

                Host key algorithm

                **type**\: :py:class:`Hostkey <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Hostkey>`

                **config**\: False

            .. attribute:: in_cipher

                In cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: out_cipher

                Out cipher algorithm

                **type**\: :py:class:`Cipher <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Cipher>`

                **config**\: False

            .. attribute:: in_mac

                In MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: out_mac

                Out MAC

                **type**\: :py:class:`Mac <ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper.Mac>`

                **config**\: False

            .. attribute:: start_time

                session start time

                **type**\: str

                **config**\: False

            .. attribute:: end_time

                session end time

                **type**\: str

                **config**\: False

            """

            _prefix = 'crypto-ssh-oper'
            _revision = '2017-08-25'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo, self).__init__()

                self.yang_name = "session-detail-info"
                self.yang_parent_name = "outgoing-connections"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("next-session", ("next_session", Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo.NextSession))])
                # Leaf metadata: python attr -> (YLeaf(yang type, yang name), [python types]).
                self._leafs = OrderedDict([
                    ('session_id', (YLeaf(YType.uint32, 'session-id'), ['int'])),
                    ('key_exchange', (YLeaf(YType.enumeration, 'key-exchange'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'KexName', '')])),
                    ('public_key', (YLeaf(YType.enumeration, 'public-key'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Hostkey', '')])),
                    ('in_cipher', (YLeaf(YType.enumeration, 'in-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('out_cipher', (YLeaf(YType.enumeration, 'out-cipher'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Cipher', '')])),
                    ('in_mac', (YLeaf(YType.enumeration, 'in-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('out_mac', (YLeaf(YType.enumeration, 'out-mac'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_crypto_ssh_oper', 'Mac', '')])),
                    ('start_time', (YLeaf(YType.str, 'start-time'), ['str'])),
                    ('end_time', (YLeaf(YType.str, 'end-time'), ['str'])),
                ])
                self.session_id = None
                self.key_exchange = None
                self.public_key = None
                self.in_cipher = None
                self.out_cipher = None
                self.in_mac = None
                self.out_mac = None
                self.start_time = None
                self.end_time = None

                self.next_session = Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo.NextSession()
                self.next_session.parent = self
                self._children_name_map["next_session"] = "next-session"
                self._segment_path = lambda: "session-detail-info"
                self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/detail/outgoing-connections/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo, ['session_id', 'key_exchange', 'public_key', 'in_cipher', 'out_cipher', 'in_mac', 'out_mac', 'start_time', 'end_time'], name, value)


            class NextSession(_Entity_):
                """
                next session

                """

                _prefix = 'crypto-ssh-oper'
                _revision = '2017-08-25'

                def __init__(self):
                    # Empty presence container: no leafs, no children.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo.NextSession, self).__init__()

                    self.yang_name = "next-session"
                    self.yang_parent_name = "session-detail-info"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    self._segment_path = lambda: "next-session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/session/detail/outgoing-connections/session-detail-info/%s" % self._segment_path()
                    self._is_frozen = True

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                    return meta._meta_table['Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo.NextSession']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
                return meta._meta_table['Ssh.Session.Detail.OutgoingConnections.SessionDetailInfo']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
            return meta._meta_table['Ssh.Session.Detail.OutgoingConnections']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Ssh.Session.Detail']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta information record for Session."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh.Session']
    return entry['meta_info']
class Server(_Entity_):
    """
    SSH server parameters

    .. attribute:: version

        Version

        **type**\: str

        **length:** 0..10

        **config**\: False

    .. attribute:: port

        SSH Port

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: vrf

        Vrfs and acls

        **type**\: str

        **length:** 0..500

        **config**\: False

    .. attribute:: netconfport

        Netconf Port

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: netconfvrf

        Netconf vrfs and acls

        **type**\: str

        **length:** 0..500

        **config**\: False

    .. attribute:: netconfver

        Netconf Version

        **type**\: str

        **length:** 0..10

        **config**\: False

    .. attribute:: hostkeyalgo

        Hostkey algorithms

        **type**\: str

        **length:** 0..200

        **config**\: False

    .. attribute:: kexalgo

        Key exchange algorithms

        **type**\: str

        **length:** 0..200

        **config**\: False

    .. attribute:: cipheralgo

        Encryption algorithms

        **type**\: str

        **length:** 0..200

        **config**\: False

    .. attribute:: macalgo

        Mac algorithms

        **type**\: str

        **length:** 0..200

        **config**\: False

    .. attribute:: backupserver

        Backup SSH server

        **type**\: str

        **length:** 0..100

        **config**\: False

    .. attribute:: dscp

        Dscp

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: ratelimit

        ratelimit

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: sessionlimit

        session limit

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: rekeytime

        Rekey Time

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: rekeyvolume

        Rekey Volume

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: windowscalefactor

        Window scale factor

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: passwordauthen

        Password Authentication support

        **type**\: bool

        **config**\: False

    .. attribute:: keyboardinteractiveauthen

        Keyboard-interactive Authentication support

        **type**\: bool

        **config**\: False

    .. attribute:: pubkeyauthen

        Pubkey Authentication support

        **type**\: bool

        **config**\: False

    .. attribute:: certificateauthen

        Certificate based Authentication support

        **type**\: bool

        **config**\: False

    """

    _prefix = 'crypto-ssh-oper'
    _revision = '2017-08-25'

    def __init__(self):
        # Generated YDK initializer: registers leaf metadata and freezes
        # the attribute set (_is_frozen). Leaf-only container, no children.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ssh.Server, self).__init__()

        self.yang_name = "server"
        self.yang_parent_name = "ssh"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attr -> (YLeaf(yang type, yang name), [python types]).
        self._leafs = OrderedDict([
            ('version', (YLeaf(YType.str, 'version'), ['str'])),
            ('port', (YLeaf(YType.uint32, 'port'), ['int'])),
            ('vrf', (YLeaf(YType.str, 'vrf'), ['str'])),
            ('netconfport', (YLeaf(YType.uint32, 'netconfport'), ['int'])),
            ('netconfvrf', (YLeaf(YType.str, 'netconfvrf'), ['str'])),
            ('netconfver', (YLeaf(YType.str, 'netconfver'), ['str'])),
            ('hostkeyalgo', (YLeaf(YType.str, 'hostkeyalgo'), ['str'])),
            ('kexalgo', (YLeaf(YType.str, 'kexalgo'), ['str'])),
            ('cipheralgo', (YLeaf(YType.str, 'cipheralgo'), ['str'])),
            ('macalgo', (YLeaf(YType.str, 'macalgo'), ['str'])),
            ('backupserver', (YLeaf(YType.str, 'backupserver'), ['str'])),
            ('dscp', (YLeaf(YType.uint32, 'dscp'), ['int'])),
            ('ratelimit', (YLeaf(YType.uint32, 'ratelimit'), ['int'])),
            ('sessionlimit', (YLeaf(YType.uint32, 'sessionlimit'), ['int'])),
            ('rekeytime', (YLeaf(YType.uint32, 'rekeytime'), ['int'])),
            ('rekeyvolume', (YLeaf(YType.uint32, 'rekeyvolume'), ['int'])),
            ('windowscalefactor', (YLeaf(YType.uint32, 'windowscalefactor'), ['int'])),
            ('passwordauthen', (YLeaf(YType.boolean, 'passwordauthen'), ['bool'])),
            ('keyboardinteractiveauthen', (YLeaf(YType.boolean, 'keyboardinteractiveauthen'), ['bool'])),
            ('pubkeyauthen', (YLeaf(YType.boolean, 'pubkeyauthen'), ['bool'])),
            ('certificateauthen', (YLeaf(YType.boolean, 'certificateauthen'), ['bool'])),
        ])
        self.version = None
        self.port = None
        self.vrf = None
        self.netconfport = None
        self.netconfvrf = None
        self.netconfver = None
        self.hostkeyalgo = None
        self.kexalgo = None
        self.cipheralgo = None
        self.macalgo = None
        self.backupserver = None
        self.dscp = None
        self.ratelimit = None
        self.sessionlimit = None
        self.rekeytime = None
        self.rekeyvolume = None
        self.windowscalefactor = None
        self.passwordauthen = None
        self.keyboardinteractiveauthen = None
        self.pubkeyauthen = None
        self.certificateauthen = None
        self._segment_path = lambda: "server"
        self._absolute_path = lambda: "Cisco-IOS-XR-crypto-ssh-oper:ssh/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the YDK frozen/leaf validation.
        self._perform_setattr(Ssh.Server, ['version', 'port', 'vrf', 'netconfport', 'netconfvrf', 'netconfver', 'hostkeyalgo', 'kexalgo', 'cipheralgo', 'macalgo', 'backupserver', 'dscp', 'ratelimit', 'sessionlimit', 'rekeytime', 'rekeyvolume', 'windowscalefactor', 'passwordauthen', 'keyboardinteractiveauthen', 'pubkeyauthen', 'certificateauthen'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
        return meta._meta_table['Ssh.Server']['meta_info']
def clone_ptr(self):
    """Create a fresh top-level Ssh entity, remember it, and return it."""
    top_entity = Ssh()
    self._top_entity = top_entity
    return top_entity
@staticmethod
def _meta_info():
    """Return the generated meta information record for Ssh."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_crypto_ssh_oper as meta
    entry = meta._meta_table['Ssh']
    return entry['meta_info']
| 41.751475
| 366
| 0.478325
| 14,252
| 162,789
| 5.138367
| 0.024418
| 0.047848
| 0.05981
| 0.052873
| 0.887549
| 0.870453
| 0.853766
| 0.840111
| 0.829214
| 0.816774
| 0
| 0.013942
| 0.421503
| 162,789
| 3,898
| 367
| 41.762186
| 0.76369
| 0.204971
| 0
| 0.741368
| 0
| 0.014438
| 0.166449
| 0.06996
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089768
| false
| 0.003139
| 0.040176
| 0
| 0.241055
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d3124464e6567d9c22ab75c5f718e33efdf47b4
| 12,871
|
py
|
Python
|
tests/system/test_process.py
|
dhhse/prereform2modern
|
b35f7d99f5a38fadf63e7d11ce0a59fed8ef80a1
|
[
"MIT"
] | 1
|
2020-09-09T09:51:30.000Z
|
2020-09-09T09:51:30.000Z
|
tests/system/test_process.py
|
dhhse/prereform2modern
|
b35f7d99f5a38fadf63e7d11ce0a59fed8ef80a1
|
[
"MIT"
] | null | null | null |
tests/system/test_process.py
|
dhhse/prereform2modern
|
b35f7d99f5a38fadf63e7d11ce0a59fed8ef80a1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module contains tests for translit_from_string.
My setup for running this module is as follows:
# in directory prereform2modern/
$ python -m pip install --user virtualenv
$ virtualenv .venv --python=python2.7
$ source .venv/bin/activate
(.venv) $ ../.venv/bin/pip install pytest
(.venv) $ ../.venv/bin/python2.7 -m pytest
This test module can also be run as follows:
$ python2.7 -m unittest -v tests.system.test_process
OR $ ../.venv/bin/python2.7 -m pytest -v tests/system/test_process.py
>>> from prereform2modern import Processor
>>> text = "Онъ стоялъ подлѣ письменнаго стола"
>>> print Processor.process_text(
...     text=text.decode('utf-8'),
...     show=False,
...     delimiters=[u'', u'{', u'}'],
...     check_brackets=True,
...     print_log=False
... )[0]
Он{Онъ} стоял{стоялъ} подле{подлѣ} письменного{письменнаго} стола
"""
import json
from unittest import TestCase
from prereform2modern.process import Processor
class TestProcess(TestCase):
def test_process_text_all_args_false(self):
    """With all optional args falsy, output is the plain modernised text."""
    source = u'Онъ стоялъ подлѣ письменнаго стола.'
    result_text, result_changes, result_json = Processor.process_text(
        source, '', '', ''
    )
    self.assertEqual(result_text, u'Он стоял подле письменного стола.')
    expected_changes = u'\n'.join([
        u'Онъ --> Он',
        u'стоялъ --> стоял',
        u'подлѣ --> подле',
        u'письменнаго --> письменного',
    ])
    self.assertEqual(result_changes, expected_changes)
    # Token 6 is the modernised word with its pre-reform original.
    parsed = json.loads(result_json)
    entry = parsed[u'6']
    self.assertEqual(entry[u'word'], u'письменного')
    self.assertEqual(entry[u'old_word'], u'письменнаго')
def test_process_text_with_delimiters(self):
text = u'Онъ стоялъ подлѣ письменнаго стола.'
text_res, changes, _json = Processor.process_text(
text=text,
show=True,
delimiters=[u'', u'{', u'}'],
check_brackets=False,
# print_log=False
)
t_expected = u'Он{Онъ} стоял{стоялъ} подле{подлѣ} письменного{письменнаго} стола.'
self.assertEqual(text_res, t_expected)
# The rest is the same as in the previous test
changes_expected = u'Онъ --> Он\n\
стоялъ --> стоял\n\
подлѣ --> подле\n\
письменнаго --> письменного'
self.assertEqual(changes, changes_expected)
_json = json.loads(_json)
word_res = _json[u'6'][u'word']
old_word_res = _json[u'6'][u'old_word']
word_expected = u'письменного'
old_word_expected = u'письменнаго'
self.assertEqual(word_res, word_expected)
self.assertEqual(old_word_res, old_word_expected)
def test_process_text_with_editorial_correction_in_brackets(self):
orig_text = u'такъ [называемую]'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=True,
# print_log=False
)
t_expected = u"так{такъ} <choice original_editorial_correction='[называемую]'><sic></sic><corr>называемую</corr></choice>"
self.assertEqual(text_res, t_expected)
changes_expected = u'такъ --> так'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'1': {u'word': u' ',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'0': {u'word': u'так',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'такъ',
# u'old_plain_word': None
},
u'2': {u'word': u'[называемую]',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_with_edit_corr_in_brackets_false_brackets(self):
orig_text = u'такъ [называемую]'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=False, # This is different from the test above
# print_log=False
)
t_expected = u'так [называемую]'
self.assertEqual(text_res, t_expected)
changes_expected = u'такъ --> так'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'1': {u'word': u' ',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'0': {u'word': u'так',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'такъ',
# u'old_plain_word': None
},
u'2': {u'word': u'[называемую]',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_with_digits_in_brackets(self):
orig_text = u'такъ [13]'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=True,
# print_log=False
)
t_expected = u"так{такъ} [13]"
self.assertEqual(text_res, t_expected)
changes_expected = u'такъ --> так'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'1': {u'word': u' ',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'0': {u'word': u'так',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'такъ',
# u'old_plain_word': None
},
u'3': {u'word': u'13',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'2': {u'word': u'[',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'4': {u'word': u']',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_with_empty_brackets_check_true(self):
orig_text = u'такъ []'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=True,
# print_log=False
)
t_expected = u"так{такъ} []"
self.assertEqual(text_res, t_expected)
changes_expected = u'такъ --> так'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'1': {u'word': u' ',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'0': {u'word': u'так',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'такъ',
# u'old_plain_word': None
},
u'3': {u'word': u']',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'2': {u'word': u'[',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_with_empty_brackets_check_false(self):
orig_text = u'такъ []'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=False, # This is different from the test above
# print_log=False
)
t_expected = u"так []"
self.assertEqual(text_res, t_expected)
changes_expected = u'такъ --> так'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'1': {u'word': u' ',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'0': {u'word': u'так',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'такъ',
# u'old_plain_word': None
},
u'3': {u'word': u']',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
},
u'2': {u'word': u'[',
# u'plain_word': None,
u'type': u'punct',
u'old_word': u'',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_empty_input(self):
orig_text = u''
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=False,
# print_log=False
)
t_expected = u''
self.assertEqual(text_res, t_expected)
changes_expected = u''
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_old_style_correction_in_brackets_check_true(self):
orig_text = u'обычно[мъ]'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=True,
# print_log=False
)
t_expected = u"<choice original_editorial_correction='обычно[мъ]'><sic>обычно</sic><corr>обычном{обычномъ}</corr></choice>"
self.assertEqual(text_res, t_expected)
changes_expected = u'обычно[мъ] --> обычно[м]'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'0': {u'word': u'обычно[м]',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'обычно[мъ]',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
def test_process_text_old_style_correction_in_brackets_check_false(self):
orig_text = u'обычно[мъ]'
text_res, changes, _json = Processor.process_text(
text=orig_text,
show=False,
delimiters=[u'', u'{', u'}'],
check_brackets=False, # This is different from the test above
# print_log=False
)
t_expected = u'обычно[м]'
self.assertEqual(text_res, t_expected)
changes_expected = u'обычно[мъ] --> обычно[м]'
self.assertEqual(changes, changes_expected)
json_obj = json.loads(_json)
expected_json = {
u'0': {u'word': u'обычно[м]',
# u'plain_word': None,
u'type': u'word',
u'old_word': u'обычно[мъ]',
# u'old_plain_word': None
}
}
self.assertDictEqual(json_obj, expected_json)
| 33.693717
| 131
| 0.502059
| 1,459
| 12,871
| 4.1878
| 0.08499
| 0.041735
| 0.089362
| 0.080196
| 0.867758
| 0.847954
| 0.840753
| 0.827169
| 0.826514
| 0.796399
| 0
| 0.005568
| 0.372077
| 12,871
| 381
| 132
| 33.782152
| 0.750433
| 0.164168
| 0
| 0.714844
| 0
| 0.007813
| 0.119413
| 0.019731
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.039063
| false
| 0
| 0.011719
| 0
| 0.054688
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d3b3ff35e8e3bc66c9a01d022dfac300554a0f5
| 10,465
|
py
|
Python
|
tests/test_movingfiles.py
|
virusdave/tvnamer
|
ee4f6bc6c09cf76661052c195cc0047017292595
|
[
"Unlicense"
] | 529
|
2015-01-05T18:27:09.000Z
|
2022-03-14T23:57:11.000Z
|
tests/test_movingfiles.py
|
virusdave/tvnamer
|
ee4f6bc6c09cf76661052c195cc0047017292595
|
[
"Unlicense"
] | 118
|
2015-01-01T02:16:27.000Z
|
2022-03-27T11:08:10.000Z
|
tests/test_movingfiles.py
|
virusdave/tvnamer
|
ee4f6bc6c09cf76661052c195cc0047017292595
|
[
"Unlicense"
] | 93
|
2015-01-12T10:28:18.000Z
|
2022-03-05T14:26:07.000Z
|
#!/usr/bin/env python
"""Tests moving renamed files
"""
from functional_runner import run_tvnamer, verify_out_data
from helpers import attr
@attr("functional")
def test_simple_realtive_move():
"""Move file to simple relative static dir
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "test/",
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi'],
with_config = conf,
with_input = "")
expected_files = ['test/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_dynamic_destination():
"""Move file to simple relative static dir
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/",
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi'],
with_config = conf)
expected_files = ['tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_cli_destination():
"""Tests specifying the destination via command line argument
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi'],
with_flags = ['--batch', '--move', '--movedestination=season %(seasonnumber)d/'])
expected_files = ['season 1/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_interactive_allyes():
"""Tests interactive UI for moving all files
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "test",
"select_first": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'scrubs.s01e02.avi'],
with_config = conf,
with_input = "y\ny\ny\ny\n")
expected_files = ['test/Scrubs - [01x01] - My First Day.avi',
'test/Scrubs - [01x02] - My Mentor.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_interactive_allno():
"""Tests interactive UI allows not moving any files
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "test",
"select_first": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'scrubs.s01e02.avi'],
with_config = conf,
with_input = "y\nn\ny\nn\n")
expected_files = ['Scrubs - [01x01] - My First Day.avi',
'Scrubs - [01x02] - My Mentor.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_interactive_somefiles():
"""Tests interactive UI allows not renaming some files, renaming/moving others
Rename and move first file, don't rename second file (so no move), and
rename but do not move last file (Input is: y/y, n, y/n)
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "test",
"select_first": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'scrubs.s01e02.avi', 'scrubs.s01e03.avi'],
with_config = conf,
with_input = "y\ny\nn\ny\nn\n")
expected_files = ['test/Scrubs - [01x01] - My First Day.avi',
'scrubs.s01e02.avi',
'Scrubs - [01x03] - My Best Friend\'s Mistake.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_with_invalid_seriesname():
"""Tests series name containing invalid filename characters
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "%(seriesname)s",
"batch": true,
"windows_safe_filenames": true}
"""
out_data = run_tvnamer(
with_files = ['csi.cyber.s01e03.avi'],
with_config = conf)
expected_files = ['CSI_ Cyber/CSI_ Cyber - [01x03] - Killer En Route.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_with_invalid_seriesname_test2():
"""Another test for series name containing invalid filename characters
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "%(seriesname)s",
"batch": true,
"move_files_fullpath_replacements": [
{"is_regex": true,
"match": "CSI_ Miami",
"replacement": "CSI"}],
"windows_safe_filenames": true}
"""
out_data = run_tvnamer(
with_files = ['csi.miami.s01e01.avi'],
with_config = conf)
expected_files = ['CSI/CSI - [01x01] - Golden Parachute.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_files_lowercase_destination():
"""Test move_files_lowercase_destination configuration option.
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "Test/This/%(seriesname)s/S%(seasonnumber)02d",
"move_files_lowercase_destination": true,
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.This.Is.a.Test.avi'],
with_config = conf,
with_input = "")
expected_files = ['Test/This/scrubs/S01/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_date_based_episode():
"""Moving a date-base episode (lighthouse ticket #56)
"""
conf = """
{"move_files_enable": true,
"move_files_destination_date": "Test/%(seriesname)s/%(year)s/%(month)s/%(day)s",
"move_files_lowercase_destination": true,
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['The Colbert Report - 2011-09-28 Ken Burns.avi'],
with_config = conf,
with_input = "")
expected_files = ['Test/The Colbert Report/2011/9/28/The Colbert Report - [2011-09-28] - Ken Burns.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_files_full_filepath_simple():
"""Moving file destination including a fixed filename
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "TestDir/%(seriesname)s/season %(seasonnumber)02d/%(episodenumbers)s/SpecificName.avi",
"move_files_destination_is_filepath": true,
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e02.avi'],
with_config = conf,
with_input = "")
expected_files = ['TestDir/Scrubs/season 01/02/SpecificName.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_files_full_filepath_with_origfilename():
"""Moving file destination including a filename
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "TestDir/%(seriesname)s/season %(seasonnumber)02d/%(episodenumbers)s/%(originalfilename)s",
"move_files_destination_is_filepath": true,
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'scrubs.s01e02.avi'],
with_config = conf,
with_input = "")
expected_files = [
'TestDir/Scrubs/season 01/01/scrubs.s01e01.avi',
'TestDir/Scrubs/season 01/02/scrubs.s01e02.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_with_correct_name():
"""Files with correct name should still be moved
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "SubDir",
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['Scrubs - [01x02] - My Mentor.avi'],
with_config = conf,
with_input = "y\n")
expected_files = ['SubDir/Scrubs - [01x02] - My Mentor.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_no_season():
"""Files with no season number should moveable [#94]
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "SubDir",
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['Scrubs - [02] - My Mentor.avi'],
with_config = conf,
with_input = "y\n")
expected_files = ['SubDir/Scrubs - [02] - My Mentor.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_move_files_only():
"""With parameter move_files_only set to true files should be moved and not renamed
"""
conf = """
{"move_files_only": true,
"move_files_enable": true,
"move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/",
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi'],
with_config = conf)
expected_files = ['tv/Scrubs/season 1/scrubs.s01e01.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_forcefully_moving_enabled():
"""Forcefully moving files, overwriting destination
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/",
"batch": true,
"overwrite_destination_on_move": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'Scrubs - [01x01] - My First Day.avi'],
with_config = conf)
expected_files = ['tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_forcefully_moving_disabled():
"""Explicitly disable forcefully moving files
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/",
"batch": true,
"overwrite_destination_on_move": false}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'scrubs - [01x01].avi'],
with_config = conf)
expected_files = [
'Scrubs - [01x01] - My First Day.avi',
'tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
@attr("functional")
def test_forcefully_moving_default():
"""Ensure default is not overwrite destination
"""
conf = """
{"move_files_enable": true,
"move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/",
"batch": true}
"""
out_data = run_tvnamer(
with_files = ['scrubs.s01e01.avi', 'scrubs - [01x01].avi'],
with_config = conf)
expected_files = [
'Scrubs - [01x01] - My First Day.avi',
'tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi']
verify_out_data(out_data, expected_files)
| 26.29397
| 121
| 0.642045
| 1,285
| 10,465
| 4.958755
| 0.140856
| 0.060421
| 0.038763
| 0.059322
| 0.800847
| 0.76381
| 0.760201
| 0.74796
| 0.73054
| 0.694131
| 0
| 0.029433
| 0.214333
| 10,465
| 397
| 122
| 26.360202
| 0.745561
| 0.115815
| 0
| 0.743902
| 0
| 0.012195
| 0.492399
| 0.182653
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073171
| false
| 0
| 0.00813
| 0
| 0.081301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d466765abdaa72c7371ab0ce248dcba4ebea97e
| 36,056
|
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/_operations_mixin.py
|
praveenkuttappan/azure-sdk-for-python
|
4b79413667b7539750a6c7dde15737013a3d4bd5
|
[
"MIT"
] | null | null | null |
sdk/network/azure-mgmt-network/azure/mgmt/network/_operations_mixin.py
|
praveenkuttappan/azure-sdk-for-python
|
4b79413667b7539750a6c7dde15737013a3d4bd5
|
[
"MIT"
] | 1
|
2021-06-07T06:37:28.000Z
|
2021-06-07T06:37:28.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/_operations_mixin.py
|
praveenkuttappan/azure-sdk-for-python
|
4b79413667b7539750a6c7dde15737013a3d4bd5
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
class NetworkManagementClientOperationsMixin(object):
def begin_delete_bastion_shareable_link(
    self,
    resource_group_name,  # type: str
    bastion_host_name,  # type: str
    bsl_request,  # type: "_models.BastionShareableLinkListRequest"
    **kwargs  # type: Any
):
    """Deletes the Bastion Shareable Links for all the VMs specified in the request.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :type bsl_request: ~azure.mgmt.network.v2021_03_01.models.BastionShareableLinkListRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    import importlib

    # API versions whose .v<YYYY_MM_DD>.operations sub-package implements
    # this operation (replaces the generated if/elif import chain).
    supported_api_versions = (
        '2019-09-01', '2019-11-01', '2019-12-01',
        '2020-03-01', '2020-04-01', '2020-05-01',
        '2020-06-01', '2020-07-01', '2020-08-01',
        '2020-11-01', '2021-02-01', '2021-03-01',
    )
    api_version = self._get_api_version('begin_delete_bastion_shareable_link')
    if api_version not in supported_api_versions:
        raise ValueError("API version {} does not have operation 'begin_delete_bastion_shareable_link'".format(api_version))
    # Lazily import the version-specific mixin, equivalent to the generated
    # "from .vYYYY_MM_DD.operations import NetworkManagementClientOperationsMixin".
    module = importlib.import_module(
        '.v{}.operations'.format(api_version.replace('-', '_')),
        package=__package__,
    )
    OperationClass = module.NetworkManagementClientOperationsMixin
    mixin_instance = OperationClass()
    mixin_instance._client = self._client
    mixin_instance._config = self._config
    mixin_instance._serialize = Serializer(self._models_dict(api_version))
    mixin_instance._serialize.client_side_validation = False
    mixin_instance._deserialize = Deserializer(self._models_dict(api_version))
    return mixin_instance.begin_delete_bastion_shareable_link(resource_group_name, bastion_host_name, bsl_request, **kwargs)
def begin_generatevirtualwanvpnserverconfigurationvpnprofile(
    self,
    resource_group_name,  # type: str
    virtual_wan_name,  # type: str
    vpn_client_params,  # type: "_models.VirtualWanVpnProfileParameters"
    **kwargs  # type: Any
):
    """Generates a unique VPN profile for P2S clients for VirtualWan and associated
    VpnServerConfiguration combination in the specified resource group.

    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param virtual_wan_name: The name of the VirtualWAN whose associated VpnServerConfigurations is
     needed.
    :type virtual_wan_name: str
    :param vpn_client_params: Parameters supplied to the generate VirtualWan VPN profile generation
     operation.
    :type vpn_client_params: ~azure.mgmt.network.v2021_03_01.models.VirtualWanVpnProfileParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either VpnProfileResponse or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_03_01.models.VpnProfileResponse]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    import importlib

    # API versions whose .v<YYYY_MM_DD>.operations sub-package implements
    # this operation; note this one is also available in 2019-08-01.
    supported_api_versions = (
        '2019-08-01', '2019-09-01', '2019-11-01', '2019-12-01',
        '2020-03-01', '2020-04-01', '2020-05-01',
        '2020-06-01', '2020-07-01', '2020-08-01',
        '2020-11-01', '2021-02-01', '2021-03-01',
    )
    api_version = self._get_api_version('begin_generatevirtualwanvpnserverconfigurationvpnprofile')
    if api_version not in supported_api_versions:
        raise ValueError("API version {} does not have operation 'begin_generatevirtualwanvpnserverconfigurationvpnprofile'".format(api_version))
    # Lazily import the version-specific mixin, equivalent to the generated
    # "from .vYYYY_MM_DD.operations import NetworkManagementClientOperationsMixin".
    module = importlib.import_module(
        '.v{}.operations'.format(api_version.replace('-', '_')),
        package=__package__,
    )
    OperationClass = module.NetworkManagementClientOperationsMixin
    mixin_instance = OperationClass()
    mixin_instance._client = self._client
    mixin_instance._config = self._config
    mixin_instance._serialize = Serializer(self._models_dict(api_version))
    mixin_instance._serialize.client_side_validation = False
    mixin_instance._deserialize = Deserializer(self._models_dict(api_version))
    return mixin_instance.begin_generatevirtualwanvpnserverconfigurationvpnprofile(resource_group_name, virtual_wan_name, vpn_client_params, **kwargs)
def begin_get_active_sessions(
    self,
    resource_group_name,  # type: str
    bastion_host_name,  # type: str
    **kwargs  # type: Any
):
    """Returns the list of currently active sessions on the Bastion.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns an iterator like instance of either BastionActiveSessionListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.BastionActiveSessionListResult]]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    import importlib

    # API versions whose .v<YYYY_MM_DD>.operations sub-package implements
    # this operation (replaces the generated if/elif import chain).
    supported_api_versions = (
        '2019-09-01', '2019-11-01', '2019-12-01',
        '2020-03-01', '2020-04-01', '2020-05-01',
        '2020-06-01', '2020-07-01', '2020-08-01',
        '2020-11-01', '2021-02-01', '2021-03-01',
    )
    api_version = self._get_api_version('begin_get_active_sessions')
    if api_version not in supported_api_versions:
        raise ValueError("API version {} does not have operation 'begin_get_active_sessions'".format(api_version))
    # Lazily import the version-specific mixin, equivalent to the generated
    # "from .vYYYY_MM_DD.operations import NetworkManagementClientOperationsMixin".
    module = importlib.import_module(
        '.v{}.operations'.format(api_version.replace('-', '_')),
        package=__package__,
    )
    OperationClass = module.NetworkManagementClientOperationsMixin
    mixin_instance = OperationClass()
    mixin_instance._client = self._client
    mixin_instance._config = self._config
    mixin_instance._serialize = Serializer(self._models_dict(api_version))
    mixin_instance._serialize.client_side_validation = False
    mixin_instance._deserialize = Deserializer(self._models_dict(api_version))
    return mixin_instance.begin_get_active_sessions(resource_group_name, bastion_host_name, **kwargs)
def begin_put_bastion_shareable_link(
    self,
    resource_group_name,  # type: str
    bastion_host_name,  # type: str
    bsl_request,  # type: "_models.BastionShareableLinkListRequest"
    **kwargs  # type: Any
):
    """Creates a Bastion Shareable Links for all the VMs specified in the request.

    Multi-API dispatcher: resolves the API version configured for this
    operation and delegates to that version's operations mixin.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :type bsl_request: ~azure.mgmt.network.v2021_03_01.models.BastionShareableLinkListRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns an iterator like instance of either
     BastionShareableLinkListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.BastionShareableLinkListResult]]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Lazily import only the versioned module actually selected by the profile.
    api_version = self._get_api_version('begin_put_bastion_shareable_link')
    if api_version == '2019-09-01':
        from .v2019_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-11-01':
        from .v2019_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-12-01':
        from .v2019_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-03-01':
        from .v2020_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-04-01':
        from .v2020_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-05-01':
        from .v2020_05_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-07-01':
        from .v2020_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-08-01':
        from .v2020_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-11-01':
        from .v2020_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-02-01':
        from .v2021_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-03-01':
        from .v2021_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    else:
        raise ValueError("API version {} does not have operation 'begin_put_bastion_shareable_link'".format(api_version))
    # Wire this client's transport/config into the versioned mixin, with
    # (de)serializers bound to the resolved version's model namespace.
    op = OpClass()
    op._client = self._client
    op._config = self._config
    op._serialize = Serializer(self._models_dict(api_version))
    op._serialize.client_side_validation = False
    op._deserialize = Deserializer(self._models_dict(api_version))
    return op.begin_put_bastion_shareable_link(resource_group_name, bastion_host_name, bsl_request, **kwargs)
def check_dns_name_availability(
    self,
    location,  # type: str
    domain_name_label,  # type: str
    **kwargs  # type: Any
):
    """Checks whether a domain name in the cloudapp.azure.com zone is available for use.

    Multi-API dispatcher: resolves the API version configured for this
    operation and delegates to that version's operations mixin.

    :param location: The location of the domain name.
    :type location: str
    :param domain_name_label: The domain name to be verified. It must conform to the following
     regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
    :type domain_name_label: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DnsNameAvailabilityResult, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2021_03_01.models.DnsNameAvailabilityResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Lazily import only the versioned module actually selected by the profile.
    api_version = self._get_api_version('check_dns_name_availability')
    if api_version == '2015-06-15':
        from .v2015_06_15.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2016-09-01':
        from .v2016_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2016-12-01':
        from .v2016_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2017-03-01':
        from .v2017_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2017-06-01':
        from .v2017_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2017-09-01':
        from .v2017_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2017-10-01':
        from .v2017_10_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2017-11-01':
        from .v2017_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-01-01':
        from .v2018_01_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-04-01':
        from .v2018_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-06-01':
        from .v2018_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-07-01':
        from .v2018_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-08-01':
        from .v2018_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-10-01':
        from .v2018_10_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-11-01':
        from .v2018_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-12-01':
        from .v2018_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-02-01':
        from .v2019_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-04-01':
        from .v2019_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-06-01':
        from .v2019_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-07-01':
        from .v2019_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-09-01':
        from .v2019_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-11-01':
        from .v2019_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-12-01':
        from .v2019_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-03-01':
        from .v2020_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-04-01':
        from .v2020_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-05-01':
        from .v2020_05_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-07-01':
        from .v2020_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-08-01':
        from .v2020_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-11-01':
        from .v2020_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-02-01':
        from .v2021_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-03-01':
        from .v2021_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    else:
        raise ValueError("API version {} does not have operation 'check_dns_name_availability'".format(api_version))
    # Wire this client's transport/config into the versioned mixin, with
    # (de)serializers bound to the resolved version's model namespace.
    op = OpClass()
    op._client = self._client
    op._config = self._config
    op._serialize = Serializer(self._models_dict(api_version))
    op._serialize.client_side_validation = False
    op._deserialize = Deserializer(self._models_dict(api_version))
    return op.check_dns_name_availability(location, domain_name_label, **kwargs)
def disconnect_active_sessions(
    self,
    resource_group_name,  # type: str
    bastion_host_name,  # type: str
    session_ids,  # type: "_models.SessionIds"
    **kwargs  # type: Any
):
    """Returns the list of currently active sessions on the Bastion.

    Multi-API dispatcher: resolves the API version configured for this
    operation and delegates to that version's operations mixin.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param session_ids: The list of sessionids to disconnect.
    :type session_ids: ~azure.mgmt.network.v2021_03_01.models.SessionIds
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either BastionSessionDeleteResult or the result of
     cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.BastionSessionDeleteResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Lazily import only the versioned module actually selected by the profile.
    api_version = self._get_api_version('disconnect_active_sessions')
    if api_version == '2019-09-01':
        from .v2019_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-11-01':
        from .v2019_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-12-01':
        from .v2019_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-03-01':
        from .v2020_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-04-01':
        from .v2020_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-05-01':
        from .v2020_05_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-07-01':
        from .v2020_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-08-01':
        from .v2020_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-11-01':
        from .v2020_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-02-01':
        from .v2021_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-03-01':
        from .v2021_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    else:
        raise ValueError("API version {} does not have operation 'disconnect_active_sessions'".format(api_version))
    # Wire this client's transport/config into the versioned mixin, with
    # (de)serializers bound to the resolved version's model namespace.
    op = OpClass()
    op._client = self._client
    op._config = self._config
    op._serialize = Serializer(self._models_dict(api_version))
    op._serialize.client_side_validation = False
    op._deserialize = Deserializer(self._models_dict(api_version))
    return op.disconnect_active_sessions(resource_group_name, bastion_host_name, session_ids, **kwargs)
def get_bastion_shareable_link(
    self,
    resource_group_name,  # type: str
    bastion_host_name,  # type: str
    bsl_request,  # type: "_models.BastionShareableLinkListRequest"
    **kwargs  # type: Any
):
    """Return the Bastion Shareable Links for all the VMs specified in the request.

    Multi-API dispatcher: resolves the API version configured for this
    operation and delegates to that version's operations mixin.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param bastion_host_name: The name of the Bastion Host.
    :type bastion_host_name: str
    :param bsl_request: Post request for all the Bastion Shareable Link endpoints.
    :type bsl_request: ~azure.mgmt.network.v2021_03_01.models.BastionShareableLinkListRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either BastionShareableLinkListResult or the result of
     cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.BastionShareableLinkListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Lazily import only the versioned module actually selected by the profile.
    api_version = self._get_api_version('get_bastion_shareable_link')
    if api_version == '2019-09-01':
        from .v2019_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-11-01':
        from .v2019_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-12-01':
        from .v2019_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-03-01':
        from .v2020_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-04-01':
        from .v2020_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-05-01':
        from .v2020_05_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-07-01':
        from .v2020_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-08-01':
        from .v2020_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-11-01':
        from .v2020_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-02-01':
        from .v2021_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-03-01':
        from .v2021_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    else:
        raise ValueError("API version {} does not have operation 'get_bastion_shareable_link'".format(api_version))
    # Wire this client's transport/config into the versioned mixin, with
    # (de)serializers bound to the resolved version's model namespace.
    op = OpClass()
    op._client = self._client
    op._config = self._config
    op._serialize = Serializer(self._models_dict(api_version))
    op._serialize.client_side_validation = False
    op._deserialize = Deserializer(self._models_dict(api_version))
    return op.get_bastion_shareable_link(resource_group_name, bastion_host_name, bsl_request, **kwargs)
def supported_security_providers(
    self,
    resource_group_name,  # type: str
    virtual_wan_name,  # type: str
    **kwargs  # type: Any
):
    """Gives the supported security providers for the virtual wan.

    Multi-API dispatcher: resolves the API version configured for this
    operation and delegates to that version's operations mixin.

    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param virtual_wan_name: The name of the VirtualWAN for which supported security providers
     are needed.
    :type virtual_wan_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: VirtualWanSecurityProviders, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2021_03_01.models.VirtualWanSecurityProviders
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Lazily import only the versioned module actually selected by the profile.
    api_version = self._get_api_version('supported_security_providers')
    if api_version == '2018-08-01':
        from .v2018_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-10-01':
        from .v2018_10_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-11-01':
        from .v2018_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2018-12-01':
        from .v2018_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-02-01':
        from .v2019_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-04-01':
        from .v2019_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-06-01':
        from .v2019_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-07-01':
        from .v2019_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-08-01':
        from .v2019_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-09-01':
        from .v2019_09_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-11-01':
        from .v2019_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2019-12-01':
        from .v2019_12_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-03-01':
        from .v2020_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-04-01':
        from .v2020_04_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-05-01':
        from .v2020_05_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-06-01':
        from .v2020_06_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-07-01':
        from .v2020_07_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-08-01':
        from .v2020_08_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2020-11-01':
        from .v2020_11_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-02-01':
        from .v2021_02_01.operations import NetworkManagementClientOperationsMixin as OpClass
    elif api_version == '2021-03-01':
        from .v2021_03_01.operations import NetworkManagementClientOperationsMixin as OpClass
    else:
        raise ValueError("API version {} does not have operation 'supported_security_providers'".format(api_version))
    # Wire this client's transport/config into the versioned mixin, with
    # (de)serializers bound to the resolved version's model namespace.
    op = OpClass()
    op._client = self._client
    op._config = self._config
    op._serialize = Serializer(self._models_dict(api_version))
    op._serialize.client_side_validation = False
    op._deserialize = Deserializer(self._models_dict(api_version))
    return op.supported_security_providers(resource_group_name, virtual_wan_name, **kwargs)
| 65.675774
| 154
| 0.730308
| 4,006
| 36,056
| 6.355217
| 0.071892
| 0.069131
| 0.271495
| 0.281551
| 0.890923
| 0.88833
| 0.883538
| 0.880082
| 0.876468
| 0.872854
| 0
| 0.0749
| 0.202768
| 36,056
| 548
| 155
| 65.79562
| 0.810784
| 0.2396
| 0
| 0.80597
| 0
| 0
| 0.080006
| 0.019869
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019901
| false
| 0
| 0.345771
| 0
| 0.38806
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
5d5b4d233c12f3e2bb2c7fa7608cbb4f73675ba3
| 30
|
py
|
Python
|
tiankafei-code-python/analysis/frequency/__init__.py
|
tiankafei/java
|
9ff39cb47b8f2144851856b4412b1b0b7781cb09
|
[
"Apache-2.0"
] | 1
|
2021-08-23T03:10:33.000Z
|
2021-08-23T03:10:33.000Z
|
tiankafei-code-python/analysis/frequency/__init__.py
|
tiankafei/java
|
9ff39cb47b8f2144851856b4412b1b0b7781cb09
|
[
"Apache-2.0"
] | 8
|
2020-09-02T15:14:03.000Z
|
2021-01-08T00:34:26.000Z
|
tiankafei-code-python/analysis/frequency/__init__.py
|
tiankafei/java
|
9ff39cb47b8f2144851856b4412b1b0b7781cb09
|
[
"Apache-2.0"
] | 2
|
2020-11-25T07:58:22.000Z
|
2021-01-28T00:15:11.000Z
|
# 作者:甜咖啡
# 新建时间:2021/4/9 1:31
| 10
| 20
| 0.6
| 8
| 30
| 2.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.36
| 0.166667
| 30
| 2
| 21
| 15
| 0.36
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
538234f6b1c6caa6dbe2b79a51b5fccdd6570b3c
| 9,133
|
py
|
Python
|
services/tests/test_data_generators/binance_tdg.py
|
ToucanBran/gateio-crypto-trading-bot-binance-announcements-new-coins
|
a83f4f0de3463001855c5e89b8f39c6b20ca3e99
|
[
"MIT"
] | 1
|
2021-12-27T01:54:38.000Z
|
2021-12-27T01:54:38.000Z
|
services/tests/test_data_generators/binance_tdg.py
|
ToucanBran/gateio-crypto-trading-bot-binance-announcements-new-coins
|
a83f4f0de3463001855c5e89b8f39c6b20ca3e99
|
[
"MIT"
] | null | null | null |
services/tests/test_data_generators/binance_tdg.py
|
ToucanBran/gateio-crypto-trading-bot-binance-announcements-new-coins
|
a83f4f0de3463001855c5e89b8f39c6b20ca3e99
|
[
"MIT"
] | null | null | null |
def _announcement_article(article_id, code, title):
    """Build one article record in the shape returned by Binance's announcement API.

    Only id/code/title carry data in this fixture; the remaining fields are
    always ``None`` in the captured payload.
    """
    return {
        'id': article_id,
        'code': code,
        'title': title,
        'body': None,
        'type': None,
        'catalogId': None,
        'catalogName': None,
        'publishDate': None,
    }


# Test fixture: a captured announcement-list response payload.
# FIX: the original literal had a title string broken across physical lines
# (a syntax error); the intact copy of the same title in the `articles`
# fixture below confirms the intended single-line text.
announcements = {
    'code': '000000',
    'message': None,
    'messageDetail': None,
    'data': {
        'articles': [
            _announcement_article(72913, 'b9fbcf46da3a4d7f8f2d1bf1bd874665', 'Binance Adds BETA & BNX on Isolated Margin, Stablecoins Annual Interest Rate Starts at 6.20%!'),
            _announcement_article(72876, '14e4354e11444c7092934e618f5eac64', 'Introducing the FC Porto Fan Token (PORTO) Token Sale on Binance Launchpad!'),
            _announcement_article(72806, '5c649babec204c27b7d7b734dba937ee', 'Binance Adds ALGO/RUB, AUD/USDC, LAZIO/BUSD, LUNA/BIDR, MANA/TRY, OXT/BUSD & SHIB/UAH Trading Pairs'),
            _announcement_article(72789, 'f9e1123ffc12458795716a0e027ff07a', 'Binance Will List Rari Governance Token (RGT)'),
            _announcement_article(72774, '1f01516fe1f64bd08ed5a9f4207a9e5b', 'Binance Adds DOGE/BUSD Cross Margin & SHIB/DOGE Isolated Margin Pairs, Stablecoins Annual Interest Rate Starts at 6.20%!'),
            _announcement_article(72633, '0337a0095c5b47f09a4882b6af624bdd', 'Binance Will List BinaryX (BNX) in the Innovation Zone'),
            _announcement_article(72275, '8f89686731e04da3b9a98e20a0897413', 'Binance Futures Will Launch Coin-Margined FTM Perpetual Contracts with Up to 20X Leverage'),
            _announcement_article(72200, 'e75ededcc356463a94786de743009a31', 'Binance Adds SHIB/DOGE Trading Pair'),
            _announcement_article(71995, 'eabde816eb1043f6b1cb04215902b7c1', 'Binance Adds ADX/USDT, AUCTION/USDT, CELO/BUSD, FTM/RUB, NU/AUD, NU/RUB, REEF/BIDR & REEF/TRY Trading Pairs'),
            _announcement_article(71918, '28c68286fe9340cd97740774b4e13af9', 'Introducing Mines of Dalarnia (DAR) on Binance Launchpool! Farm DAR By Staking BNB and BUSD'),
            _announcement_article(71904, '6ccecf57e6b24276a23ce49b77496efd', 'Binance Adds CHESS on Isolated Margin, Stablecoins Annual Interest Rate Starts at 6.20%!'),
            _announcement_article(71467, 'aa4a4f5e4f1344ae921ad13c90d7a21f', 'Binance Futures Will Launch USDT-Margined CTSI Perpetual Contracts with Up to 25X Leverage'),
            _announcement_article(71321, '8d0eaa46e96e490999a7e1a5cd6bebcd', 'Binance Adds FTM/AUD, FTM/BRL & SCRT/BUSD Trading Pairs'),
            _announcement_article(71304, '75ee0166b3ec43aba2266ef98893dd56', 'Binance Will List Tranchess (CHESS) in the Innovation Zone'),
            _announcement_article(71268, 'c94a767e99244553b7a976ee8957a2c7', 'Binance Completes the Lazio Fan Token Subscription Launchpad and Will Open Trading for LAZIO'),
        ],
        # 'total' reflects the server-side total count, not the page size.
        'total': 842,
    },
    'success': True,
}
# Test fixture: the same 15 announcements as (id, code, title) rows.
# NOTE: every other field is the *string* "None" (not the None singleton)
# in this fixture — preserved exactly as captured.
_ARTICLE_ROWS = [
    (72913, "b9fbcf46da3a4d7f8f2d1bf1bd874665",
     "Binance Adds BETA & BNX on Isolated Margin, Stablecoins Annual Interest Rate Starts at 6.20%!"),
    (72876, "14e4354e11444c7092934e618f5eac64",
     "Introducing the FC Porto Fan Token (PORTO) Token Sale on Binance Launchpad!"),
    (72806, "5c649babec204c27b7d7b734dba937ee",
     "Binance Adds ALGO/RUB, AUD/USDC, LAZIO/BUSD, LUNA/BIDR, MANA/TRY, OXT/BUSD & SHIB/UAH Trading Pairs"),
    (72789, "f9e1123ffc12458795716a0e027ff07a",
     "Binance Will List Rari Governance Token (RGT)"),
    (72774, "1f01516fe1f64bd08ed5a9f4207a9e5b",
     "Binance Adds DOGE/BUSD Cross Margin & SHIB/DOGE Isolated Margin Pairs, Stablecoins Annual Interest Rate Starts at 6.20%!"),
    (72633, "0337a0095c5b47f09a4882b6af624bdd",
     "Binance Will List BinaryX (BNX) in the Innovation Zone"),
    (72275, "8f89686731e04da3b9a98e20a0897413",
     "Binance Futures Will Launch Coin-Margined FTM Perpetual Contracts with Up to 20X Leverage"),
    (72200, "e75ededcc356463a94786de743009a31",
     "Binance Adds SHIB/DOGE Trading Pair"),
    (71995, "eabde816eb1043f6b1cb04215902b7c1",
     "Binance Adds ADX/USDT, AUCTION/USDT, CELO/BUSD, FTM/RUB, NU/AUD, NU/RUB, REEF/BIDR & REEF/TRY Trading Pairs"),
    (71918, "28c68286fe9340cd97740774b4e13af9",
     "Introducing Mines of Dalarnia (DAR) on Binance Launchpool! Farm DAR By Staking BNB and BUSD"),
    (71904, "6ccecf57e6b24276a23ce49b77496efd",
     "Binance Adds CHESS on Isolated Margin, Stablecoins Annual Interest Rate Starts at 6.20%!"),
    (71467, "aa4a4f5e4f1344ae921ad13c90d7a21f",
     "Binance Futures Will Launch USDT-Margined CTSI Perpetual Contracts with Up to 25X Leverage"),
    (71321, "8d0eaa46e96e490999a7e1a5cd6bebcd",
     "Binance Adds FTM/AUD, FTM/BRL & SCRT/BUSD Trading Pairs"),
    (71304, "75ee0166b3ec43aba2266ef98893dd56",
     "Binance Will List Tranchess (CHESS) in the Innovation Zone"),
    (71268, "c94a767e99244553b7a976ee8957a2c7",
     "Binance Completes the Lazio Fan Token Subscription Launchpad and Will Open Trading for LAZIO"),
]

articles = [
    {
        "id": _row_id,
        "code": _row_code,
        "title": _row_title,
        "body": "None",
        "type": "None",
        "catalogId": "None",
        "catalogName": "None",
        "publishDate": "None",
    }
    for _row_id, _row_code, _row_title in _ARTICLE_ROWS
]
| 58.922581
| 2,725
| 0.573087
| 856
| 9,133
| 6.114486
| 0.160047
| 0.045854
| 0.068781
| 0.091708
| 0.985862
| 0.985862
| 0.985862
| 0.985862
| 0.985862
| 0.985862
| 0
| 0.117673
| 0.267711
| 9,133
| 155
| 2,726
| 58.922581
| 0.664922
| 0
| 0
| 0.487013
| 0
| 0.038961
| 0.568864
| 0.105102
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53adc6c9cc2e96df1b20d134ca1d5ed4cb13c6ea
| 36,342
|
py
|
Python
|
youtube_search_requests/constants.py
|
mansuf/youtube-search-requests
|
f4e961688895d13343298eb9d3ccef131b3f8d0d
|
[
"MIT"
] | 3
|
2020-12-24T18:25:54.000Z
|
2020-12-29T20:58:05.000Z
|
youtube_search_requests/constants.py
|
trollfist20/youtube-search-requests
|
63c70ece7be4590c4e31df2c510d646e30744f2c
|
[
"MIT"
] | 1
|
2021-08-24T21:47:16.000Z
|
2021-11-09T16:21:57.000Z
|
youtube_search_requests/constants.py
|
mansuf/youtube-search-requests
|
f4e961688895d13343298eb9d3ccef131b3f8d0d
|
[
"MIT"
] | null | null | null |
# youtube-search-requests
# constants.py
import random
from urllib.parse import quote
# All YouTube URL endpoints used by this library.
BASE_YOUTUBE_URL = 'https://www.youtube.com/'
# Internal (InnerTube) search API; an API key is appended to this prefix.
BASE_YOUTUBE_SEARCH_INTERNAL_API_URL = 'https://www.youtube.com/youtubei/v1/search?key='
# Public search-results page; the URL-encoded query string is appended.
BASE_YOUTUBE_SEARCH_QUERY_URL = 'https://www.youtube.com/results?search_query='
# Short-link prefix; a video id is appended.
BASE_YOUTUBE_SHORT_URL = 'https://youtu.be/'
# Channel page prefix; a channel id is appended.
BASE_YOUTUBE_CHANNEL_URL = 'https://www.youtube.com/channel/'
# Watch page prefix; a video id is appended.
BASE_YOUTUBE_WATCH_URL = 'https://www.youtube.com/watch?v='
# Playlist page prefix; a playlist id is appended.
BASE_YOUTUBE_PLAYLIST_URL = 'https://www.youtube.com/playlist?list='
# Internal (InnerTube) "next" API; an API key is appended to this prefix.
BASE_YOUTUBE_SEARCH_RELATED_VIDEOS_INTERNAL_API_URL = 'https://www.youtube.com/youtubei/v1/next?key='
# Search-result type filters.
# Values are opaque YouTube search-parameter tokens (they look like base64
# blobs -- not documented by YouTube); percent-encoded via quote() where
# they contain URL-unsafe characters so they can be appended to a URL.
ALL_FILTERS = {
    'PLAYLISTS_FILTER': quote('EgIQAw=='),
    'CHANNELS_FILTER': quote('EgIQAg=='),
    'VIDEOS_FILTER': quote('EgIQAQ==')
}
# Video-specific filters: duration, HD, 4K, live videos, etc.
ALL_VIDEOS_FILTERS = {
    'LEN<4MIN': 'EgQQARgB',  # videos with length less than 4 minutes
    'LEN>20MIN': 'EgQQARgC',  # videos with length more than 20 minutes
    'LIVE': 'EgQQAUAB',  # live videos
    '4K': 'EgQQAXAB',  # 4K videos
    'HD': 'EgQQASAB',  # HD videos
    'WITH_SUBTITLES': 'EgQQASgB',  # videos with subtitles
    'VR360': 'EgQQAXgB',  # videos with VR 360 support
    'VR180': quote('EgUQAdABAQ=='),  # videos with VR 180 support
    '3D': 'EgQQATgB',  # videos with 3D support
    'HDR': quote('EgUQAcgBAQ=='),  # videos with HDR support
    'NO_FILTER': ALL_FILTERS['VIDEOS_FILTER']  # videos with no filter
}
# NEED DOCUMENTATION !!!
# All language codes accepted by YouTube, per the original author's survey.
# NOTE(review): completeness is not verified beyond this list.
VALID_LANGUAGES = [
    'af',
    'az',
    'id',
    'ms',
    'bs',
    'ca',
    'cs',
    'da',
    'de',
    'et',
    'en-IN',
    'en-GB',
    'en',
    'es',
    'es-419',
    'es-US',
    'eu',
    'fil',
    'fr',
    'fr-CA',
    'gl',
    'hr',
    'zu',
    'is',
    'sw',
    'lv',
    'lt',
    'hu',
    'nl',
    'no',
    'uz',
    'sq',
    'vi',
    'tr',
    'be',
    'bg',
    'ky',
    'kk',
    'mn',
    'ru',
    'sr',
    'uk',
    'el',
    'hy',
    'iw',
    'ur',
    'ar',
    'fa',
    'ne',
    'mr',
    'as',
    'bn',
    'pa',
    'gu',
    'or',
    'ta',
    'te',
    'kn',
    'si',
    'th',
    'lo',
    'my',
    'ka',
    'am',
    'km',
    'zh-CN',
    'zh-TW',
    'zh-HK',
    'ja',
    'ko'
]
# TODO: Finish this !!!
# Supported region codes -- deliberately incomplete so far.
VALID_REGIONS = [
    'ID',
    'US'
]
# TODO: FINISH THIS !!!
# Names of the user-agent categories understood by USER_AGENT_HEADERS.
# Commented-out entries have no user-agent pool defined yet.
# 'RANDOM' is a meta-entry meaning "pick any category at random".
VALID_USER_AGENTS = [
    'BOT',
    'LINUX_FIREFOX',
    'LINUX_CHROME',
    'LINUX_OPERA',
    'WINDOWS_FIREFOX',
    # 'WINDOWS_CHROME',
    # 'WINDOWS_OPERA',
    # 'WINDOWS_EDGE',
    # 'WINDOWS_IE',
    # 'MAC_SAFARI',
    # 'MAC_FIREFOX',
    # 'MAC_CHROME',
    # 'MAC_OPERA',
    'ANDROID_CHROME',
    'ANDROID_FIREFOX',
    'ANDROID_OPERA',
    'ANDROID_SAMSUNG_BROWSER',
    'RANDOM'
]
# Pool of Firefox-on-Windows user-agent strings.
WINDOWS_FIREFOX_USER_AGENTS = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:59.0) Gecko/20100101 Firefox/59.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:56.0) Gecko/20100101 Firefox/56.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:55.0) Gecko/20100101 Firefox/55.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:56.0) Gecko/20100101 Firefox/56.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:58.0) Gecko/20100101 Firefox/58.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:59.0) Gecko/20100101 Firefox/59.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:59.0) Gecko/20100101 Firefox/59.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:53.0) Gecko/20100101 Firefox/53.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:57.0) Gecko/20100101 Firefox/57.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:56.0) Gecko/20100101 Firefox/56.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:55.0) Gecko/20100101 Firefox/55.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:55.0) Gecko/20100101 Firefox/55.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:57.0) Gecko/20100101 Firefox/57.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:58.0) Gecko/20100101 Firefox/58.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:58.0) Gecko/20100101 Firefox/58.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:54.0) Gecko/20100101 Firefox/54.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; rv:58.0) Gecko/20100101 Firefox/58.0',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:61.0) Gecko/20100101 Firefox/61.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:62.0) Gecko/20100101 Firefox/62.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:61.0) Gecko/20100101 Firefox/61.0',
    'Mozilla/5.0 (Windows NT 10.0; rv:53.0) Gecko/20100101 Firefox/53.0',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:59.0) Gecko/20100001 Firefox/59.0',
]
# Pool of Opera-on-Linux user-agent strings.
LINUX_OPERA_USER_AGENTS = [
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36 OPR/70.0.3728.133',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36 OPR/70.0.3728.71',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36 OPR/67.0.3575.53',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.100 Safari/537.36 OPR/67.0.3575.31',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36 OPR/67.0.3575.79',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36 OPR/66.0.3515.72',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.80 Safari/537.36 OPR/62.0.3331.14 (Edition beta),gzip(gfe)',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.28 Safari/537.36 OPR/61.0.3298.6 (Edition developer)',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36 OPR/60.0.3255.170',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36 OPR/60.0.3255.59',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36 OPR/60.0.3255.83',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 OPR/58.0.3135.47',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36 OPR/58.0.3135.90',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 OPR/57.0.3098.116',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 OPR/57.0.3098.76',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36 OPR/56.0.3051.31',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36 OPR/56.0.3051.104',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.162 Safari/537.36 OPR/52.0.2871.30',
    'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 OPR/45.0.2552.898'
]
# Pool of Chrome-on-Linux user-agent strings.
LINUX_CHROME_USER_AGENTS = [
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36',
    'Mozilla/5.0 (X11; Linux armv7l) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36',
    'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.94 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.100 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/80.0.3987.132 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.136 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.131 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3833.111 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3831.6 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3690.84 Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3690.144 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3690.84 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3800.1 Iron Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.140 Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.133 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.27 Safari/537.36',
    'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.97 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.99 Safari/537.36',
    'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.79 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/65.0.3325.109 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36',
    'Mozilla/5.0 (X11; Linux armv7l) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.89 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.101 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.109 Safari/537.36'
]
# Pool of Firefox-on-Linux user-agent strings.
LINUX_FIREFOX_USER_AGENTS = [
    'Mozilla/5.0 (X11; Linux x86_64; rv:80.0) Gecko/20100101 Firefox/80.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:79.0) Gecko/20100101 Firefox/79.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:75.0) Gecko/20100101 Firefox/75.0',
    'Mozilla/5.0 (X11; Linux i686; rv:74.0) Gecko/20100101 Firefox/74.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:74.0) Gecko/20100101 Firefox/74.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0',
    'Mozilla/5.0 (X11; Linux i686; rv:73.0) Gecko/20100101 Firefox/73.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0',
    'Mozilla/5.0 (X11; Linux i686; rv:72.0) Gecko/20100101 Firefox/72.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:68.0) Gecko/20100101 Firefox/68.0',
    'Mozilla/5.0 (X11; Linux i686; rv:67.0) Gecko/20100101 Firefox/67.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0',
    'Mozilla/5.0 (X11; Linux i686; rv:66.0) Gecko/20100101 Firefox/66.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0',
    'Mozilla/5.0 (X11; Linux i686; rv:63.0) Gecko/20100101 Firefox/63.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:63.0) Gecko/20100101 Firefox/63.0',
    'Mozilla/5.0 (X11; Linux i686; rv:62.0) Gecko/20100101 Firefox/62.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0',
    'Mozilla/5.0 (X11; Linux i686; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:59.0) Gecko/20100101 Firefox/59.0',
    'Mozilla/5.0 (X11; Linux i686; rv:57.0) Gecko/20100101 Firefox/57.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:55.0) Gecko/20100101 Firefox/55.0',
    'Mozilla/5.0 (X11; Linux i686; rv:52.0) Gecko/20100101 Firefox/52.0',
]
# Pool of crawler/bot user-agent strings.
BOT_USER_AGENTS = [
    'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
    'Mozilla/5.0 (TweetmemeBot/4.0; +http://datasift.com/bot.html) Gecko/20100101 Firefox/31.0',
    'Mozilla/5.0 (compatible; AhrefsBot/7.0; +http://ahrefs.com/robot/)',
    'Mozilla/5.0 (compatible; SemrushBot/6~bl; +http://www.semrush.com/bot.html)',
    'Mozilla/5.0 (compatible; coccocbot-web/1.0; +http://help.coccoc.com/searchengine)',
    'Mozilla/5.0 (compatible; AmazonAdBot/1.0; +https://adbot.amazon.com)',
    'Mozilla/5.0 (compatible;contxbot/1.0)',
    'Mozilla/5.0 (compatible; SemrushBot/1.0~bm; +http://www.semrush.com/bot.html)',
    'Mozilla/5.0 (compatible; Go-http-client/1.1; +centurybot9@gmail.com)',
    'Mozilla/5.0 (compatible; Googlebot/2.1; startmebot/1.0; +https://start.me/bot)',
    'Mozilla/5.0 (compatible; SeznamBot/3.2; +http://napoveda.seznam.cz/en/seznambot-intro/)',
    'Mozilla/5.0 (compatible; MJ12bot/v1.4.8; http://mj12bot.com/)',
    'Mozilla/5.0 (compatible; coccocbot-image/1.0; +http://help.coccoc.com/searchengine)',
    'Mozilla/5.0 (compatible; AhrefsBot/6.1; +http://ahrefs.com/robot/)',
    'Mozilla/5.0 (compatible; Linux x86_64; Mail.RU_Bot/2.0; +http://go.mail.ru/help/robots)',
    'Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)',
    'Mozilla/5.0 (compatible; SemrushBot/3~bl; +http://www.semrush.com/bot.html)',
    'Mozilla/5.0 (compatible; Qwantify/Bleriot/1.1; +https://help.qwant.com/bot)',
    'Mozilla/5.0 (compatible; DotBot/1.1; http://www.opensiteexplorer.org/dotbot, help@moz.com)',
    'Mozilla/5.0 (compatible; Exabot/3.0; +http://www.exabot.com/go/robot)',
    'Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)',
    'DuckDuckBot/1.0; (+http://duckduckgo.com/duckduckbot.html)',
    'Mozilla/5.0 (compatible; Bingbot/2.0; +http://www.bing.com/bingbot.htm)',
    'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
]
# Pool of Samsung Internet browser (Android) user-agent strings.
ANDROID_SAMSUNG_BROWSER_USER_AGENTS = [
    'Mozilla/5.0 (Linux; Android 10; SAMSUNG SM-G975U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/11.0 Chrome/75.0.3770.143 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-S367VL) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-A205U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-A102U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-S767VL) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-N960U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G955U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-N950U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G965U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G950U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G960U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.2 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-S767VL) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-N960U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G955U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-N950U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G965U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G950U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G960U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/10.1 Chrome/71.0.3578.99 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G950U Build/PPR1.180610.011) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/9.4 Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G960U Build/PPR1.180610.011) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/9.4 Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G950U Build/PPR1.180610.011) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/9.2 Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G950F Build/PPR1.180610.011) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/9.2 Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G960U Build/PPR1.180610.011) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/9.2 Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SAMSUNG SM-G960F Build/PPR1.180610.011) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/9.2 Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SAMSUNG SM-A520F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SAMSUNG SM-G935F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SAMSUNG SM-G955F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SAMSUNG SM-G930F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SAMSUNG SM-J500M Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SAMSUNG SM-G532M Build/MMB29T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SAMSUNG SM-G570M Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SAMSUNG SM-G950F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SAMSUNG SM-G610M Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/7.4 Chrome/59.0.3071.125 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SAMSUNG SM-J701MT Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/6.4 Chrome/56.0.2924.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SAMSUNG SM-N920C Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/6.2 Chrome/56.0.2924.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SAMSUNG SM-G955U Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/5.4 Chrome/51.0.2704.106 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SAMSUNG SM-G950F Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/5.2 Chrome/51.0.2704.106 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SAMSUNG SM-G532M Build/MMB29T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/4.2 Chrome/44.0.2403.133 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SAMSUNG SM-G900F Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/4.0 Chrome/44.0.2403.133 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SAMSUNG SM-G920F Build/MMB29K) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/4.0 Chrome/44.0.2403.133 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.1.1; SAMSUNG SM-G925F Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/4.0 Chrome/51.0.2704.81 Mobile Safari/537.36',
]
# Pool of Firefox-on-Android user-agent strings.
ANDROID_FIREFOX_USER_AGENTS = [
    'Mozilla/5.0 (Android 10; Mobile; rv:79.0) Gecko/79.0 Firefox/79.0',
    'Mozilla/5.0 (Android 10; Mobile; rv:78.0) Gecko/78.0 Firefox/78.0',
    'Mozilla/5.0 (Android 9; Mobile; rv:68.6.0) Gecko/68.6.0 Firefox/68.6.0',
    'Mozilla/5.0 (Android 7.1.2; Mobile; rv:68.4.2) Gecko/68.4.2 Firefox/68.4.2',
    'Mozilla/5.0 (Android 4.4.4; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 5.1.1; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 7.1.1; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 5.1.1; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 8.1.0; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 7.1.2; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 8.0.0; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 6.0.1; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 8.1.0; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 9; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 7.0; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 6.0; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 5.0.1; Tablet; rv:68.0) Gecko/68.0 Firefox/68.0',
    'Mozilla/5.0 (Android 8.0.0; Tablet; rv:66.0) Gecko/66.0 Firefox/66.0',
    'Mozilla/5.0 (Android 5.1.1; Mobile; rv:66.0) Gecko/66.0 Firefox/66.0',
    'Mozilla/5.0 (Android 8.1.0; Mobile; rv:66.0) Gecko/66.0 Firefox/66.0',
    'Mozilla/5.0 (Android 8.0.0; Mobile; rv:66.0) Gecko/66.0 Firefox/66.0',
    'Mozilla/5.0 (Android 5.1.1; Tablet; rv:66.0) Gecko/66.0 Firefox/66.0',
    'Mozilla/5.0 (Android 7.1.2; Tablet; rv:65.0) Gecko/65.0 Firefox/65.0',
    'Mozilla/5.0 (Android 6.0; Mobile; rv:65.0) Gecko/65.0 Firefox/65.0',
    'Mozilla/5.0 (Android 9; Mobile; rv:65.0) Gecko/65.0 Firefox/65.0',
    'Mozilla/5.0 (Android 8.1.0; Mobile; rv:65.0) Gecko/65.0 Firefox/65.0',
    'Mozilla/5.0 (Android 7.0; Mobile; rv:65.0) Gecko/65.0 Firefox/65.0',
    'Mozilla/5.0 (Android 8.0.0; Mobile; rv:65.0) Gecko/65.0 Firefox/65.0',
    'Mozilla/5.0 (Android 6.0.1; Tablet; rv:64.0) Gecko/64.0 Firefox/64.0',
    'Mozilla/5.0 (Android 4.4.4; Mobile; rv:64.0) Gecko/64.0 Firefox/64.0',
    'Mozilla/5.0 (Android 8.1.0; Mobile; rv:62.0) Gecko/62.0 Firefox/62.0',
    'Mozilla/5.0 (Android 8.1.0; Mobile; rv:61.0) Gecko/61.0 Firefox/61.0',
    'Mozilla/5.0 (Android 5.0.2; Mobile; rv:61.0) Gecko/61.0 Firefox/61.0',
    'Mozilla/5.0 (Android 7.0; Mobile; rv:61.0) Gecko/61.0 Firefox/61.0',
    'Mozilla/5.0 (Android 4.4.2; Mobile; rv:61.0) Gecko/61.0 Firefox/61.0',
    'Mozilla/5.0 (Android 5.1; Mobile; rv:60.0) Gecko/60.0 Firefox/60.0',
    'Mozilla/5.0 (Android 7.0; Mobile; rv:60.0) Gecko/60.0 Firefox/60.0',
    'Mozilla/5.0 (Android 7.0; Mobile; rv:57.0) Gecko/57.0 Firefox/57.0',
    'Mozilla/5.0 (Android 7.0; Mobile; rv:54.0) Gecko/54.0 Firefox/54.0',
]
# Pool of Chrome-on-Android user-agent strings.
ANDROID_CHROME_USER_AGENTS = [
    'Mozilla/5.0 (Linux; Android 10; SM-G975U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.93 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; LM-Q720) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.116 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; moto e5 play) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.93 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.1.0; LM-Q710(FGN)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.93 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 5.1.1; SM-G531M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.93 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SM-A105M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.136 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.1.0; LG-Q710AL) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SM-G973U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; moto e5 play) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 10; Infinix X604) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.116 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SM-G570M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 10; POCO X2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.1.0; itel W4003) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; ifive mini 4S) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Safari/537.36',
    'Mozilla/5.0 (Linux; Android 5.1.1; Lenovo A6020a46) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.1.0; SM-J710MN) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.92 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Android SDK built for x86) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.111 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SM-G930F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.101 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SM-G960U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; SM-G950F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0; Tiger) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.136 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; moto g(7) power) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.90 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SM-G950F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.80 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SM-G950F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.80 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 9; moto g(7) power) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.90 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SM-G610M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.64 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.1.0; SM-G610M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; Moto G (4) Build/NPJS25.93-14-8.1-9) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Moto G (5S) Plus Build/NPSS26.116-64-11) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0; LG-K350 Build/MRA58K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; Moto Z2 Play Build/OPSS27.76-12-25-7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 5.1.1; SM-J111M Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0; MotoG3 Build/MPIS24.65-33.1-2-16) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.91 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Moto G Play Build/NPIS26.48-43-2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.91 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM-G532M Build/MMB29T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.91 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM-J500M Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.91 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Moto G (5S) Build/NPPS26.102-49-11) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.91 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.1.0; INE-LX2 Build/HUAWEIINE-LX2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.91 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Moto G Play Build/NPIS26.48-43-2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM-G532M Build/MMB29T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; Moto G (5) Build/NPPS25.137-93-2-3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 8.0.0; SM-G950F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 4.4.4; GT-I9060I Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.0; SM-G935F Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.158 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Moto G (5S) Build/NPPS26.102-49-8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.126 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM-J500M Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.126 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM-J700M Build/MMB29K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.109 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; Moto G (5S) Build/NPPS26.102-49-4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.109 Mobile Safari/537.36',
]
# Pool of Opera-on-Android user-agent strings (only one entry so far).
ANDROID_OPERA_USER_AGENTS = [
    'Mozilla/5.0 (Linux; Android 7.0; SM-A310F Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.91 Mobile Safari/537.36 OPR/42.7.2246.114996'
]
def RandomUserAgent():
    """Return one user-agent string from a randomly chosen category.

    Picks a category key from VALID_USER_AGENTS — skipping the special
    'RANDOM' sentinel, which maps back to this function in
    USER_AGENT_HEADERS — and returns a random user-agent string from
    that category's list.

    Returns:
        str: a randomly chosen user-agent header value.

    Raises:
        IndexError: if VALID_USER_AGENTS contains no key other than
            'RANDOM' (the previous rejection-sampling loop would have
            spun forever in that case).
    """
    # Filter the sentinel out once instead of rejection-sampling in a
    # `while True` loop: the distribution over the remaining categories
    # is identical and termination is guaranteed.
    categories = [key for key in VALID_USER_AGENTS if key != 'RANDOM']
    return random.choice(USER_AGENT_HEADERS[random.choice(categories)])
# Each user-agent family yields different search results, so querying
# across several of them gives us more results overall.
USER_AGENT_HEADERS = dict(
    WINDOWS_FIREFOX=WINDOWS_FIREFOX_USER_AGENTS,
    LINUX_FIREFOX=LINUX_FIREFOX_USER_AGENTS,
    LINUX_CHROME=LINUX_CHROME_USER_AGENTS,
    LINUX_OPERA=LINUX_OPERA_USER_AGENTS,
    BOT=BOT_USER_AGENTS,
    ANDROID_SAMSUNG_BROWSER=ANDROID_SAMSUNG_BROWSER_USER_AGENTS,
    ANDROID_FIREFOX=ANDROID_FIREFOX_USER_AGENTS,
    ANDROID_CHROME=ANDROID_CHROME_USER_AGENTS,
    ANDROID_OPERA=ANDROID_OPERA_USER_AGENTS,
    RANDOM=RandomUserAgent,
)
| 75.086777
| 176
| 0.691899
| 6,586
| 36,342
| 3.784391
| 0.078955
| 0.068207
| 0.104357
| 0.143235
| 0.859052
| 0.839953
| 0.834096
| 0.824426
| 0.808377
| 0.79273
| 0
| 0.220439
| 0.134087
| 36,342
| 483
| 177
| 75.242236
| 0.571578
| 0.019812
| 0
| 0.022573
| 0
| 0.650113
| 0.873433
| 0.013432
| 0
| 0
| 0
| 0.00207
| 0
| 1
| 0.002257
| false
| 0
| 0.004515
| 0
| 0.009029
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
990661edcbea961778649465705b276e44b1e70e
| 1,040
|
py
|
Python
|
tmh/foo.py
|
jimregan/tmh
|
a8d8e9421f24a84cde724d5f30b3f140aff12b19
|
[
"MIT"
] | 6
|
2021-09-09T16:19:45.000Z
|
2022-03-16T18:18:36.000Z
|
tmh/foo.py
|
jimregan/tmh
|
a8d8e9421f24a84cde724d5f30b3f140aff12b19
|
[
"MIT"
] | 3
|
2021-09-09T17:43:18.000Z
|
2021-09-28T12:55:48.000Z
|
tmh/foo.py
|
jimregan/tmh
|
a8d8e9421f24a84cde724d5f30b3f140aff12b19
|
[
"MIT"
] | 4
|
2021-09-09T16:19:47.000Z
|
2022-03-11T14:33:26.000Z
|
The VAD determines if a given signal is speech or non-speech.
The VAD is based on the energy of the signal.
The VAD is based on the signal energy.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD is based on the signal spectrum.
The VAD
| 20.8
| 61
| 0.761538
| 200
| 1,040
| 3.96
| 0.08
| 0.189394
| 0.232323
| 0.377525
| 0.909091
| 0.909091
| 0.886364
| 0.856061
| 0.856061
| 0.856061
| 0
| 0
| 0.214423
| 1,040
| 50
| 62
| 20.8
| 0.9694
| 0
| 0
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
54fb5ce4e57c54dcc085ab73c70a7da1a33d3172
| 43,428
|
py
|
Python
|
spark_fhir_schemas/r4/resources/appointment.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
spark_fhir_schemas/r4/resources/appointment.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
spark_fhir_schemas/r4/resources/appointment.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
from typing import Union, List, Optional
from pyspark.sql.types import StructType, StructField, StringType, ArrayType, DataType
# This file is auto-generated by generate_schema so do not edit it manually
# noinspection PyPep8Naming
class AppointmentSchema:
    """
    A booking of a healthcare event among patient(s), practitioner(s), related
    person(s) and/or device(s) for a specific date/time. This may result in one or
    more Encounter(s).
    """

    # noinspection PyDefaultArgument
    @staticmethod
    def get_schema(
        max_nesting_depth: Optional[int] = 6,
        nesting_depth: int = 0,
        # NOTE(review): mutable default argument; safe here only because this
        # method never mutates it — it is read via .count() and list-concat.
        nesting_list: List[str] = [],
        max_recursion_limit: Optional[int] = 2,
        include_extension: Optional[bool] = False,
        extension_fields: Optional[List[str]] = None,
        extension_depth: int = 0,
        max_extension_depth: Optional[int] = 2,
        include_modifierExtension: Optional[bool] = False,
        use_date_for: Optional[List[str]] = None,
        parent_path: Optional[str] = "",
    ) -> Union[StructType, DataType]:
        """
        A booking of a healthcare event among patient(s), practitioner(s), related
        person(s) and/or device(s) for a specific date/time. This may result in one or
        more Encounter(s).

        resourceType: This is a Appointment resource
        id: The logical id of the resource, as used in the URL for the resource. Once
        assigned, this value never changes.
        meta: The metadata about the resource. This is content that is maintained by the
        infrastructure. Changes to the content might not always be associated with
        version changes to the resource.
        implicitRules: A reference to a set of rules that were followed when the resource was
        constructed, and which must be understood when processing the content. Often,
        this is a reference to an implementation guide that defines the special rules
        along with other profiles etc.
        language: The base language in which the resource is written.
        text: A human-readable narrative that contains a summary of the resource and can be
        used to represent the content of the resource to a human. The narrative need
        not encode all the structured data, but is required to contain sufficient
        detail to make it "clinically safe" for a human to just read the narrative.
        Resource definitions may define what content should be represented in the
        narrative to ensure clinical safety.
        contained: These resources do not have an independent existence apart from the resource
        that contains them - they cannot be identified independently, and nor can they
        have their own independent transaction scope.
        extension: May be used to represent additional information that is not part of the basic
        definition of the resource. To make the use of extensions safe and manageable,
        there is a strict set of governance applied to the definition and use of
        extensions. Though any implementer can define an extension, there is a set of
        requirements that SHALL be met as part of the definition of the extension.
        modifierExtension: May be used to represent additional information that is not part of the basic
        definition of the resource and that modifies the understanding of the element
        that contains it and/or the understanding of the containing element's
        descendants. Usually modifier elements provide negation or qualification. To
        make the use of extensions safe and manageable, there is a strict set of
        governance applied to the definition and use of extensions. Though any
        implementer is allowed to define an extension, there is a set of requirements
        that SHALL be met as part of the definition of the extension. Applications
        processing a resource are required to check for modifier extensions.
        Modifier extensions SHALL NOT change the meaning of any elements on Resource
        or DomainResource (including cannot change the meaning of modifierExtension
        itself).
        identifier: This records identifiers associated with this appointment concern that are
        defined by business processes and/or used to refer to it when a direct URL
        reference to the resource itself is not appropriate (e.g. in CDA documents, or
        in written / printed documentation).
        status: The overall status of the Appointment. Each of the participants has their own
        participation status which indicates their involvement in the process, however
        this status indicates the shared status.
        cancelationReason: The coded reason for the appointment being cancelled. This is often used in
        reporting/billing/futher processing to determine if further actions are
        required, or specific fees apply.
        serviceCategory: A broad categorization of the service that is to be performed during this
        appointment.
        serviceType: The specific service that is to be performed during this appointment.
        specialty: The specialty of a practitioner that would be required to perform the service
        requested in this appointment.
        appointmentType: The style of appointment or patient that has been booked in the slot (not
        service type).
        reasonCode: The coded reason that this appointment is being scheduled. This is more
        clinical than administrative.
        reasonReference: Reason the appointment has been scheduled to take place, as specified using
        information from another resource. When the patient arrives and the encounter
        begins it may be used as the admission diagnosis. The indication will
        typically be a Condition (with other resources referenced in the
        evidence.detail), or a Procedure.
        priority: The priority of the appointment. Can be used to make informed decisions if
        needing to re-prioritize appointments. (The iCal Standard specifies 0 as
        undefined, 1 as highest, 9 as lowest priority).
        description: The brief description of the appointment as would be shown on a subject line
        in a meeting request, or appointment list. Detailed or expanded information
        should be put in the comment field.
        supportingInformation: Additional information to support the appointment provided when making the
        appointment.
        start: Date/Time that the appointment is to take place.
        end: Date/Time that the appointment is to conclude.
        minutesDuration: Number of minutes that the appointment is to take. This can be less than the
        duration between the start and end times. For example, where the actual time
        of appointment is only an estimate or if a 30 minute appointment is being
        requested, but any time would work. Also, if there is, for example, a planned
        15 minute break in the middle of a long appointment, the duration may be 15
        minutes less than the difference between the start and end.
        slot: The slots from the participants' schedules that will be filled by the
        appointment.
        created: The date that this appointment was initially created. This could be different
        to the meta.lastModified value on the initial entry, as this could have been
        before the resource was created on the FHIR server, and should remain
        unchanged over the lifespan of the appointment.
        comment: Additional comments about the appointment.
        patientInstruction: While Appointment.comment contains information for internal use,
        Appointment.patientInstructions is used to capture patient facing information
        about the Appointment (e.g. please bring your referral or fast from 8pm night
        before).
        basedOn: The service request this appointment is allocated to assess (e.g. incoming
        referral or procedure request).
        participant: List of participants involved in the appointment.
        requestedPeriod: A set of date ranges (potentially including times) that the appointment is
        preferred to be scheduled within.

        The duration (usually in minutes) could also be provided to indicate the
        length of the appointment to fill and populate the start/end times for the
        actual allocated time. However, in other situations the duration may be
        calculated by the scheduling system.
        """
        if extension_fields is None:
            extension_fields = [
                "valueBoolean",
                "valueCode",
                "valueDate",
                "valueDateTime",
                "valueDecimal",
                "valueId",
                "valueInteger",
                "valuePositiveInt",
                "valueString",
                "valueTime",
                "valueUnsignedInt",
                "valueUri",
                "valueUrl",
                "valueReference",
                "valueCodeableConcept",
                "valueAddress",
            ]
        # NOTE(review): function-scope imports — presumably to avoid circular
        # imports between the generated schema modules; TODO confirm.
        from spark_fhir_schemas.r4.simple_types.id import idSchema
        from spark_fhir_schemas.r4.complex_types.meta import MetaSchema
        from spark_fhir_schemas.r4.simple_types.uri import uriSchema
        from spark_fhir_schemas.r4.simple_types.code import codeSchema
        from spark_fhir_schemas.r4.complex_types.narrative import NarrativeSchema
        from spark_fhir_schemas.r4.complex_types.resourcelist import ResourceListSchema
        from spark_fhir_schemas.r4.complex_types.extension import ExtensionSchema
        from spark_fhir_schemas.r4.complex_types.identifier import IdentifierSchema
        from spark_fhir_schemas.r4.complex_types.codeableconcept import (
            CodeableConceptSchema,
        )
        from spark_fhir_schemas.r4.complex_types.reference import ReferenceSchema
        from spark_fhir_schemas.r4.simple_types.unsignedint import unsignedIntSchema
        from spark_fhir_schemas.r4.simple_types.instant import instantSchema
        from spark_fhir_schemas.r4.simple_types.positiveint import positiveIntSchema
        from spark_fhir_schemas.r4.simple_types.datetime import dateTimeSchema
        from spark_fhir_schemas.r4.complex_types.appointment_participant import (
            Appointment_ParticipantSchema,
        )
        from spark_fhir_schemas.r4.complex_types.period import PeriodSchema

        # Recursion guard: once "Appointment" has been seen max_recursion_limit
        # times in the nesting chain, or the overall nesting is too deep,
        # collapse this resource to a minimal {"id": string} struct.
        if (
            max_recursion_limit
            and nesting_list.count("Appointment") >= max_recursion_limit
        ) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
            return StructType([StructField("id", StringType(), True)])
        # add my name to recursion list for later
        my_nesting_list: List[str] = nesting_list + ["Appointment"]
        my_parent_path = parent_path + ".appointment" if parent_path else "appointment"
        # Build the full Appointment struct; every nested schema call threads
        # the same depth/extension bookkeeping parameters through.
        schema = StructType(
            [
                # This is a Appointment resource
                StructField("resourceType", StringType(), True),
                # The logical id of the resource, as used in the URL for the resource. Once
                # assigned, this value never changes.
                StructField(
                    "id",
                    idSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".id",
                    ),
                    True,
                ),
                # The metadata about the resource. This is content that is maintained by the
                # infrastructure. Changes to the content might not always be associated with
                # version changes to the resource.
                StructField(
                    "meta",
                    MetaSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # A reference to a set of rules that were followed when the resource was
                # constructed, and which must be understood when processing the content. Often,
                # this is a reference to an implementation guide that defines the special rules
                # along with other profiles etc.
                StructField(
                    "implicitRules",
                    uriSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".implicitrules",
                    ),
                    True,
                ),
                # The base language in which the resource is written.
                StructField(
                    "language",
                    codeSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".language",
                    ),
                    True,
                ),
                # A human-readable narrative that contains a summary of the resource and can be
                # used to represent the content of the resource to a human. The narrative need
                # not encode all the structured data, but is required to contain sufficient
                # detail to make it "clinically safe" for a human to just read the narrative.
                # Resource definitions may define what content should be represented in the
                # narrative to ensure clinical safety.
                StructField(
                    "text",
                    NarrativeSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # These resources do not have an independent existence apart from the resource
                # that contains them - they cannot be identified independently, and nor can they
                # have their own independent transaction scope.
                StructField(
                    "contained",
                    ArrayType(
                        ResourceListSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # May be used to represent additional information that is not part of the basic
                # definition of the resource. To make the use of extensions safe and manageable,
                # there is a strict set of governance applied to the definition and use of
                # extensions. Though any implementer can define an extension, there is a set of
                # requirements that SHALL be met as part of the definition of the extension.
                StructField(
                    "extension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # May be used to represent additional information that is not part of the basic
                # definition of the resource and that modifies the understanding of the element
                # that contains it and/or the understanding of the containing element's
                # descendants. Usually modifier elements provide negation or qualification. To
                # make the use of extensions safe and manageable, there is a strict set of
                # governance applied to the definition and use of extensions. Though any
                # implementer is allowed to define an extension, there is a set of requirements
                # that SHALL be met as part of the definition of the extension. Applications
                # processing a resource are required to check for modifier extensions.
                #
                # Modifier extensions SHALL NOT change the meaning of any elements on Resource
                # or DomainResource (including cannot change the meaning of modifierExtension
                # itself).
                StructField(
                    "modifierExtension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # This records identifiers associated with this appointment concern that are
                # defined by business processes and/or used to refer to it when a direct URL
                # reference to the resource itself is not appropriate (e.g. in CDA documents, or
                # in written / printed documentation).
                StructField(
                    "identifier",
                    ArrayType(
                        IdentifierSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # The overall status of the Appointment. Each of the participants has their own
                # participation status which indicates their involvement in the process, however
                # this status indicates the shared status.
                StructField("status", StringType(), True),
                # The coded reason for the appointment being cancelled. This is often used in
                # reporting/billing/futher processing to determine if further actions are
                # required, or specific fees apply.
                StructField(
                    "cancelationReason",
                    CodeableConceptSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # A broad categorization of the service that is to be performed during this
                # appointment.
                StructField(
                    "serviceCategory",
                    ArrayType(
                        CodeableConceptSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # The specific service that is to be performed during this appointment.
                StructField(
                    "serviceType",
                    ArrayType(
                        CodeableConceptSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # The specialty of a practitioner that would be required to perform the service
                # requested in this appointment.
                StructField(
                    "specialty",
                    ArrayType(
                        CodeableConceptSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # The style of appointment or patient that has been booked in the slot (not
                # service type).
                StructField(
                    "appointmentType",
                    CodeableConceptSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path,
                    ),
                    True,
                ),
                # The coded reason that this appointment is being scheduled. This is more
                # clinical than administrative.
                StructField(
                    "reasonCode",
                    ArrayType(
                        CodeableConceptSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # Reason the appointment has been scheduled to take place, as specified using
                # information from another resource. When the patient arrives and the encounter
                # begins it may be used as the admission diagnosis. The indication will
                # typically be a Condition (with other resources referenced in the
                # evidence.detail), or a Procedure.
                StructField(
                    "reasonReference",
                    ArrayType(
                        ReferenceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # The priority of the appointment. Can be used to make informed decisions if
                # needing to re-prioritize appointments. (The iCal Standard specifies 0 as
                # undefined, 1 as highest, 9 as lowest priority).
                StructField(
                    "priority",
                    unsignedIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".priority",
                    ),
                    True,
                ),
                # The brief description of the appointment as would be shown on a subject line
                # in a meeting request, or appointment list. Detailed or expanded information
                # should be put in the comment field.
                StructField("description", StringType(), True),
                # Additional information to support the appointment provided when making the
                # appointment.
                StructField(
                    "supportingInformation",
                    ArrayType(
                        ReferenceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # Date/Time that the appointment is to take place.
                StructField(
                    "start",
                    instantSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".start",
                    ),
                    True,
                ),
                # Date/Time that the appointment is to conclude.
                StructField(
                    "end",
                    instantSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".end",
                    ),
                    True,
                ),
                # Number of minutes that the appointment is to take. This can be less than the
                # duration between the start and end times. For example, where the actual time
                # of appointment is only an estimate or if a 30 minute appointment is being
                # requested, but any time would work. Also, if there is, for example, a planned
                # 15 minute break in the middle of a long appointment, the duration may be 15
                # minutes less than the difference between the start and end.
                StructField(
                    "minutesDuration",
                    positiveIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".minutesduration",
                    ),
                    True,
                ),
                # The slots from the participants' schedules that will be filled by the
                # appointment.
                StructField(
                    "slot",
                    ArrayType(
                        ReferenceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # The date that this appointment was initially created. This could be different
                # to the meta.lastModified value on the initial entry, as this could have been
                # before the resource was created on the FHIR server, and should remain
                # unchanged over the lifespan of the appointment.
                StructField(
                    "created",
                    dateTimeSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                        use_date_for=use_date_for,
                        parent_path=my_parent_path + ".created",
                    ),
                    True,
                ),
                # Additional comments about the appointment.
                StructField("comment", StringType(), True),
                # While Appointment.comment contains information for internal use,
                # Appointment.patientInstructions is used to capture patient facing information
                # about the Appointment (e.g. please bring your referral or fast from 8pm night
                # before).
                StructField("patientInstruction", StringType(), True),
                # The service request this appointment is allocated to assess (e.g. incoming
                # referral or procedure request).
                StructField(
                    "basedOn",
                    ArrayType(
                        ReferenceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # List of participants involved in the appointment.
                StructField(
                    "participant",
                    ArrayType(
                        Appointment_ParticipantSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
                # A set of date ranges (potentially including times) that the appointment is
                # preferred to be scheduled within.
                #
                # The duration (usually in minutes) could also be provided to indicate the
                # length of the appointment to fill and populate the start/end times for the
                # actual allocated time. However, in other situations the duration may be
                # calculated by the scheduling system.
                StructField(
                    "requestedPeriod",
                    ArrayType(
                        PeriodSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                            use_date_for=use_date_for,
                            parent_path=my_parent_path,
                        )
                    ),
                    True,
                ),
            ]
        )
        # When extensions are excluded, keep the column but collapse the
        # complex "extension" field to a plain string placeholder.
        if not include_extension:
            schema.fields = [
                c
                if c.name != "extension"
                else StructField("extension", StringType(), True)
                for c in schema.fields
            ]
        # Same treatment for "modifierExtension" when it is excluded.
        if not include_modifierExtension:
            schema.fields = [
                c
                if c.name != "modifierExtension"
                else StructField("modifierExtension", StringType(), True)
                for c in schema.fields
            ]
        return schema
| 53.614815
| 105
| 0.553122
| 4,074
| 43,428
| 5.663721
| 0.112911
| 0.056687
| 0.035755
| 0.054087
| 0.869637
| 0.861966
| 0.860232
| 0.830545
| 0.827078
| 0.820057
| 0
| 0.00311
| 0.40764
| 43,428
| 809
| 106
| 53.681088
| 0.893839
| 0.306507
| 0
| 0.743268
| 1
| 0
| 0.023102
| 0.00072
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001795
| false
| 0
| 0.032316
| 0
| 0.039497
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0701318c50ea2db26cd503c1964a192a27a3b6a8
| 8,172
|
py
|
Python
|
api/migrations/0004_disasterneighborhoods_disasterneighborhoodview.py
|
hackoregon/disaster-resilience-backend
|
7776ca37bc50ef79e8bbf0830b6ca4b798f0df9f
|
[
"MIT"
] | 2
|
2018-04-27T09:10:08.000Z
|
2018-05-01T08:38:29.000Z
|
api/migrations/0004_disasterneighborhoods_disasterneighborhoodview.py
|
hackoregon/disaster-resilience-backend
|
7776ca37bc50ef79e8bbf0830b6ca4b798f0df9f
|
[
"MIT"
] | 21
|
2018-05-27T23:51:40.000Z
|
2021-06-10T20:15:17.000Z
|
api/migrations/0004_disasterneighborhoods_disasterneighborhoodview.py
|
hackoregon/disaster-resilience-backend
|
7776ca37bc50ef79e8bbf0830b6ca4b798f0df9f
|
[
"MIT"
] | 3
|
2018-04-27T09:11:06.000Z
|
2019-03-10T19:32:26.000Z
|
# Generated by Django 2.0.1 on 2018-06-13 23:58
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration (Django 2.0.1). Both models are declared with
    # managed=False, so Django records them in migration state but emits no
    # DDL for them: the underlying table/view presumably already exist in the
    # database (created outside Django, e.g. by a GIS import) -- TODO confirm.

    dependencies = [
        ('api', '0003_poi'),
    ]

    operations = [
        migrations.CreateModel(
            name='DisasterNeighborhoods',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('wkb_geometry', models.TextField(blank=True, null=True)),
                # pgv_*/pgd_* statistics columns are stored as free text
                # (CharField), not numerics.
                ('pgv_site_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_total_wet_mean', models.CharField(blank=True, max_length=255, null=True)),
                # The *_mmi columns are BigIntegerField here, but IntegerField
                # on the view model below -- the two intentionally differ.
                ('pgv_site_min_mmi', models.BigIntegerField(blank=True, null=True)),
                ('pgv_site_max_mmi', models.BigIntegerField(blank=True, null=True)),
                ('pgv_site_mean_mmi', models.BigIntegerField(blank=True, null=True)),
                ('pgd_landslide_dry_min_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_max_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_min_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_max_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_min_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_max_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_total_wet_mean_di', models.CharField(blank=True, max_length=255, null=True)),
            ],
            options={
                'db_table': 'disaster_neighborhoods',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DisasterNeighborhoodView',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('wkb_geometry', models.TextField(blank=True, null=True)),
                ('pgv_site_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_count', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_max', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_min', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_std', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_total_wet_mean', models.CharField(blank=True, max_length=255, null=True)),
                ('pgv_site_min_mmi', models.IntegerField(blank=True, null=True)),
                ('pgv_site_max_mmi', models.IntegerField(blank=True, null=True)),
                ('pgv_site_mean_mmi', models.IntegerField(blank=True, null=True)),
                ('pgd_landslide_dry_min_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_max_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_dry_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_min_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_max_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_landslide_wet_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_min_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_max_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_liquefaction_wet_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                ('pgd_total_wet_mean_di', models.CharField(blank=True, max_length=255, null=True)),
                # Extra text renderings of the MMI values, present only on the
                # view model.
                ('pgv_site_min_mmi_txt', models.TextField(blank=True, null=True)),
                ('pgv_site_max_mmi_txt', models.TextField(blank=True, null=True)),
                ('pgv_site_mean_mmi_txt', models.TextField(blank=True, null=True)),
            ],
            options={
                'db_table': 'disaster_neighborhood_view',
                'managed': False,
            },
        ),
    ]
| 74.972477
| 106
| 0.639134
| 1,013
| 8,172
| 4.859822
| 0.068115
| 0.137112
| 0.260004
| 0.312005
| 0.942921
| 0.942921
| 0.933983
| 0.928499
| 0.919358
| 0.878732
| 0
| 0.033213
| 0.222589
| 8,172
| 108
| 107
| 75.666667
| 0.741697
| 0.005507
| 0
| 0.784314
| 1
| 0
| 0.205169
| 0.158769
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009804
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0760c9c2b20d9e36da5a12f101e019e3ac49b587
| 10,419
|
py
|
Python
|
test/test_descriptor.py
|
prusnak/HWI
|
f83f9815c56948b59ce65986ed3844bc8fae7f06
|
[
"MIT"
] | 2
|
2021-04-28T21:04:46.000Z
|
2021-04-28T21:04:48.000Z
|
test/test_descriptor.py
|
Sjors/HWI
|
b3b9f8818d9a851e9a88368f83de77ce504c522c
|
[
"MIT"
] | null | null | null |
test/test_descriptor.py
|
Sjors/HWI
|
b3b9f8818d9a851e9a88368f83de77ce504c522c
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
from hwilib.descriptor import (
parse_descriptor,
MultisigDescriptor,
WPKHDescriptor,
WSHDescriptor,
)
import unittest
class TestDescriptor(unittest.TestCase):
    """Tests for hwilib.descriptor: parsing, origin handling, round-tripping
    and checksum validation of Bitcoin output descriptors."""

    def test_parse_descriptor_with_origin(self):
        descriptor = "wpkh([00000001/84'/1'/0']tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/0/0)"
        parsed = parse_descriptor(descriptor)
        self.assertIsInstance(parsed, WPKHDescriptor)
        key = parsed.pubkeys[0]
        self.assertEqual(key.origin.get_fingerprint_hex(), "00000001")
        self.assertEqual(key.origin.get_derivation_path(), "m/84'/1'/0'")
        self.assertEqual(key.pubkey, "tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B")
        self.assertEqual(key.deriv_path, "/0/0")
        # Serialization must round-trip to the original text.
        self.assertEqual(parsed.to_string_no_checksum(), descriptor)

    def test_parse_multisig_descriptor_with_origin(self):
        descriptor = "wsh(multi(2,[00000001/48'/0'/0'/2']tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/0/0,[00000002/48'/0'/0'/2']tpubDFHiBJDeNvqPWNJbzzxqDVXmJZoNn2GEtoVcFhMjXipQiorGUmps3e5ieDGbRrBPTFTh9TXEKJCwbAGW9uZnfrVPbMxxbFohuFzfT6VThty/0/0))"
        parsed = parse_descriptor(descriptor)
        self.assertIsInstance(parsed, WSHDescriptor)
        self.assertIsInstance(parsed.subdescriptor, MultisigDescriptor)
        expected = [
            ("00000001", "tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B"),
            ("00000002", "tpubDFHiBJDeNvqPWNJbzzxqDVXmJZoNn2GEtoVcFhMjXipQiorGUmps3e5ieDGbRrBPTFTh9TXEKJCwbAGW9uZnfrVPbMxxbFohuFzfT6VThty"),
        ]
        # Both cosigner keys share the same derivation path and suffix.
        for key, (fingerprint, pubkey) in zip(parsed.subdescriptor.pubkeys, expected):
            self.assertEqual(key.origin.get_fingerprint_hex(), fingerprint)
            self.assertEqual(key.origin.get_derivation_path(), "m/48'/0'/0'/2'")
            self.assertEqual(key.pubkey, pubkey)
            self.assertEqual(key.deriv_path, "/0/0")
        self.assertEqual(parsed.to_string_no_checksum(), descriptor)

    def test_parse_descriptor_without_origin(self):
        descriptor = "wpkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/0/0)"
        parsed = parse_descriptor(descriptor)
        self.assertIsInstance(parsed, WPKHDescriptor)
        key = parsed.pubkeys[0]
        self.assertIsNone(key.origin)
        self.assertEqual(key.pubkey, "tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B")
        self.assertEqual(key.deriv_path, "/0/0")
        self.assertEqual(parsed.to_string_no_checksum(), descriptor)

    def test_parse_descriptor_with_origin_fingerprint_only(self):
        descriptor = "wpkh([00000001]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/0/0)"
        parsed = parse_descriptor(descriptor)
        self.assertIsInstance(parsed, WPKHDescriptor)
        key = parsed.pubkeys[0]
        self.assertEqual(key.origin.get_fingerprint_hex(), "00000001")
        # A bare-fingerprint origin carries an empty derivation path.
        self.assertEqual(len(key.origin.path), 0)
        self.assertEqual(key.pubkey, "tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B")
        self.assertEqual(key.deriv_path, "/0/0")
        self.assertEqual(parsed.to_string_no_checksum(), descriptor)

    def test_parse_descriptor_with_key_at_end_with_origin(self):
        descriptor = "wpkh([00000001/84'/1'/0'/0/0]0297dc3f4420402e01a113984311bf4a1b8de376cac0bdcfaf1b3ac81f13433c7)"
        parsed = parse_descriptor(descriptor)
        self.assertIsInstance(parsed, WPKHDescriptor)
        key = parsed.pubkeys[0]
        self.assertEqual(key.origin.get_fingerprint_hex(), "00000001")
        self.assertEqual(key.origin.get_derivation_path(), "m/84'/1'/0'/0/0")
        self.assertEqual(key.pubkey, "0297dc3f4420402e01a113984311bf4a1b8de376cac0bdcfaf1b3ac81f13433c7")
        # A fully-derived key has no further derivation suffix.
        self.assertIsNone(key.deriv_path)
        self.assertEqual(parsed.to_string_no_checksum(), descriptor)

    def test_parse_descriptor_with_key_at_end_without_origin(self):
        descriptor = "wpkh(0297dc3f4420402e01a113984311bf4a1b8de376cac0bdcfaf1b3ac81f13433c7)"
        parsed = parse_descriptor(descriptor)
        self.assertIsInstance(parsed, WPKHDescriptor)
        key = parsed.pubkeys[0]
        self.assertIsNone(key.origin)
        self.assertEqual(key.pubkey, "0297dc3f4420402e01a113984311bf4a1b8de376cac0bdcfaf1b3ac81f13433c7")
        self.assertIsNone(key.deriv_path)
        self.assertEqual(parsed.to_string_no_checksum(), descriptor)

    def test_parse_empty_descriptor(self):
        # An empty string is not a valid descriptor.
        with self.assertRaises(ValueError):
            parse_descriptor("")

    def test_parse_descriptor_replace_h(self):
        # 'h' hardened markers must be normalized to apostrophes.
        descriptor = "wpkh([00000001/84h/1h/0']tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/0/0)"
        parsed = parse_descriptor(descriptor)
        self.assertIsNotNone(parsed)
        self.assertEqual(parsed.pubkeys[0].origin.get_derivation_path(), "m/84'/1'/0'")

    def test_checksums(self):
        # Templates with a {} hole for the multisig threshold, so the
        # "Error in Payload" case can reuse the same descriptor bodies.
        xprv_body = "sh(multi({},[00000000/111'/222]xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39njGVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc,xprv9uPDJpEQgRQfDcW7BkF7eTya6RPxXeJCqCJGHuCJ4GiRVLzkTXBAJMu2qaMWPrS7AANYqdq6vcBcBUdJCVVFceUvJFjaPdGZ2y9WACViL4L/0))"
        xpub_body = "sh(multi({},[00000000/111'/222]xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL,xpub68NZiKmJWnxxS6aaHmn81bvJeTESw724CRDs6HbuccFQN9Ku14VQrADWgqbhhTHBaohPX4CjNLf9fq9MYo6oDaPPLPxSb7gwQN3ih19Zm4Y/0))"
        xprv = xprv_body.format(2)
        xpub = xpub_body.format(2)
        with self.subTest(msg='Valid checksum'):
            self.assertIsNotNone(parse_descriptor(xprv + "#ggrsrxfy"))
            self.assertIsNotNone(parse_descriptor(xpub + "#tjg09x5t"))
            # Omitting the checksum entirely is also accepted.
            self.assertIsNotNone(parse_descriptor(xprv))
            self.assertIsNotNone(parse_descriptor(xpub))
        with self.subTest(msg="Empty Checksum"):
            for bad in (xprv + "#", xpub + "#"):
                self.assertRaises(ValueError, parse_descriptor, bad)
        with self.subTest(msg="Too long Checksum"):
            for bad in (xprv + "#ggrsrxfyq", xpub + "#tjg09x5tq"):
                self.assertRaises(ValueError, parse_descriptor, bad)
        with self.subTest(msg="Too Short Checksum"):
            for bad in (xprv + "#ggrsrxf", xpub + "#tjg09x5"):
                self.assertRaises(ValueError, parse_descriptor, bad)
        with self.subTest(msg="Error in Payload"):
            # Threshold changed 2 -> 3: the checksum no longer matches.
            for bad in (xprv_body.format(3) + "#ggrsrxfy", xpub_body.format(3) + "#tjg09x5t"):
                self.assertRaises(ValueError, parse_descriptor, bad)
        with self.subTest(msg="Error in Checksum"):
            for bad in (xprv + "#ggssrxfy", xpub + "#tjq09x4t"):
                self.assertRaises(ValueError, parse_descriptor, bad)
if __name__ == "__main__":
    # Allow running this test module directly (outside a test runner).
    unittest.main()
| 94.718182
| 330
| 0.819081
| 777
| 10,419
| 10.830116
| 0.127413
| 0.058824
| 0.072252
| 0.055615
| 0.890909
| 0.874629
| 0.858942
| 0.814141
| 0.771123
| 0.74284
| 0
| 0.114739
| 0.091563
| 10,419
| 109
| 331
| 95.587156
| 0.774326
| 0.002112
| 0
| 0.368421
| 0
| 0.010526
| 0.538188
| 0.51616
| 0
| 0
| 0
| 0
| 0.589474
| 1
| 0.094737
| false
| 0
| 0.021053
| 0
| 0.126316
| 0.063158
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ab392f1539079cf8214ad8f946d879950e4e4aaf
| 87,178
|
py
|
Python
|
tests/aat/api/v1/client/api/packet_generators_api.py
|
gchagnotSpt/openperf
|
0ae14cb7a685b1b059f707379773fb3bcb421d40
|
[
"Apache-2.0"
] | null | null | null |
tests/aat/api/v1/client/api/packet_generators_api.py
|
gchagnotSpt/openperf
|
0ae14cb7a685b1b059f707379773fb3bcb421d40
|
[
"Apache-2.0"
] | null | null | null |
tests/aat/api/v1/client/api/packet_generators_api.py
|
gchagnotSpt/openperf
|
0ae14cb7a685b1b059f707379773fb3bcb421d40
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
OpenPerf API
REST API interface for OpenPerf # noqa: E501
OpenAPI spec version: 1
Contact: support@spirent.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class PacketGeneratorsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the transport used for all endpoint calls.

    :param api_client: optional ApiClient; a default one is constructed
                       when omitted.
    """
    self.api_client = ApiClient() if api_client is None else api_client
def bulk_create_packet_generators(self, create, **kwargs):  # noqa: E501
    """Bulk create packet generators  # noqa: E501

    Create multiple packet generators. Requests are processed in an
    all-or-nothing manner, i.e. a single generator creation failure causes
    all generator creations for this request to fail.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_create_packet_generators(create, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkCreatePacketGeneratorsRequest create: Bulk creation (required)
    :return: BulkCreatePacketGeneratorsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply return whatever the
    # *_with_http_info variant produces (payload or request thread).
    return self.bulk_create_packet_generators_with_http_info(create, **kwargs)  # noqa: E501
def bulk_create_packet_generators_with_http_info(self, create, **kwargs):  # noqa: E501
    """Bulk create packet generators  # noqa: E501

    Create multiple packet generators. Requests are processed in an all-or-nothing manner, i.e. a single generator creation failure causes all generator creations for this request to fail.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_create_packet_generators_with_http_info(create, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkCreatePacketGeneratorsRequest create: Bulk creation (required)
    :return: BulkCreatePacketGeneratorsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this method accepts: the endpoint argument plus the common
    # transport-control keywords shared by all generated methods.
    all_params = ['create']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals (self, create, kwargs, all_params) so the
    # validated kwargs can be merged into it by name below. Do not introduce
    # new locals before this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bulk_create_packet_generators" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'create' is set
    if ('create' not in params or
            params['create'] is None):
        raise ValueError("Missing the required parameter `create` when calling `bulk_create_packet_generators`")  # noqa: E501

    collection_formats = {}

    # This endpoint takes no path or query parameters; the request body is
    # the serialized `create` object.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'create' in params:
        body_params = params['create']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/packet/generators/x/bulk-create', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BulkCreatePacketGeneratorsResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bulk_delete_packet_generators(self, delete, **kwargs):  # noqa: E501
    """Bulk delete packet generators  # noqa: E501

    Delete multiple packet generators in a best-effort manner. Generators
    can only be deleted when inactive. Active or Non-existant generator ids
    do not cause errors. Idempotent.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_delete_packet_generators(delete, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkDeletePacketGeneratorsRequest delete: Bulk delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply return whatever the
    # *_with_http_info variant produces (payload or request thread).
    return self.bulk_delete_packet_generators_with_http_info(delete, **kwargs)  # noqa: E501
def bulk_delete_packet_generators_with_http_info(self, delete, **kwargs):  # noqa: E501
    """Bulk delete packet generators  # noqa: E501

    Delete multiple packet generators in a best-effort manner. Generators can only be deleted when inactive. Active or Non-existant generator ids do not cause errors. Idempotent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_delete_packet_generators_with_http_info(delete, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkDeletePacketGeneratorsRequest delete: Bulk delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this method accepts: the endpoint argument plus the common
    # transport-control keywords shared by all generated methods.
    all_params = ['delete']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals (self, delete, kwargs, all_params) so the
    # validated kwargs can be merged into it by name below. Do not introduce
    # new locals before this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bulk_delete_packet_generators" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'delete' is set
    if ('delete' not in params or
            params['delete'] is None):
        raise ValueError("Missing the required parameter `delete` when calling `bulk_delete_packet_generators`")  # noqa: E501

    collection_formats = {}

    # This endpoint takes no path or query parameters; the request body is
    # the serialized `delete` object.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'delete' in params:
        body_params = params['delete']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # response_type=None: a successful delete returns no body.
    return self.api_client.call_api(
        '/packet/generators/x/bulk-delete', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bulk_start_packet_generators(self, start, **kwargs):  # noqa: E501
    """Bulk start packet generators  # noqa: E501

    Start multiple packet generators simultaneously  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_start_packet_generators(start, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkStartPacketGeneratorsRequest start: Bulk start (required)
    :return: BulkStartPacketGeneratorsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply return whatever the
    # *_with_http_info variant produces (payload or request thread).
    return self.bulk_start_packet_generators_with_http_info(start, **kwargs)  # noqa: E501
def bulk_start_packet_generators_with_http_info(self, start, **kwargs):  # noqa: E501
    """Bulk start packet generators  # noqa: E501

    Start multiple packet generators simultaneously  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_start_packet_generators_with_http_info(start, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkStartPacketGeneratorsRequest start: Bulk start (required)
    :return: BulkStartPacketGeneratorsResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this method accepts: the endpoint argument plus the common
    # transport-control keywords shared by all generated methods.
    all_params = ['start']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals (self, start, kwargs, all_params) so the
    # validated kwargs can be merged into it by name below. Do not introduce
    # new locals before this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bulk_start_packet_generators" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'start' is set
    if ('start' not in params or
            params['start'] is None):
        raise ValueError("Missing the required parameter `start` when calling `bulk_start_packet_generators`")  # noqa: E501

    collection_formats = {}

    # This endpoint takes no path or query parameters; the request body is
    # the serialized `start` object.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'start' in params:
        body_params = params['start']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/packet/generators/x/bulk-start', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BulkStartPacketGeneratorsResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bulk_stop_packet_generators(self, stop, **kwargs):  # noqa: E501
    """Bulk stop packet generators  # noqa: E501

    Stop multiple packet generators simultaneously  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_stop_packet_generators(stop, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BulkStopPacketGeneratorsRequest stop: Bulk stop (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply return whatever the
    # *_with_http_info variant produces (payload or request thread).
    return self.bulk_stop_packet_generators_with_http_info(stop, **kwargs)  # noqa: E501
def bulk_stop_packet_generators_with_http_info(self, stop, **kwargs): # noqa: E501
"""Bulk stop packet generators # noqa: E501
Stop multiple packet generators simultaneously # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_stop_packet_generators_with_http_info(stop, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BulkStopPacketGeneratorsRequest stop: Bulk stop (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stop'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method bulk_stop_packet_generators" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stop' is set
if ('stop' not in params or
params['stop'] is None):
raise ValueError("Missing the required parameter `stop` when calling `bulk_stop_packet_generators`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'stop' in params:
body_params = params['stop']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generators/x/bulk-stop', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_packet_generator(self, generator, **kwargs): # noqa: E501
"""Create a new packet generator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_packet_generator(generator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param PacketGenerator generator: New packet generator (required)
:return: PacketGenerator
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_packet_generator_with_http_info(generator, **kwargs) # noqa: E501
else:
(data) = self.create_packet_generator_with_http_info(generator, **kwargs) # noqa: E501
return data
def create_packet_generator_with_http_info(self, generator, **kwargs): # noqa: E501
"""Create a new packet generator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_packet_generator_with_http_info(generator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param PacketGenerator generator: New packet generator (required)
:return: PacketGenerator
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['generator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_packet_generator" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'generator' is set
if ('generator' not in params or
params['generator'] is None):
raise ValueError("Missing the required parameter `generator` when calling `create_packet_generator`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'generator' in params:
body_params = params['generator']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generators', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PacketGenerator', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_packet_generator(self, id, **kwargs): # noqa: E501
"""Delete a packet generator # noqa: E501
Delete a stopped packet generator by id. Also delete all results created by this generator. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generator(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_packet_generator_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_packet_generator_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_packet_generator_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a packet generator # noqa: E501
Delete a stopped packet generator by id. Also delete all results created by this generator. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generator_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_packet_generator" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_packet_generator`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generators/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_packet_generator_result(self, id, **kwargs): # noqa: E501
"""Delete a packet generator result # noqa: E501
Delete an inactive packet generator result. Also deletes all child tx-flow objects. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generator_result(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_packet_generator_result_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_packet_generator_result_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_packet_generator_result_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a packet generator result # noqa: E501
Delete an inactive packet generator result. Also deletes all child tx-flow objects. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generator_result_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_packet_generator_result" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_packet_generator_result`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generator-results/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_packet_generator_results(self, **kwargs): # noqa: E501
"""Delete all generator results # noqa: E501
Delete all inactive generator results # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generator_results(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_packet_generator_results_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_packet_generator_results_with_http_info(**kwargs) # noqa: E501
return data
def delete_packet_generator_results_with_http_info(self, **kwargs): # noqa: E501
"""Delete all generator results # noqa: E501
Delete all inactive generator results # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generator_results_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_packet_generator_results" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generator-results', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_packet_generators(self, **kwargs): # noqa: E501
"""Delete all packet generators # noqa: E501
Delete all inactive packet generators and their results. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generators(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_packet_generators_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_packet_generators_with_http_info(**kwargs) # noqa: E501
return data
def delete_packet_generators_with_http_info(self, **kwargs): # noqa: E501
"""Delete all packet generators # noqa: E501
Delete all inactive packet generators and their results. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_packet_generators_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_packet_generators" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generators', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_packet_generator(self, id, **kwargs): # noqa: E501
"""Get a packet generator # noqa: E501
Return a packet generator, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_packet_generator(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: PacketGenerator
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_packet_generator_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_packet_generator_with_http_info(id, **kwargs) # noqa: E501
return data
def get_packet_generator_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a packet generator # noqa: E501
Return a packet generator, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_packet_generator_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: PacketGenerator
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_packet_generator" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_packet_generator`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generators/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PacketGenerator', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_packet_generator_learning_results(self, id, **kwargs): # noqa: E501
"""Get detailed learning information # noqa: E501
Returns learning state and resolved addresses for a packet generator attached to an interface, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_packet_generator_learning_results(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: PacketGeneratorLearningResults
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_packet_generator_learning_results_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_packet_generator_learning_results_with_http_info(id, **kwargs) # noqa: E501
return data
def get_packet_generator_learning_results_with_http_info(self, id, **kwargs): # noqa: E501
"""Get detailed learning information # noqa: E501
Returns learning state and resolved addresses for a packet generator attached to an interface, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_packet_generator_learning_results_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: PacketGeneratorLearningResults
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_packet_generator_learning_results" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_packet_generator_learning_results`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generators/{id}/learning', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PacketGeneratorLearningResults', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_packet_generator_result(self, id, **kwargs): # noqa: E501
"""Get a packet generator result # noqa: E501
Returns results from a packet generator by result id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_packet_generator_result(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: PacketGeneratorResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_packet_generator_result_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_packet_generator_result_with_http_info(id, **kwargs) # noqa: E501
return data
def get_packet_generator_result_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a packet generator result # noqa: E501
Returns results from a packet generator by result id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_packet_generator_result_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: PacketGeneratorResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_packet_generator_result" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_packet_generator_result`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generator-results/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PacketGeneratorResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tx_flow(self, id, **kwargs): # noqa: E501
"""Get a transmit packet flow # noqa: E501
Returns a transmit packet flow by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tx_flow(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TxFlow
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tx_flow_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tx_flow_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tx_flow_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a transmit packet flow # noqa: E501
Returns a transmit packet flow by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tx_flow_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TxFlow
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tx_flow" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tx_flow`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/tx-flows/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TxFlow', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_packet_generator_results(self, **kwargs): # noqa: E501
"""List generator results # noqa: E501
The `generator-results` endpoint returns all generator results created by generator instances. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_packet_generator_results(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str generator_id: Filter by generator id
:param str target_id: Filter by target port or interface id
:return: list[PacketGeneratorResult]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_packet_generator_results_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_packet_generator_results_with_http_info(**kwargs) # noqa: E501
return data
def list_packet_generator_results_with_http_info(self, **kwargs): # noqa: E501
"""List generator results # noqa: E501
The `generator-results` endpoint returns all generator results created by generator instances. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_packet_generator_results_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str generator_id: Filter by generator id
:param str target_id: Filter by target port or interface id
:return: list[PacketGeneratorResult]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['generator_id', 'target_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_packet_generator_results" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'generator_id' in params:
query_params.append(('generator_id', params['generator_id'])) # noqa: E501
if 'target_id' in params:
query_params.append(('target_id', params['target_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/packet/generator-results', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[PacketGeneratorResult]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_packet_generators(self, **kwargs):  # noqa: E501
    """List packet generators  # noqa: E501

    The `generators` endpoint returns all packet generators that are
    configured to transmit test traffic.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str target_id: Filter by target id
    :return: list[PacketGenerator]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only ever want the payload
    # (or, for async, the thread) — never the (data, status, headers)
    # triple the *_with_http_info variant can produce.
    kwargs['_return_http_data_only'] = True
    # The sync and async paths both reduce to returning the delegate's
    # result, so a single return covers both.
    return self.list_packet_generators_with_http_info(**kwargs)  # noqa: E501
def list_packet_generators_with_http_info(self, **kwargs):  # noqa: E501
    """List packet generators  # noqa: E501
    The `generators` endpoint returns all packet generators that are configured to transmit test traffic.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_packet_generators_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str target_id: Filter by target id
    :return: list[PacketGenerator]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['target_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_packet_generators" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    # Optional filter: only placed on the query string when supplied.
    if 'target_id' in params:
        query_params.append(('target_id', params['target_id']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[PacketGenerator]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_tx_flows(self, **kwargs):  # noqa: E501
    """List packet generator transmit flows  # noqa: E501

    The `tx-flows` endpoint returns all packet flows that are generated
    by packet generators.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str generator_id: Filter by packet generator id
    :param str target_id: Filter by target port or interface id
    :return: list[TxFlow]
        If the method is called asynchronously,
        returns the request thread.
    """
    # This wrapper always wants just the deserialized payload (or the
    # async thread), never the full (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.list_tx_flows_with_http_info(**kwargs)  # noqa: E501
def list_tx_flows_with_http_info(self, **kwargs):  # noqa: E501
    """List packet generator transmit flows  # noqa: E501
    The `tx-flows` endpoint returns all packet flows that are generated by packet generators.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_tx_flows_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str generator_id: Filter by packet generator id
    :param str target_id: Filter by target port or interface id
    :return: list[TxFlow]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['generator_id', 'target_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_tx_flows" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    # Optional filters: only placed on the query string when supplied.
    if 'generator_id' in params:
        query_params.append(('generator_id', params['generator_id']))  # noqa: E501
    if 'target_id' in params:
        query_params.append(('target_id', params['target_id']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/tx-flows', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[TxFlow]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def retry_packet_generator_learning(self, id, **kwargs):  # noqa: E501
    """Retry MAC learning  # noqa: E501

    Used to retry MAC learning on a generator bound to an interface.
    Performs MAC learning for only unresolved addresses.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip transport metadata from the response — this wrapper hands
    # back only the payload (or, for async, the thread).
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.retry_packet_generator_learning_with_http_info(id, **kwargs)  # noqa: E501
def retry_packet_generator_learning_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retry MAC learning  # noqa: E501
    Used to retry MAC learning on a generator bound to an interface. Performs MAC learning for only unresolved addresses.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.retry_packet_generator_learning_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method retry_packet_generator_learning" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `retry_packet_generator_learning`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The generator id is interpolated into the URL template below.
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # POST with no request body: the path alone identifies the action.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators/{id}/learning/retry', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def start_packet_generator(self, id, **kwargs):  # noqa: E501
    """Start generating packets  # noqa: E501

    Used to start a non-running generator. Creates a new generator
    result upon success.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: PacketGeneratorResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip transport metadata from the response — this wrapper hands
    # back only the payload (or, for async, the thread).
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.start_packet_generator_with_http_info(id, **kwargs)  # noqa: E501
def start_packet_generator_with_http_info(self, id, **kwargs):  # noqa: E501
    """Start generating packets  # noqa: E501
    Used to start a non-running generator. Creates a new generator result upon success.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.start_packet_generator_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: PacketGeneratorResult
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_packet_generator" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `start_packet_generator`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The generator id is interpolated into the URL template below.
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # POST with no request body: the path alone identifies the action.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators/{id}/start', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PacketGeneratorResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def start_packet_generator_learning(self, id, **kwargs):  # noqa: E501
    """Start MAC learning  # noqa: E501

    Used to start MAC learning on a generator bound to an interface.
    Clears previously resolved MAC table.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip transport metadata from the response — this wrapper hands
    # back only the payload (or, for async, the thread).
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.start_packet_generator_learning_with_http_info(id, **kwargs)  # noqa: E501
def start_packet_generator_learning_with_http_info(self, id, **kwargs):  # noqa: E501
    """Start MAC learning  # noqa: E501
    Used to start MAC learning on a generator bound to an interface. Clears previously resolved MAC table.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.start_packet_generator_learning_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_packet_generator_learning" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `start_packet_generator_learning`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The generator id is interpolated into the URL template below.
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # POST with no request body: the path alone identifies the action.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators/{id}/learning/start', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def stop_packet_generator(self, id, **kwargs):  # noqa: E501
    """Stop generating packets.  # noqa: E501

    Use to halt a running generator. Idempotent.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip transport metadata from the response — this wrapper hands
    # back only the payload (or, for async, the thread).
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.stop_packet_generator_with_http_info(id, **kwargs)  # noqa: E501
def stop_packet_generator_with_http_info(self, id, **kwargs):  # noqa: E501
    """Stop generating packets.  # noqa: E501
    Use to halt a running generator. Idempotent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.stop_packet_generator_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method stop_packet_generator" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `stop_packet_generator`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The generator id is interpolated into the URL template below.
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # POST with no request body: the path alone identifies the action.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators/{id}/stop', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def stop_packet_generator_learning(self, id, **kwargs):  # noqa: E501
    """Stop MAC learning  # noqa: E501

    Used to stop MAC learning on a generator bound to an interface.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip transport metadata from the response — this wrapper hands
    # back only the payload (or, for async, the thread).
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.stop_packet_generator_learning_with_http_info(id, **kwargs)  # noqa: E501
def stop_packet_generator_learning_with_http_info(self, id, **kwargs):  # noqa: E501
    """Stop MAC learning  # noqa: E501
    Used to stop MAC learning on a generator bound to an interface.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.stop_packet_generator_learning_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: Unique resource identifier (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method stop_packet_generator_learning" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `stop_packet_generator_learning`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The generator id is interpolated into the URL template below.
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # POST with no request body: the path alone identifies the action.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators/{id}/learning/stop', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def toggle_packet_generators(self, toggle, **kwargs):  # noqa: E501
    """Replace a running generator with a stopped generator  # noqa: E501

    Atomically swap a running generator with an idle generator. Upon
    success, the idle generator will be in the run state and the
    previously running generator will be stopped.

    Synchronous by default; pass async_req=True to receive the request
    thread instead and fetch the result via thread.get().

    :param async_req bool
    :param TogglePacketGeneratorsRequest toggle: Generator toggle (required)
    :return: PacketGeneratorResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip transport metadata from the response — this wrapper hands
    # back only the payload (or, for async, the thread).
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike simply return the delegate's result.
    return self.toggle_packet_generators_with_http_info(toggle, **kwargs)  # noqa: E501
def toggle_packet_generators_with_http_info(self, toggle, **kwargs):  # noqa: E501
    """Replace a running generator with a stopped generator  # noqa: E501
    Atomically swap a running generator with an idle generator. Upon success, the idle generator will be in the run state and the previously running generator will be stopped.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.toggle_packet_generators_with_http_info(toggle, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param TogglePacketGeneratorsRequest toggle: Generator toggle (required)
    :return: PacketGeneratorResult
    If the method is called asynchronously,
    returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else is a
    # caller error and is rejected in the validation loop below.
    all_params = ['toggle']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the local namespace so declared parameters and validated
    # **kwargs entries can be looked up uniformly through one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method toggle_packet_generators" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'toggle' is set
    if ('toggle' not in params or
            params['toggle'] is None):
        raise ValueError("Missing the required parameter `toggle` when calling `toggle_packet_generators`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The toggle request object is serialized as the POST body.
    if 'toggle' in params:
        body_params = params['toggle']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP exchange (and optional async dispatch)
    # to the shared API client.
    return self.api_client.call_api(
        '/packet/generators/x/toggle', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PacketGeneratorResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 39.789137
| 207
| 0.613962
| 9,899
| 87,178
| 5.157592
| 0.025154
| 0.053276
| 0.024131
| 0.031025
| 0.979767
| 0.972363
| 0.968563
| 0.961493
| 0.953188
| 0.951856
| 0
| 0.016799
| 0.298722
| 87,178
| 2,190
| 208
| 39.807306
| 0.818307
| 0.341164
| 0
| 0.815567
| 1
| 0
| 0.175244
| 0.059945
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038071
| false
| 0
| 0.003384
| 0
| 0.098139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
db99b9b187f072b690e17602c20220be597fcc25
| 5,768
|
py
|
Python
|
tests/assets.py
|
NitronBeenGrinding/deonedits
|
843d25be6def8163f7c24aac7676187dd83ad221
|
[
"MIT"
] | 223
|
2018-09-06T05:17:50.000Z
|
2022-03-19T19:20:28.000Z
|
tests/assets.py
|
NitronBeenGrinding/deonedits
|
843d25be6def8163f7c24aac7676187dd83ad221
|
[
"MIT"
] | 76
|
2018-08-31T22:31:12.000Z
|
2022-03-29T22:56:11.000Z
|
tests/assets.py
|
NitronBeenGrinding/deonedits
|
843d25be6def8163f7c24aac7676187dd83ad221
|
[
"MIT"
] | 51
|
2018-09-12T00:43:13.000Z
|
2022-02-17T04:06:24.000Z
|
existing_text = "There is existing text in this file."
known_good_ascii = """My Checklist
A. First section
* A.1 A1sum: First A line
* A.2 A2sum: Second A line
B. Second section
* B.1 B1sum: First B line
* B.2 B2sum: Second B line
Data Science Ethics Checklist generated with deon (http://deon.drivendata.org)."""
known_good_markdown = """# My Checklist
[](http://deon.drivendata.org/)
## A. First section
- [ ] **A.1 A1sum**: First A line
- [ ] **A.2 A2sum**: Second A line
## B. Second section
- [ ] **B.1 B1sum**: First B line
- [ ] **B.2 B2sum**: Second B line
*Data Science Ethics Checklist generated with [deon](http://deon.drivendata.org).*"""
known_good_rst = """My Checklist
============
.. image:: https://img.shields.io/badge/ethics%20checklist-deon-brightgreen.svg?style=popout-square
:target: http://deon.drivendata.org
A. First section
---------
* [ ] **A.1 A1sum**: First A line
* [ ] **A.2 A2sum**: Second A line
B. Second section
---------
* [ ] **B.1 B1sum**: First B line
* [ ] **B.2 B2sum**: Second B line
*Data Science Ethics Checklist generated with* `deon <http://deon.drivendata.org>`_."""
known_good_jupyter = {
"nbformat": 4,
"nbformat_minor": 2,
"metadata": {},
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# My Checklist\n",
"\n",
"[](http://deon.drivendata.org/)\n",
"\n",
"## A. First section\n",
" - [ ] **A.1 A1sum**: First A line\n",
" - [ ] **A.2 A2sum**: Second A line\n",
"\n",
"## B. Second section\n",
" - [ ] **B.1 B1sum**: First B line\n",
" - [ ] **B.2 B2sum**: Second B line\n",
"\n",
"*Data Science Ethics Checklist generated with [deon](http://deon.drivendata.org).*"
"\n",
],
}
],
}
known_good_jupyter_multicell = {
"nbformat": 4,
"nbformat_minor": 2,
"metadata": {},
"cells": [
{"cell_type": "markdown", "metadata": {}, "source": ["# My Checklist"]},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"[](http://deon.drivendata.org/)",
],
},
{"cell_type": "markdown", "metadata": {}, "source": ["## A. First section"]},
{
"cell_type": "markdown",
"metadata": {},
"source": [" - [ ] **A.1 A1sum**: First A line"],
},
{
"cell_type": "markdown",
"metadata": {},
"source": [" - [ ] **A.2 A2sum**: Second A line"],
},
{"cell_type": "markdown", "metadata": {}, "source": ["## B. Second section"]},
{
"cell_type": "markdown",
"metadata": {},
"source": [" - [ ] **B.1 B1sum**: First B line"],
},
{
"cell_type": "markdown",
"metadata": {},
"source": [" - [ ] **B.2 B2sum**: Second B line"],
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"*Data Science Ethics Checklist generated with [deon](http://deon.drivendata.org).*",
],
},
],
}
known_good_html = """<html>
<body>
<h1>
My Checklist
</h1>
<br/>
<br/>
<a href="http://deon.drivendata.org/">
<img alt="Deon badge" src="https://img.shields.io/badge/ethics%20checklist-deon-brightgreen.svg?style=popout-square"/>
</a>
<br/>
<br/>
<h2>
A. First section
</h2>
<hr/>
<ul>
<li>
<input type="checkbox"/>
<strong>
A.1 A1sum:
</strong>
First A line
</li>
<li>
<input type="checkbox"/>
<strong>
A.2 A2sum:
</strong>
Second A line
</li>
</ul>
<br/>
<h2>
B. Second section
</h2>
<hr/>
<ul>
<li>
<input type="checkbox"/>
<strong>
B.1 B1sum:
</strong>
First B line
</li>
<li>
<input type="checkbox"/>
<strong>
B.2 B2sum:
</strong>
Second B line
</li>
</ul>
<br/>
<br/>
<em>
Data Science Ethics Checklist generated with
<a href="http://deon.drivendata.org">
deon.
</a>
</em>
</body>
</html>
"""
existing_text_html = """<html>
<body>
There is existing text in this file.
</body>
</html>
"""
known_good_inserted_html = """<html>
<body>
There is existing text in this file.
<h1>
My Checklist
</h1>
<br/>
<br/>
<a href="http://deon.drivendata.org/">
<img alt="Deon badge" src="https://img.shields.io/badge/ethics%20checklist-deon-brightgreen.svg?style=popout-square"/>
</a>
<br/>
<br/>
<h2>
A. First section
</h2>
<hr/>
<ul>
<li>
<input type="checkbox"/>
<strong>
A.1 A1sum:
</strong>
First A line
</li>
<li>
<input type="checkbox"/>
<strong>
A.2 A2sum:
</strong>
Second A line
</li>
</ul>
<br/>
<h2>
B. Second section
</h2>
<hr/>
<ul>
<li>
<input type="checkbox"/>
<strong>
B.1 B1sum:
</strong>
First B line
</li>
<li>
<input type="checkbox"/>
<strong>
B.2 B2sum:
</strong>
Second B line
</li>
</ul>
<br/>
<br/>
<em>
Data Science Ethics Checklist generated with
<a href="http://deon.drivendata.org">
deon.
</a>
</em>
</body>
</html>
"""
| 22.27027
| 155
| 0.505895
| 669
| 5,768
| 4.313901
| 0.112108
| 0.024255
| 0.081081
| 0.094595
| 0.926888
| 0.9158
| 0.838184
| 0.778933
| 0.778933
| 0.778933
| 0
| 0.020664
| 0.29525
| 5,768
| 258
| 156
| 22.356589
| 0.689299
| 0
| 0
| 0.729958
| 0
| 0.029536
| 0.758148
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dba5414971e5a382213a85368382988ae2ea9988
| 42
|
py
|
Python
|
tests/test_chalice_cdk.py
|
knowsuchagency/cdk-chalice-lite
|
27466d03e37451b0fd4040b01c89bf675d556d39
|
[
"Apache-2.0"
] | 1
|
2021-03-11T22:54:18.000Z
|
2021-03-11T22:54:18.000Z
|
tests/test_chalice_cdk.py
|
knowsuchagency/cdk-chalice-lite
|
27466d03e37451b0fd4040b01c89bf675d556d39
|
[
"Apache-2.0"
] | null | null | null |
tests/test_chalice_cdk.py
|
knowsuchagency/cdk-chalice-lite
|
27466d03e37451b0fd4040b01c89bf675d556d39
|
[
"Apache-2.0"
] | null | null | null |
def test():
    """Placeholder smoke test; always passes."""
    # TODO: replace with real tests
    assert True
| 10.5
| 15
| 0.619048
| 6
| 42
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 42
| 3
| 16
| 14
| 0.83871
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbaea74e8d3a8b35fe9f3aec99278be5d4b4f0a7
| 9,820
|
py
|
Python
|
tests/test_rate_code.py
|
jonathanishhorowicz/RATE_python_package
|
d5294f047c6ce2e91658cd0d482f90a0c12396dd
|
[
"MIT"
] | null | null | null |
tests/test_rate_code.py
|
jonathanishhorowicz/RATE_python_package
|
d5294f047c6ce2e91658cd0d482f90a0c12396dd
|
[
"MIT"
] | null | null | null |
tests/test_rate_code.py
|
jonathanishhorowicz/RATE_python_package
|
d5294f047c6ce2e91658cd0d482f90a0c12396dd
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
from rate.projections import PseudoinverseProjection, CovarianceProjection
from rate.importance import rate
from rate.wrapped_r import init_rate_r
from scipy.stats import spearmanr
def psd_matrix(n):
    """Return a random (n, n) positive semi-definite matrix.

    Built as F @ F.T from a Gaussian factor, so it is full-rank almost surely.
    """
    factor = np.random.randn(n, n)
    return factor @ factor.T
def test_input_shape_handling():
    """rate() must reject inconsistently shaped X / M_F / V_F combinations.

    Each tuple below is a set of (X, M_F, V_F) shapes that should raise
    ValueError; the two consistent combinations at the end should run cleanly.
    """
    error_shapes = [
        # 1D M_F vs 3D V_F / 2D M_F vs 2D V_F: n_classes mismatch
        ((20, 10), (20,), (2, 20, 20)),
        ((20, 10), (2, 20), (20, 20)),
        # 1D M_F, 2D V_F
        ((21, 10), (20,), (20, 20)),    # X rows don't match M_F, V_F
        ((20, 10), (21,), (20, 20)),    # M_F length doesn't match X, V_F
        ((20, 10), (20,), (21, 21)),    # V_F size doesn't match X, M_F
        ((20, 10), (20,), (20, 21)),    # V_F not square
        # 2D M_F, 3D V_F
        ((20, 10), (1, 20), (2, 20, 20)),  # M_F and V_F n_classes don't match
        ((21, 10), (2, 20), (2, 20, 20)),  # X rows don't match M_F, V_F
        ((20, 10), (2, 21), (2, 20, 20)),  # M_F cols don't match X, V_F
        ((20, 10), (2, 20), (2, 21, 21)),  # V_F size doesn't match X, M_F
        ((20, 10), (2, 20), (2, 20, 21)),  # V_F not square
    ]
    for x_shape, m_shape, v_shape in error_shapes:
        with pytest.raises(ValueError):
            rate(np.random.rand(*x_shape),
                 np.random.rand(*m_shape),
                 np.random.rand(*v_shape))
    # Consistent shapes should run without raising
    rate(np.random.rand(20, 10), np.random.rand(20), np.random.rand(20, 20))
    rate(np.random.rand(20, 10), np.random.rand(2, 20), np.random.rand(2, 20, 20))
def test_output_shapes_single_output_class():
    """Check output types/shapes when calculating RATE values for a single output class.

    Depending on argument values the returned object differs (KLDs and the
    computation time can also be returned).

    Fix over the generated original: the sum-to-one checks now use
    ``abs(1.0 - out.sum()) <= eps``; the one-sided ``1.0 - out.sum() <= eps``
    passed trivially whenever the sum exceeded 1.
    """
    n, p = 100, 10
    eps = 1e-9
    # Logit posterior
    M_F = np.random.rand(1, n)
    V_F = psd_matrix(n)[np.newaxis]
    X = np.random.randn(n, p)
    #
    # All the combinations that affect the returned object
    #
    # Output format: RATE (np.array)
    for proj in [CovarianceProjection(), PseudoinverseProjection()]:
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=False, return_time=False)
        assert abs(1.0 - out.sum()) <= eps  # RATE values are normalised to sum to one
        assert isinstance(out, np.ndarray)
        assert out.shape[0] == p
        assert out.ndim == 1
        # [RATE, KLDs] (both np.array)
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=True, return_time=False)
        assert len(out) == 2
        assert isinstance(out[0], np.ndarray)
        assert abs(1.0 - out[0].sum()) <= eps
        assert out[0].shape[0] == p
        assert out[0].ndim == 1
        assert isinstance(out[1], np.ndarray)
        assert out[1].shape[0] == p
        assert out[1].ndim == 1
        assert np.array_equal(out[1]/out[1].sum(), out[0])
        # [[RATE, KLDs], time], where RATE and KLDs are np.array and time is a float
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=True, return_time=True)
        assert len(out) == 2
        assert len(out[0]) == 2
        assert isinstance(out[0][0], np.ndarray)
        assert isinstance(out[0][1], np.ndarray)
        assert isinstance(out[1], float)
        assert abs(1.0 - out[0][0].sum()) <= eps
        assert out[0][0].shape[0] == p
        assert out[0][0].ndim == 1
        assert out[0][1].shape[0] == p
        assert out[0][1].ndim == 1
        assert np.array_equal(out[0][1]/out[0][1].sum(), out[0][0])
        # [RATE, time] where RATE is np.array and time is a float
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=False, return_time=True)
        assert len(out) == 2
        assert isinstance(out[0], np.ndarray)
        assert out[0].shape[0] == p
        assert out[0].ndim == 1
        assert abs(1.0 - out[0].sum()) <= eps
        assert isinstance(out[1], float)
def test_output_shapes_multiple_output_classes():
    """Check output types/shapes when calculating RATE values for 3 output classes.

    Depending on argument values the returned object differs (KLDs and the
    computation time can also be returned).

    Fixes over the generated original: two assertions read
    ``np.all([1.0-rate_vals[0].sum() for ...])`` — the ``<= eps`` comparison was
    missing entirely (they truthy-tested a float and almost always passed) and
    the ``[0]`` indexed into the per-class array instead of summing it. All
    sum-to-one checks now use ``abs(1.0 - sum) <= eps``.
    """
    n, p = 100, 10
    C = 3
    eps = 1e-9
    # Logit posterior
    M_F = np.random.randn(C, n)
    V_F = np.array([psd_matrix(n) for _ in range(C)])
    X = np.random.randn(n, p)
    #
    # All the argument combinations that affect the returned object
    #
    # Output format: RATE (list of np.array)
    for proj in [CovarianceProjection(), PseudoinverseProjection()]:
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=False, return_time=False)
        assert isinstance(out, list)
        assert len(out) == C
        assert np.all([abs(1.0 - rate_vals.sum()) <= eps for rate_vals in out])
        assert np.all([isinstance(rate_vals, np.ndarray) for rate_vals in out])
        assert np.all([rate_vals.shape[0] == p for rate_vals in out])
        assert np.all([rate_vals.ndim == 1 for rate_vals in out])
        # [RATE, KLDs] (both list of np.array)
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=True, return_time=False)
        assert len(out) == 2
        assert isinstance(out[0], list)
        assert len(out[0]) == C
        assert np.all([isinstance(rate_vals, np.ndarray) for rate_vals in out[0]])
        assert np.all([abs(1.0 - rate_vals.sum()) <= eps for rate_vals in out[0]])  # was missing '<= eps'
        assert np.all([rate_vals.ndim == 1 for rate_vals in out[0]])
        assert np.all([rate_vals.shape[0] == p for rate_vals in out[0]])
        assert isinstance(out[1], list)
        assert len(out[1]) == C
        assert np.all([isinstance(kld_vals, np.ndarray) for kld_vals in out[1]])
        assert np.all([kld_vals.ndim == 1 for kld_vals in out[1]])
        assert np.all([kld_vals.shape[0] == p for kld_vals in out[1]])
        assert np.all([np.array_equal(kld_vals/kld_vals.sum(), rate_vals) for rate_vals, kld_vals in zip(out[0], out[1])])
        # [[RATE, KLDs], time], where RATE and KLDs are np.array and time is a float
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=True, return_time=True)
        assert len(out) == 2
        assert len(out[0]) == 2
        assert isinstance(out[1], float)
        assert isinstance(out[0][0], list)
        assert len(out[0][0]) == C
        assert np.all([isinstance(rate_vals, np.ndarray) for rate_vals in out[0][0]])
        assert np.all([abs(1.0 - rate_vals.sum()) <= eps for rate_vals in out[0][0]])  # was missing '<= eps'
        assert np.all([rate_vals.ndim == 1 for rate_vals in out[0][0]])
        assert np.all([rate_vals.shape[0] == p for rate_vals in out[0][0]])
        assert isinstance(out[0][1], list)
        assert len(out[0][1]) == C
        assert np.all([isinstance(kld_vals, np.ndarray) for kld_vals in out[0][1]])
        assert np.all([kld_vals.ndim == 1 for kld_vals in out[0][1]])
        assert np.all([kld_vals.shape[0] == p for kld_vals in out[0][1]])
        assert np.all([np.array_equal(kld_vals/kld_vals.sum(), rate_vals) for rate_vals, kld_vals in zip(out[0][0], out[0][1])])
        # [RATE, time] where RATE is np.array and time is a float
        out = rate(X, M_F, V_F, projection=proj, return_KLDs=False, return_time=True)
        assert len(out) == 2
        assert isinstance(out[0], list)
        assert np.all([isinstance(rate_vals, np.ndarray) for rate_vals in out[0]])
        assert np.all([rate_vals.shape[0] == p for rate_vals in out[0]])
        assert np.all([rate_vals.ndim == 1 for rate_vals in out[0]])
        assert np.all([abs(1.0 - rate_vals.sum()) <= eps for rate_vals in out[0]])
        assert isinstance(out[1], float)
def test_rate_results():
    """Tests that the Python code (which uses closed-forms of the effect size analogue posterior)
    converges to the same result as the original R script (which uses samples from that posterior)
    as the number of samples increases.
    Takes about two minutes to run on Jonathan's workstation.
    """
    n, p = 100, 10
    n_draw_vals = [1000, 3000, 10000]
    n_repeats = 10 # Need this many repeats as the variance of the RATE values from sampling can be quite large
    rate_r_func = init_rate_r()
    # Logit posterior
    M_F = np.random.rand(1,n)
    V_F = psd_matrix(n)[np.newaxis]
    X = np.random.randn(n, p)
    # norms[i, j] = ||rate_R - rate_python|| for the i-th sample size, j-th repeat.
    # NOTE(review): the same array is overwritten for every solver/projection
    # combination, so each assert below only sees the most recent fill.
    norms = np.zeros((len(n_draw_vals), n_repeats))
    for solver in ["qr", "lstsq"]:
        #
        # Pseudoinverse projection
        #
        rate_python = rate(X, M_F, V_F, projection=PseudoinverseProjection(), solver=solver) # the python result. Doesn't use matrix factorisation
        for i, n_draws in enumerate(n_draw_vals):
            for j in range(n_repeats):
                f_draws = np.random.multivariate_normal(M_F[0], V_F[0], size=(n_draws)) # Draw samples
                rate_r, klds_r, _, _ = rate_r_func(X, f_draws, "linear", False) # Calculate rate using samples (uses R code)
                norms[i,j] = np.linalg.norm(rate_r-rate_python, ord=2) # Calculate evaluation metrics (norm, correlation)
        norm_mean = norms.mean(axis=1)
        # NOTE(review): strict monotone decrease is a statistical expectation, not a
        # guarantee — this assert can occasionally be flaky for unlucky draws.
        assert np.all(norm_mean[:-1] > norm_mean[1:]) # Mean difference over repeated sets of samples should decrease
        #
        # Covariance projection
        #
        rate_python = rate(X, M_F, V_F, projection=CovarianceProjection(), solver=solver) # the python result. Doesn't use matrix factorisation
        for i, n_draws in enumerate(n_draw_vals):
            for j in range(n_repeats):
                f_draws = np.random.multivariate_normal(M_F[0], V_F[0], size=(n_draws)) # Draw samples
                rate_r, klds_r, _, _ = rate_r_func(X, f_draws, "covariance", False) # Calculate rate using samples (uses R code)
                norms[i,j] = np.linalg.norm(rate_r-rate_python, ord=2) # Calculate evaluation metrics (norm, correlation)
        norm_mean = norms.mean(axis=1)
        assert np.all(norm_mean[:-1] > norm_mean[1:]) # Mean difference over repeated sets of samples should decrease
| 38.968254
| 140
| 0.704175
| 1,808
| 9,820
| 3.709071
| 0.111726
| 0.057262
| 0.073367
| 0.043841
| 0.822845
| 0.799433
| 0.781688
| 0.773337
| 0.755592
| 0.732031
| 0
| 0.039661
| 0.147556
| 9,820
| 251
| 141
| 39.123506
| 0.761438
| 0.247149
| 0
| 0.474026
| 0
| 0
| 0.003149
| 0
| 0
| 0
| 0
| 0
| 0.474026
| 1
| 0.032468
| false
| 0
| 0.038961
| 0
| 0.077922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91723162698e711c436516bed376ba99cc877cb1
| 99,780
|
py
|
Python
|
pynos/versions/ver_7/ver_7_0_0/yang/brocade_policer.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_0_0/yang/brocade_policer.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_0_0/yang/brocade_policer.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_policer(object):
"""Auto generated class.
"""
    def __init__(self, **kwargs):
        # 'callback' is required: it receives each assembled ET 'config' element
        # built by the generated methods below.
        self._callback = kwargs.pop('callback')
def police_priority_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name = ET.SubElement(police_priority_map, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri0_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri0_conform = ET.SubElement(conform, "map-pri0-conform")
map_pri0_conform.text = kwargs.pop('map_pri0_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri1_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri1_conform = ET.SubElement(conform, "map-pri1-conform")
map_pri1_conform.text = kwargs.pop('map_pri1_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri2_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri2_conform = ET.SubElement(conform, "map-pri2-conform")
map_pri2_conform.text = kwargs.pop('map_pri2_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri3_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri3_conform = ET.SubElement(conform, "map-pri3-conform")
map_pri3_conform.text = kwargs.pop('map_pri3_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri4_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri4_conform = ET.SubElement(conform, "map-pri4-conform")
map_pri4_conform.text = kwargs.pop('map_pri4_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri5_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri5_conform = ET.SubElement(conform, "map-pri5-conform")
map_pri5_conform.text = kwargs.pop('map_pri5_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri6_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri6_conform = ET.SubElement(conform, "map-pri6-conform")
map_pri6_conform.text = kwargs.pop('map_pri6_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_conform_map_pri7_conform(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
conform = ET.SubElement(police_priority_map, "conform")
map_pri7_conform = ET.SubElement(conform, "map-pri7-conform")
map_pri7_conform.text = kwargs.pop('map_pri7_conform')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri0_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri0_exceed = ET.SubElement(exceed, "map-pri0-exceed")
map_pri0_exceed.text = kwargs.pop('map_pri0_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri1_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri1_exceed = ET.SubElement(exceed, "map-pri1-exceed")
map_pri1_exceed.text = kwargs.pop('map_pri1_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri2_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri2_exceed = ET.SubElement(exceed, "map-pri2-exceed")
map_pri2_exceed.text = kwargs.pop('map_pri2_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri3_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri3_exceed = ET.SubElement(exceed, "map-pri3-exceed")
map_pri3_exceed.text = kwargs.pop('map_pri3_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri4_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri4_exceed = ET.SubElement(exceed, "map-pri4-exceed")
map_pri4_exceed.text = kwargs.pop('map_pri4_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri5_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri5_exceed = ET.SubElement(exceed, "map-pri5-exceed")
map_pri5_exceed.text = kwargs.pop('map_pri5_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri6_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri6_exceed = ET.SubElement(exceed, "map-pri6-exceed")
map_pri6_exceed.text = kwargs.pop('map_pri6_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def police_priority_map_exceed_map_pri7_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, "name")
name_key.text = kwargs.pop('name')
exceed = ET.SubElement(police_priority_map, "exceed")
map_pri7_exceed = ET.SubElement(exceed, "map-pri7-exceed")
map_pri7_exceed.text = kwargs.pop('map_pri7_exceed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def class_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
class_map = ET.SubElement(config, "class-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name = ET.SubElement(class_map, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def class_map_match_access_group_access_group_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
class_map = ET.SubElement(config, "class-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(class_map, "name")
name_key.text = kwargs.pop('name')
match = ET.SubElement(class_map, "match")
access_group = ET.SubElement(match, "access-group")
access_group_name = ET.SubElement(access_group, "access-group-name")
access_group_name.text = kwargs.pop('access_group_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_po_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name = ET.SubElement(policy_map, "po-name")
po_name.text = kwargs.pop('po_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_cl_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name = ET.SubElement(class_el, "cl-name")
cl_name.text = kwargs.pop('cl_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_cir(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
cir = ET.SubElement(police, "cir")
cir.text = kwargs.pop('cir')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_cbs(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
cbs = ET.SubElement(police, "cbs")
cbs.text = kwargs.pop('cbs')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_eir(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
eir = ET.SubElement(police, "eir")
eir.text = kwargs.pop('eir')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_ebs(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
ebs = ET.SubElement(police, "ebs")
ebs.text = kwargs.pop('ebs')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_set_priority(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
set_priority = ET.SubElement(police, "set-priority")
set_priority.text = kwargs.pop('set_priority')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_conform_set_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
conform_set_dscp = ET.SubElement(police, "conform-set-dscp")
conform_set_dscp.text = kwargs.pop('conform_set_dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_conform_set_prec(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
conform_set_prec = ET.SubElement(police, "conform-set-prec")
conform_set_prec.text = kwargs.pop('conform_set_prec')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_conform_set_tc(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
conform_set_tc = ET.SubElement(police, "conform-set-tc")
conform_set_tc.text = kwargs.pop('conform_set_tc')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_exceed_set_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
exceed_set_dscp = ET.SubElement(police, "exceed-set-dscp")
exceed_set_dscp.text = kwargs.pop('exceed_set_dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_exceed_set_prec(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
exceed_set_prec = ET.SubElement(police, "exceed-set-prec")
exceed_set_prec.text = kwargs.pop('exceed_set_prec')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_police_exceed_set_tc(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
police = ET.SubElement(class_el, "police")
exceed_set_tc = ET.SubElement(police, "exceed-set-tc")
exceed_set_tc.text = kwargs.pop('exceed_set_tc')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_set_set_cos_tc_cos(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
set = ET.SubElement(class_el, "set")
set_cos_tc = ET.SubElement(set, "set_cos_tc")
cos = ET.SubElement(set_cos_tc, "cos")
cos.text = kwargs.pop('cos')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_set_set_cos_tc_traffic_class(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
set = ET.SubElement(class_el, "set")
set_cos_tc = ET.SubElement(set, "set_cos_tc")
traffic_class = ET.SubElement(set_cos_tc, "traffic-class")
traffic_class.text = kwargs.pop('traffic_class')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_set_set_dscp_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
set = ET.SubElement(class_el, "set")
set_dscp = ET.SubElement(set, "set_dscp")
dscp = ET.SubElement(set_dscp, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_span_session(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
po_name_key = ET.SubElement(policy_map, "po-name")
po_name_key.text = kwargs.pop('po_name')
class_el = ET.SubElement(policy_map, "class")
cl_name_key = ET.SubElement(class_el, "cl-name")
cl_name_key.text = kwargs.pop('cl_name')
span = ET.SubElement(class_el, "span")
session = ET.SubElement(span, "session")
session.text = kwargs.pop('session')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def policy_map_class_map_cos_mutation(self, **kwargs):
    """Build config for policy-map/class/map/cos-mutation.

    Required kwargs: po_name, cl_name, cos_mutation.
    Optional kwarg: callback (defaults to self._callback).
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class")
    cl_name_key = ET.SubElement(class_el, "cl-name")
    cl_name_key.text = kwargs.pop('cl_name')
    # renamed from `map` so the builtin is not shadowed
    map_el = ET.SubElement(class_el, "map")
    cos_mutation = ET.SubElement(map_el, "cos-mutation")
    cos_mutation.text = kwargs.pop('cos_mutation')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_cos_traffic_class(self, **kwargs):
    """Build config for policy-map/class/map/cos-traffic-class.

    Required kwargs: po_name, cl_name, cos_traffic_class.
    Optional kwarg: callback (defaults to self._callback).
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class")
    cl_name_key = ET.SubElement(class_el, "cl-name")
    cl_name_key.text = kwargs.pop('cl_name')
    # renamed from `map` so the builtin is not shadowed
    map_el = ET.SubElement(class_el, "map")
    cos_traffic_class = ET.SubElement(map_el, "cos-traffic-class")
    cos_traffic_class.text = kwargs.pop('cos_traffic_class')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_dscp_cos(self, **kwargs):
    """Build config for policy-map/class/map/dscp-cos.

    Required kwargs: po_name, cl_name, dscp_cos.
    Optional kwarg: callback (defaults to self._callback).
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class")
    cl_name_key = ET.SubElement(class_el, "cl-name")
    cl_name_key.text = kwargs.pop('cl_name')
    # renamed from `map` so the builtin is not shadowed
    map_el = ET.SubElement(class_el, "map")
    dscp_cos = ET.SubElement(map_el, "dscp-cos")
    dscp_cos.text = kwargs.pop('dscp_cos')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_dscp_traffic_class(self, **kwargs):
    """Build config for policy-map/class/map/dscp-traffic-class.

    Required kwargs: po_name, cl_name, dscp_traffic_class.
    Optional kwarg: callback (defaults to self._callback).
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class")
    cl_name_key = ET.SubElement(class_el, "cl-name")
    cl_name_key.text = kwargs.pop('cl_name')
    # renamed from `map` so the builtin is not shadowed
    map_el = ET.SubElement(class_el, "map")
    dscp_traffic_class = ET.SubElement(map_el, "dscp-traffic-class")
    dscp_traffic_class.text = kwargs.pop('dscp_traffic_class')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_dscp_mutation(self, **kwargs):
    """Build config for policy-map/class/map/dscp-mutation.

    Required kwargs: po_name, cl_name, dscp_mutation.
    Optional kwarg: callback (defaults to self._callback).
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class")
    cl_name_key = ET.SubElement(class_el, "cl-name")
    cl_name_key.text = kwargs.pop('cl_name')
    # renamed from `map` so the builtin is not shadowed
    map_el = ET.SubElement(class_el, "map")
    dscp_mutation = ET.SubElement(map_el, "dscp-mutation")
    dscp_mutation.text = kwargs.pop('dscp_mutation')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_sflow(self, **kwargs):
    """Build config for policy-map/class/map/sflow.

    Required kwargs: po_name, cl_name, sflow.
    Optional kwarg: callback (defaults to self._callback).
    """
    config = ET.Element("config")
    policy_map = ET.SubElement(config, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    po_name_key = ET.SubElement(policy_map, "po-name")
    po_name_key.text = kwargs.pop('po_name')
    class_el = ET.SubElement(policy_map, "class")
    cl_name_key = ET.SubElement(class_el, "cl-name")
    cl_name_key.text = kwargs.pop('cl_name')
    # renamed from `map` so the builtin is not shadowed
    map_el = ET.SubElement(class_el, "map")
    sflow = ET.SubElement(map_el, "sflow")
    sflow.text = kwargs.pop('sflow')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_shape_shaping_rate(self, **kwargs):
    """Build config for policy-map/class/shape/shaping_rate.

    Required kwargs: po_name, cl_name, shaping_rate.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    shape_node = ET.SubElement(cls_node, "shape")
    ET.SubElement(shape_node, "shaping_rate").text = kwargs.pop('shaping_rate')
    return cb(root)
def policy_map_class_scheduler_strict_priority_priority_number(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/priority-number.

    Required kwargs: po_name, cl_name, priority_number.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "priority-number").text = kwargs.pop('priority_number')
    return cb(root)
def policy_map_class_scheduler_strict_priority_scheduler_type(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/scheduler-type.

    Required kwargs: po_name, cl_name, scheduler_type.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "scheduler-type").text = kwargs.pop('scheduler_type')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class0(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class0.

    Required kwargs: po_name, cl_name, dwrr_traffic_class0.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class0").text = kwargs.pop('dwrr_traffic_class0')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class1(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class1.

    Required kwargs: po_name, cl_name, dwrr_traffic_class1.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class1").text = kwargs.pop('dwrr_traffic_class1')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class2(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class2.

    Required kwargs: po_name, cl_name, dwrr_traffic_class2.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class2").text = kwargs.pop('dwrr_traffic_class2')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class3(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class3.

    Required kwargs: po_name, cl_name, dwrr_traffic_class3.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class3").text = kwargs.pop('dwrr_traffic_class3')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class4(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class4.

    Required kwargs: po_name, cl_name, dwrr_traffic_class4.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class4").text = kwargs.pop('dwrr_traffic_class4')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class5(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class5.

    Required kwargs: po_name, cl_name, dwrr_traffic_class5.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class5").text = kwargs.pop('dwrr_traffic_class5')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class6(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class6.

    Required kwargs: po_name, cl_name, dwrr_traffic_class6.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class6").text = kwargs.pop('dwrr_traffic_class6')
    return cb(root)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class_last(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/dwrr-traffic-class-last.

    Required kwargs: po_name, cl_name, dwrr_traffic_class_last.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class-last").text = kwargs.pop('dwrr_traffic_class_last')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC1(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC1.

    Required kwargs: po_name, cl_name, TC1.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC1").text = kwargs.pop('TC1')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC2(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC2.

    Required kwargs: po_name, cl_name, TC2.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC2").text = kwargs.pop('TC2')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC3(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC3.

    Required kwargs: po_name, cl_name, TC3.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC3").text = kwargs.pop('TC3')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC4(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC4.

    Required kwargs: po_name, cl_name, TC4.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC4").text = kwargs.pop('TC4')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC5(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC5.

    Required kwargs: po_name, cl_name, TC5.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC5").text = kwargs.pop('TC5')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC6(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC6.

    Required kwargs: po_name, cl_name, TC6.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC6").text = kwargs.pop('TC6')
    return cb(root)
def policy_map_class_scheduler_strict_priority_TC7(self, **kwargs):
    """Build config for policy-map/class/scheduler/strict-priority/TC7.

    Required kwargs: po_name, cl_name, TC7.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    strict = ET.SubElement(ET.SubElement(cls_node, "scheduler"), "strict-priority")
    ET.SubElement(strict, "TC7").text = kwargs.pop('TC7')
    return cb(root)
def policy_map_class_priority_mapping_table_imprt_cee(self, **kwargs):
    """Build config for policy-map/class/priority-mapping-table/import/cee.

    Required kwargs: po_name, cl_name, cee.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    # element tag is the YANG node "import"; local names avoid the keyword
    table = ET.SubElement(cls_node, "priority-mapping-table")
    import_node = ET.SubElement(table, "import")
    ET.SubElement(import_node, "cee").text = kwargs.pop('cee')
    return cb(root)
def system_qos_qos_service_policy_direction(self, **kwargs):
    """Build config for system-qos/qos/service-policy/direction.

    Required kwargs: policy_map_name (list key), direction.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    sysqos = ET.SubElement(root, "system-qos", xmlns="urn:brocade.com:mgmt:brocade-policer")
    svc = ET.SubElement(ET.SubElement(sysqos, "qos"), "service-policy")
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    return cb(root)
def system_qos_qos_service_policy_policy_map_name(self, **kwargs):
    """Build config for system-qos/qos/service-policy/policy-map-name.

    Required kwargs: direction (list key), policy_map_name.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    sysqos = ET.SubElement(root, "system-qos", xmlns="urn:brocade.com:mgmt:brocade-policer")
    svc = ET.SubElement(ET.SubElement(sysqos, "qos"), "service-policy")
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    return cb(root)
def system_qos_qos_service_policy_attach_rbridge_id_add_rb_add_range(self, **kwargs):
    """Build config for system-qos/qos/service-policy/attach/rbridge-id/add/rb-add-range.

    Required kwargs: direction, policy_map_name (list keys), rb_add_range.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    sysqos = ET.SubElement(root, "system-qos", xmlns="urn:brocade.com:mgmt:brocade-policer")
    svc = ET.SubElement(ET.SubElement(sysqos, "qos"), "service-policy")
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    rbridge = ET.SubElement(ET.SubElement(svc, "attach"), "rbridge-id")
    add_node = ET.SubElement(rbridge, "add")
    ET.SubElement(add_node, "rb-add-range").text = kwargs.pop('rb_add_range')
    return cb(root)
def system_qos_qos_service_policy_attach_rbridge_id_remove_rb_remove_range(self, **kwargs):
    """Build config for system-qos/qos/service-policy/attach/rbridge-id/remove/rb-remove-range.

    Required kwargs: direction, policy_map_name (list keys), rb_remove_range.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    sysqos = ET.SubElement(root, "system-qos", xmlns="urn:brocade.com:mgmt:brocade-policer")
    svc = ET.SubElement(ET.SubElement(sysqos, "qos"), "service-policy")
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    rbridge = ET.SubElement(ET.SubElement(svc, "attach"), "rbridge-id")
    remove_node = ET.SubElement(rbridge, "remove")
    ET.SubElement(remove_node, "rb-remove-range").text = kwargs.pop('rb_remove_range')
    return cb(root)
def police_priority_map_name(self, **kwargs):
    """Build config for police-priority-map/name.

    Required kwarg: name. Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    return cb(root)
def police_priority_map_conform_map_pri0_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri0-conform.

    Required kwargs: name (map key), map_pri0_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri0-conform").text = kwargs.pop('map_pri0_conform')
    return cb(root)
def police_priority_map_conform_map_pri1_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri1-conform.

    Required kwargs: name (map key), map_pri1_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri1-conform").text = kwargs.pop('map_pri1_conform')
    return cb(root)
def police_priority_map_conform_map_pri2_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri2-conform.

    Required kwargs: name (map key), map_pri2_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri2-conform").text = kwargs.pop('map_pri2_conform')
    return cb(root)
def police_priority_map_conform_map_pri3_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri3-conform.

    Required kwargs: name (map key), map_pri3_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri3-conform").text = kwargs.pop('map_pri3_conform')
    return cb(root)
def police_priority_map_conform_map_pri4_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri4-conform.

    Required kwargs: name (map key), map_pri4_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri4-conform").text = kwargs.pop('map_pri4_conform')
    return cb(root)
def police_priority_map_conform_map_pri5_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri5-conform.

    Required kwargs: name (map key), map_pri5_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri5-conform").text = kwargs.pop('map_pri5_conform')
    return cb(root)
def police_priority_map_conform_map_pri6_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri6-conform.

    Required kwargs: name (map key), map_pri6_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri6-conform").text = kwargs.pop('map_pri6_conform')
    return cb(root)
def police_priority_map_conform_map_pri7_conform(self, **kwargs):
    """Build config for police-priority-map/conform/map-pri7-conform.

    Required kwargs: name (map key), map_pri7_conform.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    conform_node = ET.SubElement(ppm, "conform")
    ET.SubElement(conform_node, "map-pri7-conform").text = kwargs.pop('map_pri7_conform')
    return cb(root)
def police_priority_map_exceed_map_pri0_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri0-exceed.

    Required kwargs: name (map key), map_pri0_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri0-exceed").text = kwargs.pop('map_pri0_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri1_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri1-exceed.

    Required kwargs: name (map key), map_pri1_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri1-exceed").text = kwargs.pop('map_pri1_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri2_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri2-exceed.

    Required kwargs: name (map key), map_pri2_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri2-exceed").text = kwargs.pop('map_pri2_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri3_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri3-exceed.

    Required kwargs: name (map key), map_pri3_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri3-exceed").text = kwargs.pop('map_pri3_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri4_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri4-exceed.

    Required kwargs: name (map key), map_pri4_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri4-exceed").text = kwargs.pop('map_pri4_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri5_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri5-exceed.

    Required kwargs: name (map key), map_pri5_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri5-exceed").text = kwargs.pop('map_pri5_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri6_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri6-exceed.

    Required kwargs: name (map key), map_pri6_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri6-exceed").text = kwargs.pop('map_pri6_exceed')
    return cb(root)
def police_priority_map_exceed_map_pri7_exceed(self, **kwargs):
    """Build config for police-priority-map/exceed/map-pri7-exceed.

    Required kwargs: name (map key), map_pri7_exceed.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    ppm = ET.SubElement(root, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(ppm, "name").text = kwargs.pop('name')
    exceed_node = ET.SubElement(ppm, "exceed")
    ET.SubElement(exceed_node, "map-pri7-exceed").text = kwargs.pop('map_pri7_exceed')
    return cb(root)
def class_map_name(self, **kwargs):
    """Build config for class-map/name.

    Required kwarg: name. Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    cmap = ET.SubElement(root, "class-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(cmap, "name").text = kwargs.pop('name')
    return cb(root)
def class_map_match_access_group_access_group_name(self, **kwargs):
    """Build config for class-map/match/access-group/access-group-name.

    Required kwargs: name (map key), access_group_name.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    cmap = ET.SubElement(root, "class-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(cmap, "name").text = kwargs.pop('name')
    group = ET.SubElement(ET.SubElement(cmap, "match"), "access-group")
    ET.SubElement(group, "access-group-name").text = kwargs.pop('access_group_name')
    return cb(root)
def policy_map_po_name(self, **kwargs):
    """Build config for policy-map/po-name.

    Required kwarg: po_name. Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    return cb(root)
def policy_map_class_cl_name(self, **kwargs):
    """Build config for policy-map/class/cl-name.

    Required kwargs: po_name (map key), cl_name.
    Optional kwarg: callback (defaults to self._callback).
    """
    cb = kwargs.pop('callback', self._callback)
    root = ET.Element("config")
    pmap = ET.SubElement(root, "policy-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls_node = ET.SubElement(pmap, "class")
    ET.SubElement(cls_node, "cl-name").text = kwargs.pop('cl_name')
    return cb(root)
def policy_map_class_police_cir(self, **kwargs):
    """Build config setting <police>/<cir> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), cir, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "cir").text = kwargs.pop('cir')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_cbs(self, **kwargs):
    """Build config setting <police>/<cbs> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), cbs, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "cbs").text = kwargs.pop('cbs')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_eir(self, **kwargs):
    """Build config setting <police>/<eir> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), eir, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "eir").text = kwargs.pop('eir')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_ebs(self, **kwargs):
    """Build config setting <police>/<ebs> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), ebs, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "ebs").text = kwargs.pop('ebs')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_set_priority(self, **kwargs):
    """Build config setting <police>/<set-priority> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), set_priority, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "set-priority").text = kwargs.pop('set_priority')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_conform_set_dscp(self, **kwargs):
    """Build config setting <police>/<conform-set-dscp> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), conform_set_dscp, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "conform-set-dscp").text = kwargs.pop('conform_set_dscp')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_conform_set_prec(self, **kwargs):
    """Build config setting <police>/<conform-set-prec> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), conform_set_prec, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "conform-set-prec").text = kwargs.pop('conform_set_prec')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_conform_set_tc(self, **kwargs):
    """Build config setting <police>/<conform-set-tc> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), conform_set_tc, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "conform-set-tc").text = kwargs.pop('conform_set_tc')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_exceed_set_dscp(self, **kwargs):
    """Build config setting <police>/<exceed-set-dscp> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), exceed_set_dscp, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "exceed-set-dscp").text = kwargs.pop('exceed_set_dscp')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_exceed_set_prec(self, **kwargs):
    """Build config setting <police>/<exceed-set-prec> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), exceed_set_prec, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "exceed-set-prec").text = kwargs.pop('exceed_set_prec')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_police_exceed_set_tc(self, **kwargs):
    """Build config setting <police>/<exceed-set-tc> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), exceed_set_tc, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    police = ET.SubElement(cls, "police")
    ET.SubElement(police, "exceed-set-tc").text = kwargs.pop('exceed_set_tc')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_set_set_cos_tc_cos(self, **kwargs):
    """Build config setting <set>/<set_cos_tc>/<cos> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), cos, callback (optional).

    Fix: the local for the <set> element was named ``set``, shadowing
    the builtin; renamed to ``set_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    set_el = ET.SubElement(cls, "set")
    set_cos_tc = ET.SubElement(set_el, "set_cos_tc")
    ET.SubElement(set_cos_tc, "cos").text = kwargs.pop('cos')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_set_set_cos_tc_traffic_class(self, **kwargs):
    """Build config setting <set>/<set_cos_tc>/<traffic-class> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), traffic_class, callback (optional).

    Fix: the local for the <set> element was named ``set``, shadowing
    the builtin; renamed to ``set_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    set_el = ET.SubElement(cls, "set")
    set_cos_tc = ET.SubElement(set_el, "set_cos_tc")
    ET.SubElement(set_cos_tc, "traffic-class").text = kwargs.pop('traffic_class')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_set_set_dscp_dscp(self, **kwargs):
    """Build config setting <set>/<set_dscp>/<dscp> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dscp, callback (optional).

    Fix: the local for the <set> element was named ``set``, shadowing
    the builtin; renamed to ``set_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    set_el = ET.SubElement(cls, "set")
    set_dscp = ET.SubElement(set_el, "set_dscp")
    ET.SubElement(set_dscp, "dscp").text = kwargs.pop('dscp')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_span_session(self, **kwargs):
    """Build config setting <span>/<session> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), session, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    span = ET.SubElement(cls, "span")
    ET.SubElement(span, "session").text = kwargs.pop('session')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_cos_mutation(self, **kwargs):
    """Build config setting <map>/<cos-mutation> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), cos_mutation, callback (optional).

    Fix: the local for the <map> element was named ``map``, shadowing
    the builtin; renamed to ``map_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    map_el = ET.SubElement(cls, "map")
    ET.SubElement(map_el, "cos-mutation").text = kwargs.pop('cos_mutation')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_cos_traffic_class(self, **kwargs):
    """Build config setting <map>/<cos-traffic-class> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), cos_traffic_class, callback (optional).

    Fix: the local for the <map> element was named ``map``, shadowing
    the builtin; renamed to ``map_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    map_el = ET.SubElement(cls, "map")
    ET.SubElement(map_el, "cos-traffic-class").text = kwargs.pop('cos_traffic_class')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_dscp_cos(self, **kwargs):
    """Build config setting <map>/<dscp-cos> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dscp_cos, callback (optional).

    Fix: the local for the <map> element was named ``map``, shadowing
    the builtin; renamed to ``map_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    map_el = ET.SubElement(cls, "map")
    ET.SubElement(map_el, "dscp-cos").text = kwargs.pop('dscp_cos')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_dscp_traffic_class(self, **kwargs):
    """Build config setting <map>/<dscp-traffic-class> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dscp_traffic_class, callback (optional).

    Fix: the local for the <map> element was named ``map``, shadowing
    the builtin; renamed to ``map_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    map_el = ET.SubElement(cls, "map")
    ET.SubElement(map_el, "dscp-traffic-class").text = kwargs.pop('dscp_traffic_class')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_dscp_mutation(self, **kwargs):
    """Build config setting <map>/<dscp-mutation> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dscp_mutation, callback (optional).

    Fix: the local for the <map> element was named ``map``, shadowing
    the builtin; renamed to ``map_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    map_el = ET.SubElement(cls, "map")
    ET.SubElement(map_el, "dscp-mutation").text = kwargs.pop('dscp_mutation')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_map_sflow(self, **kwargs):
    """Build config setting <map>/<sflow> for the policy-map class
    keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), sflow, callback (optional).

    Fix: the local for the <map> element was named ``map``, shadowing
    the builtin; renamed to ``map_el``. XML output is unchanged.
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    map_el = ET.SubElement(cls, "map")
    ET.SubElement(map_el, "sflow").text = kwargs.pop('sflow')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_shape_shaping_rate(self, **kwargs):
    """Build config setting <shape>/<shaping_rate> for the policy-map
    class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), shaping_rate, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    shape = ET.SubElement(cls, "shape")
    ET.SubElement(shape, "shaping_rate").text = kwargs.pop('shaping_rate')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_priority_number(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<priority-number>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), priority_number, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "priority-number").text = kwargs.pop('priority_number')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_scheduler_type(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<scheduler-type>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), scheduler_type, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "scheduler-type").text = kwargs.pop('scheduler_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class0(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class0>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class0, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class0").text = kwargs.pop('dwrr_traffic_class0')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class1(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class1>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class1, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class1").text = kwargs.pop('dwrr_traffic_class1')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class2(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class2>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class2, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class2").text = kwargs.pop('dwrr_traffic_class2')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class3(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class3>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class3, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class3").text = kwargs.pop('dwrr_traffic_class3')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class4(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class4>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class4, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class4").text = kwargs.pop('dwrr_traffic_class4')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class5(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class5>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class5, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class5").text = kwargs.pop('dwrr_traffic_class5')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class6(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class6>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class6, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class6").text = kwargs.pop('dwrr_traffic_class6')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_dwrr_traffic_class_last(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<dwrr-traffic-class-last>
    for the policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), dwrr_traffic_class_last, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "dwrr-traffic-class-last").text = kwargs.pop('dwrr_traffic_class_last')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC1(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC1> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC1, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC1").text = kwargs.pop('TC1')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC2(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC2> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC2, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC2").text = kwargs.pop('TC2')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC3(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC3> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC3, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC3").text = kwargs.pop('TC3')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC4(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC4> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC4, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC4").text = kwargs.pop('TC4')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC5(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC5> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC5, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC5").text = kwargs.pop('TC5')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC6(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC6> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC6, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC6").text = kwargs.pop('TC6')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_scheduler_strict_priority_TC7(self, **kwargs):
    """Build config setting <scheduler>/<strict-priority>/<TC7> for the
    policy-map class keyed by po_name/cl_name.

    Kwargs: po_name (key), cl_name (key), TC7, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    sched = ET.SubElement(cls, "scheduler")
    strict = ET.SubElement(sched, "strict-priority")
    ET.SubElement(strict, "TC7").text = kwargs.pop('TC7')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def policy_map_class_priority_mapping_table_imprt_cee(self, **kwargs):
    """Build config setting <priority-mapping-table>/<import>/<cee> for
    the policy-map class keyed by po_name/cl_name.

    The local is spelled ``imprt`` because ``import`` is a keyword; the
    emitted element tag is still "import".

    Kwargs: po_name (key), cl_name (key), cee, callback (optional).
    """
    config = ET.Element("config")
    pmap = ET.SubElement(config, "policy-map",
                         xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(pmap, "po-name").text = kwargs.pop('po_name')
    cls = ET.SubElement(pmap, "class")
    ET.SubElement(cls, "cl-name").text = kwargs.pop('cl_name')
    pmt = ET.SubElement(cls, "priority-mapping-table")
    imprt = ET.SubElement(pmt, "import")
    ET.SubElement(imprt, "cee").text = kwargs.pop('cee')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def system_qos_qos_service_policy_direction(self, **kwargs):
    """Build config setting <direction> on the system-qos service-policy
    keyed by <policy-map-name>.

    Kwargs: policy_map_name (key), direction, callback (optional).
    """
    config = ET.Element("config")
    sysqos = ET.SubElement(config, "system-qos",
                           xmlns="urn:brocade.com:mgmt:brocade-policer")
    qos = ET.SubElement(sysqos, "qos")
    svc = ET.SubElement(qos, "service-policy")
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def system_qos_qos_service_policy_policy_map_name(self, **kwargs):
    """Build config setting <policy-map-name> on the system-qos
    service-policy keyed by <direction>.

    Kwargs: direction (key), policy_map_name, callback (optional).
    """
    config = ET.Element("config")
    sysqos = ET.SubElement(config, "system-qos",
                           xmlns="urn:brocade.com:mgmt:brocade-policer")
    qos = ET.SubElement(sysqos, "qos")
    svc = ET.SubElement(qos, "service-policy")
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def system_qos_qos_service_policy_attach_rbridge_id_add_rb_add_range(self, **kwargs):
    """Build config setting <attach>/<rbridge-id>/<add>/<rb-add-range>
    on the system-qos service-policy keyed by direction/policy-map-name.

    Kwargs: direction (key), policy_map_name (key), rb_add_range,
    callback (optional).
    """
    config = ET.Element("config")
    sysqos = ET.SubElement(config, "system-qos",
                           xmlns="urn:brocade.com:mgmt:brocade-policer")
    qos = ET.SubElement(sysqos, "qos")
    svc = ET.SubElement(qos, "service-policy")
    ET.SubElement(svc, "direction").text = kwargs.pop('direction')
    ET.SubElement(svc, "policy-map-name").text = kwargs.pop('policy_map_name')
    attach = ET.SubElement(svc, "attach")
    rbridge = ET.SubElement(attach, "rbridge-id")
    add_el = ET.SubElement(rbridge, "add")
    ET.SubElement(add_el, "rb-add-range").text = kwargs.pop('rb_add_range')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def system_qos_qos_service_policy_attach_rbridge_id_remove_rb_remove_range(self, **kwargs):
    """Build netconf XML removing an rbridge-id range from a service-policy attach.

    Pops ``direction`` and ``policy_map_name`` (list keys), ``rb_remove_range``
    and an optional ``callback`` from kwargs; returns ``callback(config)``.
    """
    cfg = ET.Element("config")
    sys_qos_el = ET.SubElement(cfg, "system-qos", xmlns="urn:brocade.com:mgmt:brocade-policer")
    qos_el = ET.SubElement(sys_qos_el, "qos")
    svc_pol = ET.SubElement(qos_el, "service-policy")
    # List keys first, then the attach subtree.
    ET.SubElement(svc_pol, "direction").text = kwargs.pop('direction')
    ET.SubElement(svc_pol, "policy-map-name").text = kwargs.pop('policy_map_name')
    attach_el = ET.SubElement(svc_pol, "attach")
    rbridge_el = ET.SubElement(attach_el, "rbridge-id")
    remove_el = ET.SubElement(rbridge_el, "remove")
    ET.SubElement(remove_el, "rb-remove-range").text = kwargs.pop('rb_remove_range')
    callback = kwargs.pop('callback', self._callback)
    return callback(cfg)
| 46.955294
| 120
| 0.651784
| 12,554
| 99,780
| 4.923132
| 0.008444
| 0.142901
| 0.071095
| 0.053847
| 0.997848
| 0.997848
| 0.997848
| 0.997848
| 0.997848
| 0.997848
| 0
| 0.003849
| 0.218811
| 99,780
| 2,125
| 121
| 46.955294
| 0.789062
| 0.038254
| 0
| 0.997497
| 1
| 0
| 0.168171
| 0.050531
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081977
| false
| 0
| 0.001877
| 0
| 0.165832
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
91b640f4f350579f87542f8115f977fb1b41b790
| 3,207
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIsisTopology/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIsisTopology/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIsisTopology/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden expected parse for a 'show isis topology' output (unit-test fixture).
# Structure: tag -> IS-IS process tag -> level (1/2) -> hosts -> hostname ->
# metric plus per-interface next-hop and SNPA; 'flex_algo' is the
# flex-algorithm id reported for that level.
# NOTE(review): values must match the captured device output exactly — do not
# reformat or "fix" numbers here.
expected_output = {
    "tag": {
        "1": {
            "level": {
                1: {
                    "hosts": {
                        "R1-asr1k-43": {
                            "metric": 33554428,
                            "interface": {
                                "Gi0/1/4": {
                                    "next_hop": "R3-asr1k-53",
                                    "snpa": "c014.fe84.b306"
                                }
                            }
                        },
                        "R3-asr1k-53": {
                            "metric": 16777214,
                            "interface": {
                                "Gi0/1/4": {
                                    "next_hop": "R3-asr1k-53",
                                    "snpa": "c014.fe84.b306"
                                }
                            }
                        },
                        "R5-asr1k-11": {},
                        "R6-asr1k-20": {
                            "metric": 16777214,
                            "interface": {
                                "Gi0/0/2": {
                                    "next_hop": "R6-asr1k-20",
                                    "snpa": "3c57.31c1.fb32"
                                },
                                "Gi0/0/3": {
                                    "next_hop": "R6-asr1k-20",
                                    "snpa": "3c57.31c1.fb33"
                                }
                            }
                        }
                    },
                    "flex_algo": 129
                },
                2: {
                    "hosts": {
                        "R1-asr1k-43": {
                            "metric": 33554428,
                            "interface": {
                                "Gi0/1/4": {
                                    "next_hop": "R3-asr1k-53",
                                    "snpa": "c014.fe84.b306"
                                }
                            }
                        },
                        "R3-asr1k-53": {
                            "metric": 16777214,
                            "interface": {
                                "Gi0/1/4": {
                                    "next_hop": "R3-asr1k-53",
                                    "snpa": "c014.fe84.b306"
                                }
                            }
                        },
                        "R5-asr1k-11": {},
                        "R6-asr1k-20": {
                            "metric": 16777214,
                            "interface": {
                                "Gi0/0/2": {
                                    "next_hop": "R6-asr1k-20",
                                    "snpa": "3c57.31c1.fb32"
                                },
                                "Gi0/0/3": {
                                    "next_hop": "R6-asr1k-20",
                                    "snpa": "3c57.31c1.fb33"
                                }
                            }
                        }
                    },
                    "flex_algo": 129
                }
            }
        }
    }
}
| 39.109756
| 62
| 0.178983
| 153
| 3,207
| 3.679739
| 0.24183
| 0.099467
| 0.095915
| 0.099467
| 0.955595
| 0.955595
| 0.955595
| 0.955595
| 0.955595
| 0.955595
| 0
| 0.229418
| 0.715934
| 3,207
| 82
| 63
| 39.109756
| 0.388584
| 0
| 0
| 0.585366
| 0
| 0
| 0.176746
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
91bce2a12b0894ef5c9a21dc81afb3f881ef1351
| 9,949
|
py
|
Python
|
Vehicle/lattice_based_planner.py
|
yuchaotaigu/parking-assignment
|
0ff15db06b0c32cabf6a64f760481f625c464561
|
[
"MIT"
] | null | null | null |
Vehicle/lattice_based_planner.py
|
yuchaotaigu/parking-assignment
|
0ff15db06b0c32cabf6a64f760481f625c464561
|
[
"MIT"
] | null | null | null |
Vehicle/lattice_based_planner.py
|
yuchaotaigu/parking-assignment
|
0ff15db06b0c32cabf6a64f760481f625c464561
|
[
"MIT"
] | null | null | null |
import math
import sys
import numpy as np
try:
import cubic_spline_planner
except ImportError:
raise
class Primitives:
    """
    Motion primitives for lattice-based path assembling.

    Attributes ``cx``, ``cy``, ``cyaw`` and ``ck`` map an incoming-direction
    index i (-1 and 1..4; 0 slots exist as placeholders) and an
    outgoing-direction index j to lists of x, y, yaw and curvature samples.
    Straight segments are sampled every ``ds``; turns are quarter-circle arcs
    of the given turning ``radius``.

    Bug fixes vs. the original: in the ``i == 4`` branch several sample lists
    used inconsistent ``range`` lengths (cyaw one element longer than cx/cy/ck
    for j==1/j==2, and four different lengths for j==4).  All four lists of a
    primitive now have equal length, mirroring the symmetric ``i == 3``
    branch.
    """

    def __init__(self, radius, ds):
        # Every (i, j) slot starts as an empty list; only valid direction
        # pairs are filled below (j == 0 keys are added by plain assignment,
        # matching the original layout).
        dirs = (0, 1, 2, 3, 4, -1)
        self.cx = {i: {j: [] for j in range(1, 5)} for i in dirs}
        self.cy = {i: {j: [] for j in range(1, 5)} for i in dirs}
        self.cyaw = {i: {j: [] for j in range(1, 5)} for i in dirs}
        self.ck = {i: {j: [] for j in range(1, 5)} for i in dirs}

        nd = int(radius / ds)      # straight-segment samples per radius length
        d_theta = np.pi / 4 / nd   # angle step: 2*nd steps sweep pi/2
        k_r = 1 / radius           # arc curvature magnitude

        for i in range(-1, 5):
            if i == -1:
                # Start primitives: straight segments leaving the initial pose.
                for j in range(1, 5):
                    if j == 1:
                        self.cx[i][j] = [-idx * ds for idx in range(nd)]
                        self.cy[i][j] = [0.0 for idx in range(nd)]
                        self.cyaw[i][j] = [np.pi for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 2:
                        self.cx[i][j] = [idx * ds for idx in range(nd)]
                        self.cy[i][j] = [0.0 for idx in range(nd)]
                        self.cyaw[i][j] = [0.0 for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 3:
                        self.cx[i][j] = [0.0 for idx in range(nd)]
                        self.cy[i][j] = [idx * ds for idx in range(nd)]
                        self.cyaw[i][j] = [np.pi / 2 for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 4:
                        self.cx[i][j] = [0.0 for idx in range(nd)]
                        self.cy[i][j] = [radius - idx * ds for idx in range(nd)]
                        self.cyaw[i][j] = [-np.pi / 2 for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
            elif i == 1:
                # Arriving heading -x (yaw = pi).
                for j in range(5):
                    if j == 0:
                        self.cx[i][j] = [radius - idx * ds for idx in range(nd)]
                        self.cy[i][j] = [0.0 for idx in range(nd)]
                        self.cyaw[i][j] = [np.pi for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 1:
                        self.cx[i][j] = [radius - idx * ds for idx in range(2 * nd)]
                        self.cy[i][j] = [0.0 for idx in range(2 * nd)]
                        self.cyaw[i][j] = [np.pi for idx in range(2 * nd)]
                        self.ck[i][j] = [0.0 for idx in range(2 * nd)]
                    elif j == 3:
                        self.cx[i][j] = [radius - radius * math.sin(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [radius - radius * math.cos(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cyaw[i][j] = [np.pi - idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [-k_r for idx in range(1, 2 * nd)]
                    elif j == 4:
                        self.cx[i][j] = [radius + radius * math.cos(np.pi / 2 + idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [radius * math.sin(np.pi / 2 + idx * d_theta) - radius for idx in range(1, 2 * nd)]
                        self.cyaw[i][j] = [-np.pi + idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [k_r for idx in range(1, 2 * nd)]
            elif i == 2:
                # Arriving heading +x (yaw = 0).
                for j in range(5):
                    if j == 0:
                        self.cx[i][j] = [-radius + idx * ds for idx in range(nd)]
                        self.cy[i][j] = [0.0 for idx in range(nd)]
                        self.cyaw[i][j] = [0.0 for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 2:
                        self.cx[i][j] = [-radius + idx * ds for idx in range(2 * nd)]
                        self.cy[i][j] = [0.0 for idx in range(2 * nd)]
                        self.cyaw[i][j] = [0.0 for idx in range(2 * nd)]
                        self.ck[i][j] = [0.0 for idx in range(2 * nd)]
                    elif j == 3:
                        self.cx[i][j] = [-radius + radius * math.sin(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [radius - radius * math.cos(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cyaw[i][j] = [idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [k_r for idx in range(1, 2 * nd)]
                    elif j == 4:
                        self.cx[i][j] = [-radius + radius * math.sin(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [-radius + radius * math.cos(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cyaw[i][j] = [-idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [-k_r for idx in range(1, 2 * nd)]
            elif i == 3:
                # Arriving heading +y (yaw = pi/2).
                for j in range(5):
                    if j == 0:
                        self.cx[i][j] = [0.0 for idx in range(nd)]
                        self.cy[i][j] = [-radius + idx * ds for idx in range(nd)]
                        self.cyaw[i][j] = [np.pi / 2 for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 1:
                        self.cx[i][j] = [-radius + radius * math.cos(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [-radius + radius * math.sin(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cyaw[i][j] = [np.pi / 2 + idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [k_r for idx in range(1, 2 * nd)]
                    elif j == 2:
                        self.cx[i][j] = [radius + radius * math.cos(np.pi - idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [-radius + radius * math.sin(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cyaw[i][j] = [np.pi / 2 - idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [-k_r for idx in range(1, 2 * nd)]
                    elif j == 3:
                        self.cx[i][j] = [0.0 for idx in range(2 * nd)]
                        self.cy[i][j] = [-radius + idx * ds for idx in range(2 * nd)]
                        self.cyaw[i][j] = [np.pi / 2 for idx in range(2 * nd)]
                        self.ck[i][j] = [0.0 for idx in range(2 * nd)]
            elif i == 4:
                # Arriving heading -y (yaw = -pi/2).
                for j in range(5):
                    if j == 0:
                        self.cx[i][j] = [0.0 for idx in range(nd)]
                        self.cy[i][j] = [radius - idx * ds for idx in range(nd)]
                        self.cyaw[i][j] = [-np.pi / 2 for idx in range(nd)]
                        self.ck[i][j] = [0.0 for idx in range(nd)]
                    elif j == 1:
                        self.cx[i][j] = [-radius + radius * math.cos(-idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [radius + radius * math.sin(-idx * d_theta) for idx in range(1, 2 * nd)]
                        # FIX: was range(2*nd) — one element longer than cx/cy/ck.
                        self.cyaw[i][j] = [-np.pi / 2 - idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [-k_r for idx in range(1, 2 * nd)]
                    elif j == 2:
                        self.cx[i][j] = [radius - radius * math.cos(idx * d_theta) for idx in range(1, 2 * nd)]
                        self.cy[i][j] = [radius - radius * math.sin(idx * d_theta) for idx in range(1, 2 * nd)]
                        # FIX: was range(2*nd) — one element longer than cx/cy/ck.
                        self.cyaw[i][j] = [-np.pi / 2 + idx * d_theta for idx in range(1, 2 * nd)]
                        self.ck[i][j] = [k_r for idx in range(1, 2 * nd)]
                    elif j == 4:
                        # FIX: the four lists used four different lengths
                        # (range(1,2*nd) / range(2*nd) / range(nd) / range(nd));
                        # all now use range(2*nd), mirroring i==3, j==3.
                        self.cx[i][j] = [0.0 for idx in range(2 * nd)]
                        self.cy[i][j] = [radius - idx * ds for idx in range(2 * nd)]
                        self.cyaw[i][j] = [-np.pi / 2 for idx in range(2 * nd)]
                        self.ck[i][j] = [0.0 for idx in range(2 * nd)]
def calc_lattice_course(primi, x_crds, y_crds, u_path):
    """Assemble a full path by translating motion primitives onto lattice nodes.

    Args:
        primi: a ``Primitives`` instance (dict-of-dict ``cx``/``cy``/``cyaw``/``ck``).
        x_crds, y_crds: node coordinates; one primitive is emitted per node.
        u_path: numpy array of direction indices, length ``len(x_crds) + 1``;
            consecutive pairs (u_path[i], u_path[i+1]) select the primitive.

    Returns:
        Tuple ``(rx, ry, ryaw, rk)`` of concatenated x, y, yaw and curvature
        sample lists.

    Fixes vs. the original: ``u_path.astype(int)`` returns a *new* array — the
    original discarded it, so the cast never happened; the result is now
    rebound.  Leftover debug ``print`` calls were removed.
    """
    path_dim = len(x_crds)
    u_path = u_path.astype(int)  # astype returns a copy; must rebind
    rx, ry, ryaw, rk = [], [], [], []
    for i in range(path_dim):
        seg_from, seg_to = u_path[i], u_path[i + 1]
        # Primitives are stored at the origin; shift them to node i.
        rx.extend(ix + x_crds[i] for ix in primi.cx[seg_from][seg_to])
        ry.extend(iy + y_crds[i] for iy in primi.cy[seg_from][seg_to])
        ryaw.extend(primi.cyaw[seg_from][seg_to])
        rk.extend(primi.ck[seg_from][seg_to])
    return rx, ry, ryaw, rk
def main():
    """Demo: assemble a lattice path from motion primitives and plot it."""
    import matplotlib.pyplot as plt
    primi = Primitives(4.5, 0.1)
    # Lattice node coordinates; u_path gives the direction index at each node
    # (one more entry than nodes — consecutive pairs select a primitive).
    x_crds = np.array([-8, 1, 10, 19, 28.0, 28, 28, 28, 28])
    y_crds = np.array([1.0, 1.0, 1.0, 1.0, 1, 10, 19, 28, 37])
    u_path = np.array([-1, 2, 2, 2, 2, 3, 3, 3, 3, 0])
    rx, ry, ryaw, rk = calc_lattice_course(primi, x_crds, y_crds, u_path)
    plt.subplots(1)
    plt.plot(rx, ry, "-r", label="spline")
    plt.grid(True)
    plt.axis("equal")
    plt.xlabel("x[m]")
    plt.ylabel("y[m]")
    plt.legend()
    plt.show()


if __name__ == '__main__':
    main()
| 45.847926
| 111
| 0.413006
| 1,619
| 9,949
| 2.489809
| 0.060531
| 0.151079
| 0.15877
| 0.258001
| 0.828827
| 0.825105
| 0.792111
| 0.772017
| 0.772017
| 0.772017
| 0
| 0.058038
| 0.390391
| 9,949
| 216
| 112
| 46.060185
| 0.606595
| 0.004423
| 0
| 0.388889
| 0
| 0
| 0.002936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016667
| false
| 0
| 0.033333
| 0
| 0.061111
| 0.011111
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91c386c543156ab215d243e9f6c7e4ef9e92264e
| 11,446
|
py
|
Python
|
venv/lib/python3.8/site-packages/spaceone/api/repository/v1/policy_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/repository/v1/policy_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/repository/v1/policy_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from spaceone.api.repository.v1 import policy_pb2 as spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2
class PolicyStub(object):
    """gRPC client stub for spaceone.api.repository.v1.Policy.

    Auto-generated by the gRPC Python protocol compiler — do not hand-edit
    the RPC wiring below.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One unary-unary callable per RPC; request serializers and response
        # deserializers come from the generated policy_pb2 module.
        self.create = channel.unary_unary(
                '/spaceone.api.repository.v1.Policy/create',
                request_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.CreatePolicyRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.FromString,
                )
        self.update = channel.unary_unary(
                '/spaceone.api.repository.v1.Policy/update',
                request_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.UpdatePolicyRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.FromString,
                )
        # delete returns google.protobuf.Empty.
        self.delete = channel.unary_unary(
                '/spaceone.api.repository.v1.Policy/delete',
                request_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.get = channel.unary_unary(
                '/spaceone.api.repository.v1.Policy/get',
                request_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.GetRepositoryPolicyRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.FromString,
                )
        self.list = channel.unary_unary(
                '/spaceone.api.repository.v1.Policy/list',
                request_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyQuery.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PoliciesInfo.FromString,
                )
        # stat returns a free-form google.protobuf.Struct.
        self.stat = channel.unary_unary(
                '/spaceone.api.repository.v1.Policy/stat',
                request_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyStatQuery.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_struct__pb2.Struct.FromString,
                )
class PolicyServicer(object):
    """Server-side interface for spaceone.api.repository.v1.Policy.

    Auto-generated base class; subclass and override the methods below to
    implement the service.  Every default implementation answers UNIMPLEMENTED.
    """

    def create(self, request, context):
        """Create a policy (default: UNIMPLEMENTED)."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def update(self, request, context):
        """Update a policy (default: UNIMPLEMENTED)."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def delete(self, request, context):
        """Delete a policy (default: UNIMPLEMENTED)."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def get(self, request, context):
        """Fetch a single policy (default: UNIMPLEMENTED)."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def list(self, request, context):
        """List policies matching a query (default: UNIMPLEMENTED)."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def stat(self, request, context):
        """Run a stat query over policies (default: UNIMPLEMENTED)."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_PolicyServicer_to_server(servicer, server):
    """Register a PolicyServicer implementation with a grpc.Server.

    Auto-generated: builds one rpc handler per method (deserializer/serializer
    mirror the client stub's wiring) and adds them under the fully-qualified
    service name.
    """
    rpc_method_handlers = {
            'create': grpc.unary_unary_rpc_method_handler(
                    servicer.create,
                    request_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.CreatePolicyRequest.FromString,
                    response_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.SerializeToString,
            ),
            'update': grpc.unary_unary_rpc_method_handler(
                    servicer.update,
                    request_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.UpdatePolicyRequest.FromString,
                    response_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.SerializeToString,
            ),
            'delete': grpc.unary_unary_rpc_method_handler(
                    servicer.delete,
                    request_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'get': grpc.unary_unary_rpc_method_handler(
                    servicer.get,
                    request_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.GetRepositoryPolicyRequest.FromString,
                    response_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.SerializeToString,
            ),
            'list': grpc.unary_unary_rpc_method_handler(
                    servicer.list,
                    request_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyQuery.FromString,
                    response_serializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PoliciesInfo.SerializeToString,
            ),
            'stat': grpc.unary_unary_rpc_method_handler(
                    servicer.stat,
                    request_deserializer=spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyStatQuery.FromString,
                    response_serializer=google_dot_protobuf_dot_struct__pb2.Struct.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'spaceone.api.repository.v1.Policy', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
 # This class is part of an EXPERIMENTAL API.
class Policy(object):
    """Static per-method helpers for spaceone.api.repository.v1.Policy.

    Auto-generated convenience wrappers over ``grpc.experimental.unary_unary``
    — each creates an ad-hoc channel per call (experimental gRPC API).
    """

    @staticmethod
    def create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.repository.v1.Policy/create',
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.CreatePolicyRequest.SerializeToString,
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def update(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.repository.v1.Policy/update',
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.UpdatePolicyRequest.SerializeToString,
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.repository.v1.Policy/delete',
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.repository.v1.Policy/get',
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.GetRepositoryPolicyRequest.SerializeToString,
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def list(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.repository.v1.Policy/list',
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyQuery.SerializeToString,
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PoliciesInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def stat(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.repository.v1.Policy/stat',
            spaceone_dot_api_dot_repository_dot_v1_dot_policy__pb2.PolicyStatQuery.SerializeToString,
            google_dot_protobuf_dot_struct__pb2.Struct.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 48.91453
| 135
| 0.690896
| 1,191
| 11,446
| 6.219983
| 0.09152
| 0.038877
| 0.058585
| 0.071139
| 0.900243
| 0.896328
| 0.879455
| 0.826539
| 0.760934
| 0.745545
| 0
| 0.009986
| 0.238861
| 11,446
| 233
| 136
| 49.124464
| 0.840335
| 0.064914
| 0
| 0.541237
| 1
| 0
| 0.076829
| 0.048112
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072165
| false
| 0
| 0.020619
| 0.030928
| 0.139175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37e112ceb777fe9805650ec24b77c307d98eed31
| 137
|
py
|
Python
|
checkout_sdk/customers/customers_four.py
|
riaz-bordie-cko/checkout-sdk-python
|
d9bc073306c1a98544c326be693ed722576ea895
|
[
"MIT"
] | null | null | null |
checkout_sdk/customers/customers_four.py
|
riaz-bordie-cko/checkout-sdk-python
|
d9bc073306c1a98544c326be693ed722576ea895
|
[
"MIT"
] | null | null | null |
checkout_sdk/customers/customers_four.py
|
riaz-bordie-cko/checkout-sdk-python
|
d9bc073306c1a98544c326be693ed722576ea895
|
[
"MIT"
] | null | null | null |
import checkout_sdk.customers.customers
class CustomerRequest(checkout_sdk.customers.customers.CustomerRequest):
    """Customer request payload extended for the 'Four' API (per module name)."""
    # assumes entries are payment-instrument ids accepted by the API — TODO confirm
    instruments: list
| 22.833333
| 72
| 0.846715
| 14
| 137
| 8.142857
| 0.571429
| 0.192982
| 0.350877
| 0.508772
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087591
| 137
| 5
| 73
| 27.4
| 0.912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5304248df9e449f0635b0cfd33ffb5ff92d9a9d1
| 432
|
py
|
Python
|
pytorch_grad_cam/__init__.py
|
iynaur/pytorch-grad-cam
|
ffec997bff40c7dba2b29005528c6a5893a11849
|
[
"MIT"
] | 2
|
2021-06-09T12:18:54.000Z
|
2021-07-18T00:10:03.000Z
|
pytorch_grad_cam/__init__.py
|
wuyongfa-genius/pytorch-grad-cam
|
c6e820df00656481b00d0ee97e438345422cd5e7
|
[
"MIT"
] | null | null | null |
pytorch_grad_cam/__init__.py
|
wuyongfa-genius/pytorch-grad-cam
|
c6e820df00656481b00d0ee97e438345422cd5e7
|
[
"MIT"
] | 1
|
2022-03-15T02:25:43.000Z
|
2022-03-15T02:25:43.000Z
|
from pytorch_grad_cam.grad_cam import GradCAM
from pytorch_grad_cam.ablation_cam import AblationCAM
from pytorch_grad_cam.xgrad_cam import XGradCAM
from pytorch_grad_cam.grad_cam_plusplus import GradCAMPlusPlus
from pytorch_grad_cam.score_cam import ScoreCAM
from pytorch_grad_cam.eigen_cam import EigenCAM
from pytorch_grad_cam.eigen_grad_cam import EigenGradCAM
from pytorch_grad_cam.guided_backprop import GuidedBackpropReLUModel
| 54
| 68
| 0.909722
| 66
| 432
| 5.560606
| 0.30303
| 0.209809
| 0.326975
| 0.392371
| 0.26158
| 0.13624
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071759
| 432
| 8
| 68
| 54
| 0.915212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
534f177c22c043a3b57cf8db63d75ef230b8881c
| 4,294
|
py
|
Python
|
tests/test_read_csv.py
|
PavelHudau/simple-safe-csv
|
682f9b321f2e2a8aeac0a799383b705fc66bb80b
|
[
"MIT"
] | null | null | null |
tests/test_read_csv.py
|
PavelHudau/simple-safe-csv
|
682f9b321f2e2a8aeac0a799383b705fc66bb80b
|
[
"MIT"
] | null | null | null |
tests/test_read_csv.py
|
PavelHudau/simple-safe-csv
|
682f9b321f2e2a8aeac0a799383b705fc66bb80b
|
[
"MIT"
] | null | null | null |
import datetime as dt
from typing import List
import pytz
import src.csv_gen as csv_gen
from .test_model_and_files import test_model as tm
def test_read_valid_file_has_header():
    """Reading a CSV that includes a header row populates all model fields."""
    # GIVEN
    path = "./tests/test_model_and_files/test_valid_with_header.csv"
    # WHEN
    data: List[tm.TestModel] = csv_gen.read_from_csv(path, tm.TestModel)
    # THEN — compare row by row against the expected (int, string, datetime) triples
    expected = [
        (1, "hello 1", dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
        (2, "hello 2", dt.datetime(2020, 10, 30, 10, 42, 49, 559897, tzinfo=pytz.UTC)),
        (3, "hello 3", None),
        (4, "", dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
        (None, "hello 5", dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
    ]
    assert len(data) == len(expected)
    for row, (int_v, str_v, dt_v) in zip(data, expected):
        assert row.int_field == int_v
        assert row.string_field == str_v
        assert row.datetime_field == dt_v
def test_read_valid_file_no_header():
    """Reading a header-less CSV yields the same rows as the header variant."""
    # GIVEN
    path = "./tests/test_model_and_files/test_valid_without_header.csv"
    # WHEN
    data: List[tm.TestModel] = csv_gen.read_from_csv(
        path,
        tm.TestModel,
        has_header=False)
    # THEN — compare row by row against the expected (int, string, datetime) triples
    expected = [
        (1, "hello 1", dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
        (2, "hello 2", dt.datetime(2020, 10, 30, 10, 42, 49, 559897, tzinfo=pytz.UTC)),
        (3, "hello 3", None),
        (4, "", dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
        (None, "hello 5", dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
    ]
    assert len(data) == len(expected)
    for row, (int_v, str_v, dt_v) in zip(data, expected):
        assert row.int_field == int_v
        assert row.string_field == str_v
        assert row.datetime_field == dt_v
def test_read_file_with_header_exact_columns():
    """Restricting `columns` leaves the unselected string_field as None."""
    # GIVEN
    path = "./tests/test_model_and_files/test_valid_with_header_two_columns.csv"
    # WHEN
    data: List[tm.TestModel] = csv_gen.read_from_csv(
        path,
        tm.TestModel,
        columns=["int_field", "datetime_field"])
    # THEN — string_field is never populated; only int/datetime columns are read
    expected = [
        (1, dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
        (2, dt.datetime(2020, 10, 30, 10, 42, 49, 559897, tzinfo=pytz.UTC)),
        (3, None),
        (None, dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
    ]
    assert len(data) == len(expected)
    for row, (int_v, dt_v) in zip(data, expected):
        assert row.int_field == int_v
        assert row.string_field is None
        assert row.datetime_field == dt_v
def test_read_file_without_header_exact_columns():
    """Header-less CSV with restricted `columns` behaves like the header variant."""
    # GIVEN
    path = "./tests/test_model_and_files/test_valid_without_header_two_columns.csv"
    # WHEN
    data: List[tm.TestModel] = csv_gen.read_from_csv(
        path,
        tm.TestModel,
        has_header=False,
        columns=["int_field", "datetime_field"])
    # THEN — string_field is never populated; only int/datetime columns are read
    expected = [
        (1, dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
        (2, dt.datetime(2020, 10, 30, 10, 42, 49, 559897, tzinfo=pytz.UTC)),
        (3, None),
        (None, dt.datetime(2020, 10, 30, 10, 41, 45, 968627)),
    ]
    assert len(data) == len(expected)
    for row, (int_v, dt_v) in zip(data, expected):
        assert row.int_field == int_v
        assert row.string_field is None
        assert row.datetime_field == dt_v
| 29.013514
| 83
| 0.649045
| 667
| 4,294
| 3.986507
| 0.091454
| 0.203084
| 0.06619
| 0.102294
| 0.952238
| 0.941331
| 0.941331
| 0.941331
| 0.941331
| 0.941331
| 0
| 0.108466
| 0.227061
| 4,294
| 147
| 84
| 29.210884
| 0.692679
| 0.014672
| 0
| 0.838384
| 0
| 0
| 0.083452
| 0.05927
| 0
| 0
| 0
| 0
| 0.585859
| 1
| 0.040404
| false
| 0
| 0.050505
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
536b35ec37b9114321a987198b8d28272d74962b
| 38,902
|
py
|
Python
|
calm/dsl/builtins/models/variable.py
|
tuxtof/calm-dsl
|
5af67435d8304b97e170a690068f2d5975e9bfe6
|
[
"Apache-2.0"
] | 37
|
2019-12-23T15:23:20.000Z
|
2022-03-15T11:12:11.000Z
|
calm/dsl/builtins/models/variable.py
|
gabybeitler/calm-dsl
|
bac453413cfcf800eef95d89d5a7323c83654a93
|
[
"Apache-2.0"
] | 144
|
2020-03-09T11:22:09.000Z
|
2022-03-28T21:34:09.000Z
|
calm/dsl/builtins/models/variable.py
|
gabybeitler/calm-dsl
|
bac453413cfcf800eef95d89d5a7323c83654a93
|
[
"Apache-2.0"
] | 46
|
2020-01-23T14:28:04.000Z
|
2022-03-09T04:17:10.000Z
|
import re
from .entity import EntityType, Entity
from .validator import PropertyValidator
from .task_input import _task_input
# Variable
# Maps DSL value-type names to their API enum strings.
VARIABLE_VALUE_TYPES = {
    "int": "INT",
    "date": "DATE",
    "time": "TIME",
    "dict": "DICT",
    "string": "STRING",
    # NOTE(review): "data_time" looks like a typo for "date_time", but the key
    # is part of the public mapping — left unchanged; confirm with callers.
    "data_time": "DATE_TIME",
    "multiline_string": "MULTILINE_STRING",
}

# Maps DSL data-type names to their API enum strings.
VARIABLE_DATA_TYPES = {
    "base": "BASE",
    "list": "LIST",
    "single_select_list": "SINGLE_SELECT_LIST",
}
class VariableType(EntityType):
    """Entity type for app variables (openapi type ``app_variable``)."""

    __schema_name__ = "Variable"
    __openapi_type__ = "app_variable"

    def compile(cls):
        """Compile to a dict, pruning empty sections and redundant option data."""
        cdict = super().compile()
        # Drop optional sections that compiled to empty values.
        for key in ("options", "regex", "editables"):
            if not cdict.get(key, {}):
                del cdict[key]
        if cdict.get("options"):
            options = cdict["options"]
            if options["type"] == "PREDEFINED":
                # Static options carry their values in "choices"; attrs unused.
                del options["attrs"]
            else:
                # Dynamic options fetch values via EScript/HTTP; choices unused.
                del options["choices"]
        return cdict
class VariableValidator(PropertyValidator, openapi_type="app_variable"):
    # Property validator mapping app_variable entities to VariableType.
    __default__ = None       # no implicit default value
    __kind__ = VariableType  # entity kind this validator accepts
def _var(**kwargs):
    """Create a VariableType class with ``Entity`` as its base."""
    var_name = kwargs.get("name", None)
    return VariableType(var_name, (Entity,), kwargs)


# Base variable class; named variables derive from it via setvar().
Variable = _var()
def setvar(name, value, type_="LOCAL", **kwargs):
    """Build a named VariableType deriving from ``Variable``.

    ``value`` is only recorded when it is not None; ``type_`` defaults to
    a LOCAL variable.
    """
    kwargs["name"] = name
    kwargs["type"] = type_
    if value is not None:
        kwargs["value"] = value
    return VariableType(name, (Variable,), kwargs)
def simple_variable(
    value,
    name=None,
    label=None,
    regex=None,
    validate_regex=False,
    is_hidden=False,
    is_mandatory=False,
    runtime=False,
    description="",
):
    """Create a simple LOCAL variable.

    Optionally attaches a label, a validation regex and runtime editability.

    Raises:
        TypeError: when ``regex`` is not a string.
        ValueError: when ``validate_regex`` is set and ``value`` fails the regex.
    """
    kwargs = {"is_hidden": is_hidden, "is_mandatory": is_mandatory}
    if runtime:
        # Runtime variables may be edited at launch time.
        kwargs["editables"] = {"value": True}
    if label is not None:
        kwargs["label"] = label
    if regex is not None:
        if not isinstance(regex, str):
            raise TypeError(
                "Expected string in field regex for variable "
                + (name or "")
                + ", got {}".format(type(regex))
            )
        if validate_regex and regex and value:
            if re.match(regex, value) is None:
                raise ValueError(
                    "Value '{}' doesn't match with specified regex '{}'".format(
                        value, regex
                    )
                )
        kwargs["regex"] = {"value": regex, "should_validate": validate_regex}
    if description is not None:
        kwargs["description"] = description
    return setvar(name, value, **kwargs)
def simple_variable_secret(
    value,
    name=None,
    label=None,
    regex=None,
    validate_regex=False,
    is_hidden=False,
    is_mandatory=False,
    runtime=False,
    description="",
):
    """Create a simple SECRET string variable (value masked by the platform)."""
    kwargs = {"is_hidden": is_hidden, "is_mandatory": is_mandatory}
    if runtime:
        # Runtime variables are editable at launch time.
        kwargs["editables"] = {"value": True}
    if label is not None:
        kwargs["label"] = label
    if regex is not None:
        if not isinstance(regex, str):
            raise TypeError(
                "Expected string in field regex for variable "
                + (name or "")
                + ", got {}".format(type(regex))
            )
        if validate_regex and regex and value:
            if re.match(regex, value) is None:
                raise ValueError(
                    "Value '{}' doesn't match with specified regex '{}'".format(
                        value, regex
                    )
                )
        kwargs["regex"] = {"value": regex, "should_validate": validate_regex}
    if description is not None:
        kwargs["description"] = description
    return setvar(name, value, type_="SECRET", **kwargs)
def _advanced_variable(
    type_,
    name=None,
    value="",
    label=None,
    task=None,
    value_type=None,
    data_type=None,
    regex=None,
    validate_regex=False,
    options=None,
    is_hidden=False,
    is_mandatory=False,
    runtime=False,
    description="",
):
    """Shared builder behind every typed variable helper.

    Handles task-backed options (HTTP/EXEC), predefined option lists,
    value/data type validation and optional regex validation of defaults.
    `task` and `options` are mutually exclusive: a task already supplies
    the choices dynamically.

    Raises:
        TypeError: on wrongly-typed task/regex/options/defaults.
        ValueError: on invalid task type, value_type, data_type, or a
            default that fails regex / option-membership checks.
    """
    kwargs = {"name": name, "value": value, "type_": type_}
    if runtime:
        kwargs["editables"] = {"value": True}
    if label is not None:
        kwargs["label"] = label
    if task is not None:
        # Fix: getattr with a default so a non-Task object lacking __kind__
        # raises the intended TypeError rather than an AttributeError.
        if getattr(task, "__kind__", None) != "app_task":
            raise TypeError(
                "Expected a Task for variable "
                + (name or "")
                + ", got {}".format(type(task))
            )
        task_attrs = task.compile().get("attrs")
        if not task_attrs:
            raise ValueError("Task for variable " + (name or "") + ", is not valid.")
        task_type = getattr(task, "type")
        if task_type not in ["HTTP", "EXEC"]:
            raise ValueError(
                "Task type for variable "
                + (name or "")
                + ", is not valid, Expected one of"
                + " ['HTTP', 'EXEC'], got {}".format(task_type)
            )
        task_attrs["type"] = task_type
        # e.g. "HTTP_LOCAL" / "EXEC_SECRET"
        kwargs["type_"] = task_type + "_" + type_
        kwargs["options"] = {"type": task_type, "attrs": task_attrs}
    if value_type is not None:
        value_type = value_type.upper()
        if value_type not in VARIABLE_VALUE_TYPES.values():
            raise ValueError(
                "Value type for variable "
                + (name or "")
                + ", is not valid, Expected one of"
                + " {}, got {}".format(list(VARIABLE_VALUE_TYPES.values()), value_type)
            )
        kwargs["value_type"] = value_type
    if data_type is not None:
        data_type = data_type.upper()
        if data_type not in VARIABLE_DATA_TYPES.values():
            raise ValueError(
                "Data type for variable "
                + (name or "")
                + ", is not valid, Expected one of"
                + " {}, got {}".format(list(VARIABLE_DATA_TYPES.values()), data_type)
            )
        kwargs["data_type"] = data_type
    if regex is not None:
        if not isinstance(regex, str):
            raise TypeError(
                "Expected string in field regex for variable "
                + (name or "")
                + ", got {}".format(type(regex))
            )
        regex = {"value": regex, "should_validate": validate_regex}
        kwargs["regex"] = regex
    if options is not None:
        if kwargs.get("options", None) is not None:
            # Fix: added the missing space before "cannot" in the message.
            raise ValueError(
                "Variable options for variable "
                + (name or "")
                + " cannot be specified since it is being "
                + "fetched from a {} task".format(kwargs["options"]["type"])
            )
        if not isinstance(options, list):
            raise TypeError(
                "Expected list of options for variable "
                + (name or "")
                + ", got {}".format(type(options))
            )
        choices = []
        for choice in options:
            if not isinstance(choice, str):
                raise TypeError(
                    "Expected list of string choices for options for variable "
                    + (name or "")
                    + ", got {}".format(type(choice))
                )
            if validate_regex and regex:
                regex_result = re.match(regex["value"], choice)
                if not regex_result:
                    raise ValueError(
                        "Option '{}' doesn't match with specified regex '{}'".format(
                            choice, regex["value"]
                        )
                    )
            choices.append(choice)
        if isinstance(value, list) and data_type == "LIST":
            # Multi-select default: every element must be a known choice;
            # serialized as a comma-joined string.
            for val in value:
                if not isinstance(val, str):
                    raise TypeError(
                        "Expected list of string defaults for variable "
                        + (name or "")
                        + ", got {}".format(type(val))
                    )
                if val not in choices:
                    raise TypeError(
                        "Default value for variable array with options "
                        + (name or "")
                        + ", contains {}, which is not one of the options".format(val)
                    )
            value = ",".join(value)
            kwargs["value"] = value
        if value is None and len(choices) > 0:
            # No explicit default: fall back to the first choice.
            value = choices[0]
            kwargs["value"] = value
        if data_type != "LIST" and value not in choices:
            raise TypeError(
                "Default value for variable with options "
                + (name or "")
                + ", is {}, which is not one of the options".format(value)
            )
        options = {"type": "PREDEFINED", "choices": choices}
        kwargs["options"] = options
    else:
        # If options are None, just regex validate the value
        if validate_regex and regex and value:
            regex_result = re.match(regex["value"], value)
            if not regex_result:
                raise ValueError(
                    "Value '{}' doesn't match with specified regex '{}'".format(
                        value, regex["value"]
                    )
                )
    if is_hidden is not None:
        kwargs["is_hidden"] = bool(is_hidden)
    if is_mandatory is not None:
        kwargs["is_mandatory"] = bool(is_mandatory)
    if description is not None:
        kwargs["description"] = description
    return setvar(**kwargs)
def simple_variable_int(
    value, name=None, label=None, regex=r"^[\d]*$", validate_regex=False,
    is_hidden=False, is_mandatory=False, runtime=False, description="",
):
    """Simple LOCAL variable of value type INT."""
    return _advanced_variable(
        "LOCAL", name=name, value=value, label=label, value_type="INT",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_date(
    value, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """Simple LOCAL variable of value type DATE (dd/mm/yyyy)."""
    return _advanced_variable(
        "LOCAL", name=name, value=value, label=label, value_type="DATE",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_time(
    value, name=None, label=None, regex=r"^[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """Simple LOCAL variable of value type TIME (HH:MM[:SS])."""
    return _advanced_variable(
        "LOCAL", name=name, value=value, label=label, value_type="TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_datetime(
    value, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})((T)|(\s-\s))[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """Simple LOCAL variable of value type DATE_TIME."""
    return _advanced_variable(
        "LOCAL", name=name, value=value, label=label, value_type="DATE_TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_multiline(
    value, name=None, label=None, regex=None, validate_regex=False,
    is_hidden=False, is_mandatory=False, runtime=False, description="",
):
    """Simple LOCAL variable of value type MULTILINE_STRING."""
    return _advanced_variable(
        "LOCAL", name=name, value=value, label=label,
        value_type="MULTILINE_STRING", data_type="BASE", regex=regex,
        validate_regex=validate_regex, is_hidden=is_hidden,
        is_mandatory=is_mandatory, runtime=runtime, description=description,
    )
def simple_variable_int_secret(
    value, name=None, label=None, regex=r"^[\d]*$", validate_regex=False,
    is_hidden=False, is_mandatory=False, runtime=False, description="",
):
    """SECRET variable of value type INT."""
    return _advanced_variable(
        "SECRET", name=name, value=value, label=label, value_type="INT",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_date_secret(
    value, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """SECRET variable of value type DATE (dd/mm/yyyy)."""
    return _advanced_variable(
        "SECRET", name=name, value=value, label=label, value_type="DATE",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_time_secret(
    value, name=None, label=None, regex=r"^[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """SECRET variable of value type TIME (HH:MM[:SS])."""
    return _advanced_variable(
        "SECRET", name=name, value=value, label=label, value_type="TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_datetime_secret(
    value, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})((T)|(\s-\s))[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """SECRET variable of value type DATE_TIME."""
    return _advanced_variable(
        "SECRET", name=name, value=value, label=label, value_type="DATE_TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        is_hidden=is_hidden, is_mandatory=is_mandatory, runtime=runtime,
        description=description,
    )
def simple_variable_multiline_secret(
    value, name=None, label=None, regex=None, validate_regex=False,
    is_hidden=False, is_mandatory=False, runtime=False, description="",
):
    """SECRET variable of value type MULTILINE_STRING."""
    return _advanced_variable(
        "SECRET", name=name, value=value, label=label,
        value_type="MULTILINE_STRING", data_type="BASE", regex=regex,
        validate_regex=validate_regex, is_hidden=is_hidden,
        is_mandatory=is_mandatory, runtime=runtime, description=description,
    )
def variable_string_with_predefined_options(
    options, default=None, name=None, label=None, regex=None,
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL STRING variable whose value is picked from predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=default, label=label, value_type="STRING",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_int_with_predefined_options(
    options, default=None, name=None, label=None, regex=r"^[\d]*$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL INT variable whose value is picked from predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=default, label=label, value_type="INT",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_date_with_predefined_options(
    options, default=None, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL DATE variable whose value is picked from predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=default, label=label, value_type="DATE",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_time_with_predefined_options(
    options, default=None, name=None, label=None,
    regex=r"^[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL TIME variable whose value is picked from predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=default, label=label, value_type="TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_datetime_with_predefined_options(
    options, default=None, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})((T)|(\s-\s))[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL DATE_TIME variable whose value is picked from predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=default, label=label, value_type="DATE_TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_multiline_with_predefined_options(
    options, default=None, name=None, label=None, regex=None,
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL MULTILINE_STRING variable picked from predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=default, label=label,
        value_type="MULTILINE_STRING", data_type="BASE", regex=regex,
        validate_regex=validate_regex, options=options, is_hidden=is_hidden,
        is_mandatory=is_mandatory, runtime=runtime, description=description,
    )
def variable_string_with_predefined_options_array(
    options, defaults=None, name=None, label=None, regex=None,
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL multi-select STRING variable (LIST) with predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=defaults, label=label, value_type="STRING",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_int_with_predefined_options_array(
    options, defaults=None, name=None, label=None, regex=r"^[\d]*$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL multi-select INT variable (LIST) with predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=defaults, label=label, value_type="INT",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_date_with_predefined_options_array(
    options, defaults=None, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL multi-select DATE variable (LIST) with predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=defaults, label=label, value_type="DATE",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_time_with_predefined_options_array(
    options, defaults=None, name=None, label=None,
    regex=r"^[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL multi-select TIME variable (LIST) with predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=defaults, label=label, value_type="TIME",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_datetime_with_predefined_options_array(
    options, defaults=None, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})((T)|(\s-\s))[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL multi-select DATE_TIME variable (LIST) with predefined options."""
    return _advanced_variable(
        "LOCAL", name=name, value=defaults, label=label, value_type="DATE_TIME",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        options=options, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=runtime, description=description,
    )
def variable_multiline_with_predefined_options_array(
    options, defaults=None, name=None, label=None, regex=None,
    validate_regex=False, is_hidden=False, is_mandatory=False,
    runtime=False, description="",
):
    """LOCAL multi-select MULTILINE_STRING variable (LIST) with options."""
    return _advanced_variable(
        "LOCAL", name=name, value=defaults, label=label,
        value_type="MULTILINE_STRING", data_type="LIST", regex=regex,
        validate_regex=validate_regex, options=options, is_hidden=is_hidden,
        is_mandatory=is_mandatory, runtime=runtime, description=description,
    )
def variable_string_with_options_from_task(
    task, name=None, label=None, regex=None, validate_regex=False,
    is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL STRING variable whose options are fetched by an HTTP/EXEC task."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="STRING",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_int_with_options_from_task(
    task, name=None, label=None, regex=r"^[\d]*$", validate_regex=False,
    is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL INT variable whose options are fetched by an HTTP/EXEC task."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="INT",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_date_with_options_from_task(
    task, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})$",
    validate_regex=False, is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL DATE variable whose options are fetched by an HTTP/EXEC task."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="DATE",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_time_with_options_from_task(
    task, name=None, label=None, regex=r"^[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL TIME variable whose options are fetched by an HTTP/EXEC task."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_datetime_with_options_from_task(
    task, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})((T)|(\s-\s))[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL DATE_TIME variable whose options are fetched by a task."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="DATE_TIME",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_multiline_with_options_from_task(
    task, name=None, label=None, regex=None, validate_regex=False,
    is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL MULTILINE_STRING variable whose options come from a task."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="MULTILINE_STRING",
        data_type="BASE", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_string_with_options_from_task_array(
    task, name=None, label=None, regex=None, validate_regex=False,
    is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL multi-select STRING variable (LIST) with task-fetched options."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="STRING",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_int_with_options_from_task_array(
    task, name=None, label=None, regex=r"^[\d]*$", validate_regex=False,
    is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL multi-select INT variable (LIST) with task-fetched options."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="INT",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_date_with_options_from_task_array(
    task, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})$",
    validate_regex=False, is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL multi-select DATE variable (LIST) with task-fetched options."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="DATE",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_time_with_options_from_task_array(
    task, name=None, label=None, regex=r"^[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL multi-select TIME variable (LIST) with task-fetched options."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="TIME",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_datetime_with_options_from_task_array(
    task, name=None, label=None,
    regex=r"^((0[1-9]|[12]\d|3[01])/(0[1-9]|1[0-2])/[12]\d{3})((T)|(\s-\s))[\d]{2}:[\d]{2}(:[0-5]\d)?$",
    validate_regex=False, is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL multi-select DATE_TIME variable (LIST) with task options."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="DATE_TIME",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
def variable_multiline_with_options_from_task_array(
    task, name=None, label=None, regex=None, validate_regex=False,
    is_hidden=False, is_mandatory=False, description="",
):
    """LOCAL multi-select MULTILINE_STRING variable with task options."""
    return _advanced_variable(
        "LOCAL", name=name, label=label, value_type="MULTILINE_STRING",
        data_type="LIST", regex=regex, validate_regex=validate_regex,
        task=task, is_hidden=is_hidden, is_mandatory=is_mandatory,
        runtime=True, description=description,
    )
class CalmVariable:
    """User-facing namespace for variable constructors.

    Calling CalmVariable(...) creates a simple LOCAL string variable.
    Nested classes group the typed helpers:
      Simple / Simple.Secret                 -> plain LOCAL / SECRET variables
      WithOptions.Predefined[.Array]         -> static option lists
      WithOptions.FromTask[.Array]           -> options fetched by a task
    Attribute names like `int`/`date` intentionally shadow builtins — they
    are only ever accessed as namespace members.
    """

    def __new__(
        cls,
        value,
        name=None,
        label=None,
        regex=None,
        validate_regex=False,
        is_hidden=False,
        is_mandatory=False,
        runtime=False,
        description="",
    ):
        # Instantiation is a factory call: returns a variable entity,
        # never an actual CalmVariable instance.
        return simple_variable(
            value,
            name=name,
            label=label,
            regex=regex,
            validate_regex=validate_regex,
            is_hidden=is_hidden,
            is_mandatory=is_mandatory,
            runtime=runtime,
            description=description,
        )

    class Simple:
        """Simple LOCAL variables; call directly for a string variable."""

        def __new__(
            cls,
            value,
            name=None,
            label=None,
            regex=None,
            validate_regex=False,
            is_hidden=False,
            is_mandatory=False,
            runtime=False,
            description="",
        ):
            return simple_variable(
                value,
                name=name,
                label=label,
                regex=regex,
                validate_regex=validate_regex,
                is_hidden=is_hidden,
                is_mandatory=is_mandatory,
                runtime=runtime,
                description=description,
            )

        # Typed constructors.
        string = simple_variable
        int = simple_variable_int
        date = simple_variable_date
        time = simple_variable_time
        datetime = simple_variable_datetime
        multiline = simple_variable_multiline

        class Secret:
            """Simple SECRET variables; call directly for a string secret."""

            def __new__(
                cls,
                value,
                name=None,
                label=None,
                regex=None,
                validate_regex=False,
                is_hidden=False,
                is_mandatory=False,
                runtime=False,
                description="",
            ):
                return simple_variable_secret(
                    value,
                    name=name,
                    label=label,
                    regex=regex,
                    validate_regex=validate_regex,
                    is_hidden=is_hidden,
                    is_mandatory=is_mandatory,
                    runtime=runtime,
                    description=description,
                )

            # Typed constructors (SECRET).
            string = simple_variable_secret
            int = simple_variable_int_secret
            date = simple_variable_date_secret
            time = simple_variable_time_secret
            datetime = simple_variable_datetime_secret
            multiline = simple_variable_multiline_secret

    class WithOptions:
        """Variables backed by a set of selectable options."""

        def __new__(
            cls,
            options,
            default=None,
            name=None,
            label=None,
            regex=None,
            validate_regex=False,
            is_hidden=False,
            is_mandatory=False,
            runtime=False,
            description="",
        ):
            # Default form: predefined string options.
            return variable_string_with_predefined_options(
                options,
                default=default,
                name=name,
                label=label,
                regex=regex,
                validate_regex=validate_regex,
                is_hidden=is_hidden,
                is_mandatory=is_mandatory,
                runtime=runtime,
                description=description,
            )

        class Predefined:
            """Options supplied as a static list of strings."""

            def __new__(
                cls,
                options,
                default=None,
                name=None,
                label=None,
                regex=None,
                validate_regex=False,
                is_hidden=False,
                is_mandatory=False,
                runtime=False,
                description="",
            ):
                return variable_string_with_predefined_options(
                    options,
                    default=default,
                    name=name,
                    label=label,
                    regex=regex,
                    validate_regex=validate_regex,
                    is_hidden=is_hidden,
                    is_mandatory=is_mandatory,
                    runtime=runtime,
                    description=description,
                )

            # Typed single-select constructors.
            string = variable_string_with_predefined_options
            int = variable_int_with_predefined_options
            date = variable_date_with_predefined_options
            time = variable_time_with_predefined_options
            datetime = variable_datetime_with_predefined_options
            multiline = variable_multiline_with_predefined_options

            class Array:
                """Multi-select (LIST) flavour of predefined options."""

                def __new__(
                    cls,
                    options,
                    defaults=None,
                    name=None,
                    label=None,
                    regex=None,
                    validate_regex=False,
                    is_hidden=False,
                    is_mandatory=False,
                    runtime=False,
                    description="",
                ):
                    return variable_string_with_predefined_options_array(
                        options,
                        defaults=defaults,
                        name=name,
                        label=label,
                        regex=regex,
                        validate_regex=validate_regex,
                        is_hidden=is_hidden,
                        is_mandatory=is_mandatory,
                        runtime=runtime,
                        description=description,
                    )

                # Typed multi-select constructors.
                string = variable_string_with_predefined_options_array
                int = variable_int_with_predefined_options_array
                date = variable_date_with_predefined_options_array
                time = variable_time_with_predefined_options_array
                datetime = variable_datetime_with_predefined_options_array
                multiline = variable_multiline_with_predefined_options_array

        class FromTask:
            """Options fetched dynamically by an HTTP/EXEC task."""

            def __new__(
                cls,
                task,
                name=None,
                label=None,
                regex=None,
                validate_regex=False,
                is_hidden=False,
                is_mandatory=False,
                description="",
            ):
                return variable_string_with_options_from_task(
                    task,
                    name=name,
                    label=label,
                    regex=regex,
                    validate_regex=validate_regex,
                    is_hidden=is_hidden,
                    is_mandatory=is_mandatory,
                    description=description,
                )

            # Typed single-select constructors (task-backed).
            string = variable_string_with_options_from_task
            int = variable_int_with_options_from_task
            date = variable_date_with_options_from_task
            time = variable_time_with_options_from_task
            datetime = variable_datetime_with_options_from_task
            multiline = variable_multiline_with_options_from_task

            class Array:
                """Multi-select (LIST) flavour of task-backed options."""

                def __new__(
                    cls,
                    task,
                    name=None,
                    label=None,
                    regex=None,
                    validate_regex=False,
                    is_hidden=False,
                    is_mandatory=False,
                    description="",
                ):
                    return variable_string_with_options_from_task_array(
                        task,
                        name=name,
                        label=label,
                        regex=regex,
                        validate_regex=validate_regex,
                        is_hidden=is_hidden,
                        is_mandatory=is_mandatory,
                        description=description,
                    )

                # Typed multi-select constructors (task-backed).
                string = variable_string_with_options_from_task_array
                int = variable_int_with_options_from_task_array
                date = variable_date_with_options_from_task_array
                time = variable_time_with_options_from_task_array
                datetime = variable_datetime_with_options_from_task_array
                multiline = variable_multiline_with_options_from_task_array
class RunbookVariable(CalmVariable):
    """CalmVariable specialization for runbooks, adding TaskInput."""

    class TaskInput:
        def __new__(cls, *args, **kwargs):
            # Delegates to the module-level _task_input helper
            # (defined elsewhere in this module).
            return _task_input(*args, **kwargs)
| 26.161399
| 104
| 0.550409
| 4,031
| 38,902
| 5.059042
| 0.035475
| 0.086696
| 0.043642
| 0.0331
| 0.855735
| 0.846908
| 0.821311
| 0.791497
| 0.783847
| 0.776247
| 0
| 0.009953
| 0.343967
| 38,902
| 1,486
| 105
| 26.179004
| 0.789115
| 0.004293
| 0
| 0.798098
| 0
| 0.008778
| 0.086806
| 0.026181
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035845
| false
| 0
| 0.002926
| 0.031456
| 0.08632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
728954aeb99cfc51158990744946c72e9b1ecc31
| 4,353
|
py
|
Python
|
tests/v2/test_orders.py
|
rkemmy/food-first
|
69c72c0968e7bd3e05573c96267d5c6c119e4289
|
[
"MIT"
] | null | null | null |
tests/v2/test_orders.py
|
rkemmy/food-first
|
69c72c0968e7bd3e05573c96267d5c6c119e4289
|
[
"MIT"
] | 1
|
2018-10-01T17:43:54.000Z
|
2018-10-01T17:43:54.000Z
|
tests/v2/test_orders.py
|
rkemmy/food-first
|
69c72c0968e7bd3e05573c96267d5c6c119e4289
|
[
"MIT"
] | null | null | null |
import json
import unittest
from app import create_app
from testdb import CreateTables
from .base_test import TestApp
from .test_meals import TestMeals
class TestOrders(TestApp):
    """Endpoint tests for the /api/v2 order routes.

    Fixes: the original file defined ``test_get_all_orders`` three times;
    the later definitions shadowed the earlier ones, so only one of the
    three scenarios ever ran. They are renamed so each test executes.
    The history-route path was also missing its leading slash.
    """

    def _headers(self, token):
        # Common JSON + bearer-auth headers for every request.
        return {
            "content-type": "application/json",
            "Authorization": "Bearer {}".format(token),
        }

    def _seed_order(self, token):
        # Create a menu item and place an order against it; returns the
        # order-creation response.
        self.client.post(
            '/api/v2/menu',
            headers=self._headers(token),
            data=json.dumps(self.meal),
        )
        return self.client.post(
            '/api/v2/users/orders',
            headers=self._headers(token),
            data=json.dumps(self.order),
        )

    def test_post_order(self):
        token = self.get_token()
        response = self._seed_order(token)
        self.assertEqual(response.status_code, 201)

    def test_get_all_orders(self):
        token = self.get_token()
        self._seed_order(token)
        response = self.client.get(
            '/api/v2/users/orders', headers=self._headers(token)
        )
        self.assertEqual(response.status_code, 200)

    def test_get_order_by_id(self):
        # Was a duplicate "test_get_all_orders" definition in the original.
        token = self.get_token()
        self._seed_order(token)
        response = self.client.get(
            '/api/v2/users/orders/1', headers=self._headers(token)
        )
        self.assertEqual(response.status_code, 200)

    def test_update_order_status(self):
        # Was a duplicate "test_get_all_orders" definition in the original.
        token = self.get_token()
        self._seed_order(token)
        response = self.client.put(
            '/api/v2/users/orders/1',
            headers=self._headers(token),
            data=json.dumps({"status": "Processing"}),
        )
        self.assertEqual(response.status_code, 201)

    def test_delete_order(self):
        token = self.get_token()
        self._seed_order(token)
        response = self.client.delete(
            '/api/v2/users/orders/1', headers=self._headers(token)
        )
        self.assertEqual(response.status_code, 200)

    def test_get_user_order_history(self):
        token = self.get_token()
        self._seed_order(token)
        # Fixed: path was 'api/v2/users/history' (missing leading slash),
        # inconsistent with every other route in this class.
        response = self.client.get(
            '/api/v2/users/history', headers=self._headers(token)
        )
        self.assertEqual(response.status_code, 200)
| 34.275591
| 102
| 0.589708
| 470
| 4,353
| 5.393617
| 0.108511
| 0.067061
| 0.12071
| 0.194477
| 0.912426
| 0.912426
| 0.912426
| 0.912426
| 0.90572
| 0.90572
| 0
| 0.011561
| 0.244889
| 4,353
| 126
| 103
| 34.547619
| 0.759659
| 0
| 0
| 0.630435
| 0
| 0
| 0.267648
| 0.015176
| 0
| 0
| 0
| 0
| 0.065217
| 1
| 0.065217
| false
| 0
| 0.065217
| 0
| 0.141304
| 0.032609
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72bacc91bac93d10f628732ea2e25cd2ed5fedeb
| 6,044
|
py
|
Python
|
pyva/tests/TestDependentRules.py
|
holoyan/python-data-validation
|
e928c4131072c53cb8ace1fbaa83216f06ab6bfe
|
[
"MIT"
] | 3
|
2021-03-16T05:47:46.000Z
|
2021-03-23T17:43:55.000Z
|
pyva/tests/TestDependentRules.py
|
holoyan/python-data-validation
|
e928c4131072c53cb8ace1fbaa83216f06ab6bfe
|
[
"MIT"
] | null | null | null |
pyva/tests/TestDependentRules.py
|
holoyan/python-data-validation
|
e928c4131072c53cb8ace1fbaa83216f06ab6bfe
|
[
"MIT"
] | null | null | null |
import unittest
from pyva import Validator
class TestDependentRules(unittest.TestCase):
    """Tests for rules whose requiredness depends on sibling fields.

    Covers required_if, required_with, required_with_all, required_without,
    required_without_all, required_unless and present.
    """

    def test_required_if(self):
        """required_if is satisfied when the condition holds and the field exists."""
        validator = Validator(
            {'user': {'name': 'John', 'age': 26}},
            {'user': 'required', 'user.name': 'required_if:user.age,26'},
        )
        self.assertTrue(validator.passes())

    def test_required_if_fails(self):
        """required_if flags a missing field only when one of its values matches."""
        validator = Validator(
            {'user': {'age': 26}},
            {
                'user': 'required',
                'user.name': 'required_if:user.age,26',
                'user.wife': 'required_if:user.age,26,30,69',
                'user.not_required': 'required_if:user.age,30,69',
            },
        )
        self.assertFalse(validator.passes())
        self.assertIn('user.name', validator.failed_rules)
        self.assertIn('user.wife', validator.failed_rules)
        self.assertNotIn('user.not_required', validator.failed_rules)

    def test_required_with(self):
        """required_with passes when the field accompanies the listed ones."""
        validator = Validator(
            {
                'user': {
                    'name': 'John',
                    'age': 26,
                    'wife': {'name': 'Anna', 'age': 20},
                }
            },
            {'user': 'required', 'user.name': 'required_with:user.age,user.wife'},
        )
        self.assertTrue(validator.passes())

    def test_required_with_fails(self):
        """required_with flags fields missing while a listed field is present."""
        validator = Validator(
            {'user': {'age': 20}},
            {
                'user': 'required',
                'user.name': 'required_with:user.age',
                'user.wife': 'required_with:user.age',
            },
        )
        self.assertFalse(validator.passes())
        self.assertIn('user.name', validator.failed_rules)
        self.assertIn('user.wife', validator.failed_rules)

    def test_required_with_all(self):
        """required_with_all passes when the field and every dependency exist."""
        validator = Validator(
            {
                'user': {
                    'name': 'John',
                    'age': 26,
                    'wife': {'name': 'Anna', 'age': 20},
                }
            },
            {'user': 'required', 'user.name': 'required_with_all:user.age,user.wife'},
        )
        self.assertTrue(validator.passes())

    def test_required_with_all_pass_without_all_params(self):
        """required_with_all does not fire unless every dependency is present."""
        validator = Validator(
            {'user': {'wife': {'name': 'Anna', 'age': 20}}},
            {'user': 'required', 'user.name': 'required_with_all:user.age,user.wife'},
        )
        self.assertTrue(validator.passes())

    def test_required_with_all_fails(self):
        """required_with_all fails when all dependencies exist but the field does not."""
        validator = Validator(
            {
                'user': {
                    'age': 26,
                    'wife': {'name': 'Anna', 'age': 20},
                }
            },
            {'user': 'required', 'user.name': 'required_with_all:user.age,user.wife'},
        )
        self.assertTrue(validator.fails())
        self.assertIn('user.name', validator.failed_rules)

    def test_required_without(self):
        """required_without passes when the field is present despite an absent dependency."""
        validator = Validator(
            {
                'user': {
                    'name': 'John',
                    'age': None,
                    'wife': {'name': 'Anna', 'age': 20},
                }
            },
            {'user': 'required', 'user.name': 'required_without:user.age,user.wife'},
        )
        self.assertTrue(validator.passes())

    def test_required_without_fails(self):
        """required_without fails when both the field and its dependency are absent."""
        validator = Validator(
            {'user': {'age': None}},
            {'user': 'required', 'user.name': 'required_without:user.age'},
        )
        self.assertTrue(validator.fails())
        self.assertIn('user.name', validator.failed_rules)

    def test_required_without_all(self):
        """required_without does not fire while at least one dependency is present."""
        validator = Validator(
            {
                'user': {
                    'age': 25,
                    'wife': {'name': 'Anna', 'age': 20},
                }
            },
            {'user': 'required', 'user.name': 'required_without:user.age,user.wife'},
        )
        self.assertTrue(validator.passes())

    def test_required_without_all_fails(self):
        """required_without_all fails when every dependency is missing or empty."""
        validator = Validator(
            {'user': {'age': None}},
            {'user': 'required', 'user.name': 'required_without_all:user.age,user.wife'},
        )
        self.assertTrue(validator.fails())
        self.assertIn('user.name', validator.failed_rules)

    def test_required_without_all_fails_with_empty_data(self):
        """required_without_all also fires on a completely empty container."""
        validator = Validator(
            {'user': {}},
            {'user': 'required', 'user.name': 'required_without_all:user.age,user.wife'},
        )
        self.assertTrue(validator.fails())
        self.assertIn('user.name', validator.failed_rules)

    def test_required_unless(self):
        """required_unless passes when the other field matches one of the values."""
        validator = Validator(
            {'user': {'age': 25}},
            {'user': 'required', 'user.name': 'required_unless:user.age,25'},
        )
        self.assertTrue(validator.passes())

    def test_required_unless_fails(self):
        """required_unless fails when the other field matches none of the values."""
        validator = Validator(
            {'user': {'age': 25}},
            # NOTE(review): this rule string uses ':' where the sibling tests use ','
            # after the field name; reproduced verbatim — confirm it is intentional.
            {'user': 'required', 'user.name': 'required_unless:user.age:26,27'},
        )
        self.assertTrue(validator.fails())
        self.assertIn('user.name', validator.failed_rules)

    def test_present(self):
        """present passes when the key exists in the data."""
        validator = Validator(
            {'user': {'age': 25}},
            {'user': 'required', 'user.age': 'present'},
        )
        self.assertTrue(validator.passes())

    def test_present_fails(self):
        """present fails when the key is absent."""
        validator = Validator(
            {'user': {}},
            {'user': 'required', 'user.age': 'present'},
        )
        self.assertTrue(validator.fails())
        self.assertIn('user.age', validator.failed_rules)
| 25.50211
| 66
| 0.428855
| 549
| 6,044
| 4.564663
| 0.078324
| 0.081006
| 0.089385
| 0.114924
| 0.888268
| 0.877494
| 0.84996
| 0.759377
| 0.699122
| 0.683559
| 0
| 0.015046
| 0.428193
| 6,044
| 236
| 67
| 25.610169
| 0.710069
| 0
| 0
| 0.727723
| 0
| 0
| 0.203342
| 0.085208
| 0
| 0
| 0
| 0
| 0.133663
| 1
| 0.079208
| false
| 0.054455
| 0.009901
| 0
| 0.094059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
72c5b66b95779bf83f3afadcc4f05fcccee51da6
| 261
|
py
|
Python
|
Modulo III/aula017c-listasParte1.py
|
ascaniopy/python
|
6d8892b7b9ff803b7422a61e68a383ec6ac7d62d
|
[
"MIT"
] | null | null | null |
Modulo III/aula017c-listasParte1.py
|
ascaniopy/python
|
6d8892b7b9ff803b7422a61e68a383ec6ac7d62d
|
[
"MIT"
] | null | null | null |
Modulo III/aula017c-listasParte1.py
|
ascaniopy/python
|
6d8892b7b9ff803b7422a61e68a383ec6ac7d62d
|
[
"MIT"
] | null | null | null |
a = [2, 3, 4, 7]
b = a # Aqui troca nas duaas listas.
b[2] = 8
print('Lista A:', a)
print('Lista B:', b)
print()
print()
a = [2, 3, 4, 7]
b = a[:] # Aqui troca só na lista B.
b[2] = 8
print('Lista A:', a)
print('Lista B:', b)
| 14.5
| 52
| 0.45977
| 49
| 261
| 2.44898
| 0.326531
| 0.333333
| 0.175
| 0.066667
| 0.716667
| 0.716667
| 0.716667
| 0.716667
| 0.716667
| 0.45
| 0
| 0.068571
| 0.329502
| 261
| 17
| 53
| 15.352941
| 0.617143
| 0.206897
| 0
| 0.833333
| 0
| 0
| 0.159204
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
72d85d2fa1135583581b68708708de5f01c9dea1
| 375,763
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_segment_routing_srv6_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_segment_routing_srv6_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_segment_routing_srv6_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_segment_routing_srv6_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR segment\-routing\-srv6 package operational data.
This module contains definitions
for the following management objects\:
srv6\: Segment Routing with IPv6 dataplane
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class SidAllocation(Enum):
    """
    SidAllocation (Enum Class)

    SID allocation type

    .. data:: unknown = 0

        Unknown

    .. data:: dynamic = 1

        Dynamic

    .. data:: explicit = 2

        Explicit

    """

    unknown = Enum.YLeaf(0, "unknown")
    dynamic = Enum.YLeaf(1, "dynamic")
    explicit = Enum.YLeaf(2, "explicit")

    @staticmethod
    def _meta_info():
        # Import lazily so the (large) generated metadata module is only
        # loaded when metadata is actually requested.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['SidAllocation']
class SidState(Enum):
    """
    SidState (Enum Class)

    SID manager SID state

    .. data:: unknown = 0

        Unknown

    .. data:: in_use = 1

        In Use

    .. data:: pending = 2

        Pending

    .. data:: stale = 3

        Stale

    """

    unknown = Enum.YLeaf(0, "unknown")
    in_use = Enum.YLeaf(1, "in-use")
    pending = Enum.YLeaf(2, "pending")
    stale = Enum.YLeaf(3, "stale")

    @staticmethod
    def _meta_info():
        # Lazy import of the generated metadata table (see SidAllocation).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['SidState']
class Srv6EndFunction(Enum):
    """
    Srv6EndFunction (Enum Class)

    SRv6 End Function Type

    .. data:: unknown = 0

        Unknown

    .. data:: end = 1

        End (no PSP/USP)

    .. data:: end_with_psp = 2

        End with PSP

    .. data:: end_with_usp = 3

        End with USP

    .. data:: end_with_psp_usp = 4

        End with PSP/USP

    .. data:: end_x = 5

        End.X (no PSP/USP)

    .. data:: end_x_with_psp = 6

        End.X with PSP

    .. data:: end_x_with_usp = 7

        End.X with USP

    .. data:: end_x_with_psp_usp = 8

        End.X with PSP/USP

    .. data:: end_tbl = 9

        End.T (no PSP/USP)

    .. data:: end_tbl_with_psp = 10

        End.T with PSP

    .. data:: end_tbl_with_usp = 11

        End.T with USP

    .. data:: end_tbl_with_psp_usp = 12

        End.T with PSP/USP

    .. data:: end_b6 = 13

        End.B6

    .. data:: end_b6_encaps = 14

        End.B6.Encaps

    .. data:: end_bm = 15

        End.BM

    .. data:: end_dx6 = 16

        End.DX6

    .. data:: end_dx4 = 17

        End.DX4

    .. data:: end_dt6 = 18

        End.DT6

    .. data:: end_dt4 = 19

        End.DT4

    .. data:: end_dt46 = 20

        End.DT46

    .. data:: end_dx2 = 21

        End.DX2

    .. data:: end_dx2v = 22

        End.DX2V

    .. data:: end_dx2u = 23

        End.DX2U

    .. data:: end_dx2m = 24

        End.DX2M

    .. data:: end_otp = 25

        End.OTP

    .. data:: end_s = 26

        End.S

    """

    unknown = Enum.YLeaf(0, "unknown")
    end = Enum.YLeaf(1, "end")
    end_with_psp = Enum.YLeaf(2, "end-with-psp")
    end_with_usp = Enum.YLeaf(3, "end-with-usp")
    end_with_psp_usp = Enum.YLeaf(4, "end-with-psp-usp")
    end_x = Enum.YLeaf(5, "end-x")
    end_x_with_psp = Enum.YLeaf(6, "end-x-with-psp")
    end_x_with_usp = Enum.YLeaf(7, "end-x-with-usp")
    end_x_with_psp_usp = Enum.YLeaf(8, "end-x-with-psp-usp")
    end_tbl = Enum.YLeaf(9, "end-tbl")
    end_tbl_with_psp = Enum.YLeaf(10, "end-tbl-with-psp")
    end_tbl_with_usp = Enum.YLeaf(11, "end-tbl-with-usp")
    end_tbl_with_psp_usp = Enum.YLeaf(12, "end-tbl-with-psp-usp")
    end_b6 = Enum.YLeaf(13, "end-b6")
    end_b6_encaps = Enum.YLeaf(14, "end-b6-encaps")
    end_bm = Enum.YLeaf(15, "end-bm")
    end_dx6 = Enum.YLeaf(16, "end-dx6")
    end_dx4 = Enum.YLeaf(17, "end-dx4")
    end_dt6 = Enum.YLeaf(18, "end-dt6")
    end_dt4 = Enum.YLeaf(19, "end-dt4")
    end_dt46 = Enum.YLeaf(20, "end-dt46")
    end_dx2 = Enum.YLeaf(21, "end-dx2")
    end_dx2v = Enum.YLeaf(22, "end-dx2v")
    end_dx2u = Enum.YLeaf(23, "end-dx2u")
    end_dx2m = Enum.YLeaf(24, "end-dx2m")
    end_otp = Enum.YLeaf(25, "end-otp")
    end_s = Enum.YLeaf(26, "end-s")

    @staticmethod
    def _meta_info():
        # Lazy import of the generated metadata table (see SidAllocation).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6EndFunction']
class Srv6OutOfResourceState(Enum):
    """
    Srv6OutOfResourceState (Enum Class)

    SRv6 Out of Resource State

    .. data:: oor_green = 0

        Resources Available

    .. data:: oor_yellow = 1

        Resources Warning. Have exceeded minor
        threshold

    .. data:: oor_red = 2

        Out of Resources. Have exceeded major threshold

    """

    oor_green = Enum.YLeaf(0, "oor-green")
    oor_yellow = Enum.YLeaf(1, "oor-yellow")
    oor_red = Enum.YLeaf(2, "oor-red")

    @staticmethod
    def _meta_info():
        # Lazy import of the generated metadata table (see SidAllocation).
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6OutOfResourceState']
class Srv6(_Entity_):
"""
Segment Routing with IPv6 dataplane
.. attribute:: active
Active SRv6 operational data
**type**\: :py:class:`Active <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active>`
**config**\: False
.. attribute:: standby
Standby SRv6 operational data
**type**\: :py:class:`Standby <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby>`
**config**\: False
"""
    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Wire up the top-level srv6 container with its active/standby children."""
        # Python 2 requires explicit super() arguments; Python 3 does not.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6, self).__init__()
        self._top_entity = None

        self.yang_name = "srv6"
        self.yang_parent_name = "Cisco-IOS-XR-segment-routing-srv6-oper"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Maps YANG child names to (python attribute name, binding class).
        self._child_classes = OrderedDict([("active", ("active", Srv6.Active)), ("standby", ("standby", Srv6.Standby))])
        self._leafs = OrderedDict()  # no leaf nodes at this level

        self.active = Srv6.Active()
        self.active.parent = self
        self._children_name_map["active"] = "active"

        self.standby = Srv6.Standby()
        self.standby.parent = self
        self._children_name_map["standby"] = "standby"
        self._segment_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6"
        # After this flag is set, attribute writes are validated by
        # _perform_setattr (see __setattr__).
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route every attribute write through the ydk base-class hook so the
        # frozen entity only accepts known attributes.
        self._perform_setattr(Srv6, [], name, value)
class Active(_Entity_):
"""
Active SRv6 operational data
.. attribute:: locator_all_stale_sids
Operational container for all Stale SIDs across all Locators
**type**\: :py:class:`LocatorAllStaleSids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids>`
**config**\: False
.. attribute:: manager
SID Manager information
**type**\: :py:class:`Manager <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager>`
**config**\: False
.. attribute:: locators
SRv6 locators related information
**type**\: :py:class:`Locators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators>`
**config**\: False
.. attribute:: locator_all_sids
Operational container for all (Active and Stale) SIDs across all Locators
**type**\: :py:class:`LocatorAllSids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids>`
**config**\: False
.. attribute:: locator_all_active_sids
Operational container for Active SIDs across all Locators
**type**\: :py:class:`LocatorAllActiveSids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids>`
**config**\: False
"""
        # YANG module prefix and revision this binding was generated from.
        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Wire up the 'active' container and its five child containers."""
            # Python 2 requires explicit super() arguments; Python 3 does not.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Active, self).__init__()

            self.yang_name = "active"
            self.yang_parent_name = "srv6"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # Maps YANG child names to (python attribute name, binding class).
            self._child_classes = OrderedDict([("locator-all-stale-sids", ("locator_all_stale_sids", Srv6.Active.LocatorAllStaleSids)), ("manager", ("manager", Srv6.Active.Manager)), ("locators", ("locators", Srv6.Active.Locators)), ("locator-all-sids", ("locator_all_sids", Srv6.Active.LocatorAllSids)), ("locator-all-active-sids", ("locator_all_active_sids", Srv6.Active.LocatorAllActiveSids))])
            self._leafs = OrderedDict()  # no leaf nodes at this level

            self.locator_all_stale_sids = Srv6.Active.LocatorAllStaleSids()
            self.locator_all_stale_sids.parent = self
            self._children_name_map["locator_all_stale_sids"] = "locator-all-stale-sids"

            self.manager = Srv6.Active.Manager()
            self.manager.parent = self
            self._children_name_map["manager"] = "manager"

            self.locators = Srv6.Active.Locators()
            self.locators.parent = self
            self._children_name_map["locators"] = "locators"

            self.locator_all_sids = Srv6.Active.LocatorAllSids()
            self.locator_all_sids.parent = self
            self._children_name_map["locator_all_sids"] = "locator-all-sids"

            self.locator_all_active_sids = Srv6.Active.LocatorAllActiveSids()
            self.locator_all_active_sids.parent = self
            self._children_name_map["locator_all_active_sids"] = "locator-all-active-sids"
            self._segment_path = lambda: "active"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/%s" % self._segment_path()
            # Freeze: subsequent attribute writes are validated (see __setattr__).
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route every attribute write through the ydk validation hook.
            self._perform_setattr(Srv6.Active, [], name, value)
class LocatorAllStaleSids(_Entity_):
"""
Operational container for all Stale SIDs across
all Locators
.. attribute:: locator_all_stale_sid
Operational data for given locator and SID opcode
**type**\: list of :py:class:`LocatorAllStaleSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids, self).__init__()
self.yang_name = "locator-all-stale-sids"
self.yang_parent_name = "active"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("locator-all-stale-sid", ("locator_all_stale_sid", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid))])
self._leafs = OrderedDict()
self.locator_all_stale_sid = YList(self)
self._segment_path = lambda: "locator-all-stale-sids"
self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids, [], name, value)
class LocatorAllStaleSid(_Entity_):
"""
Operational data for given locator and SID
opcode
.. attribute:: locator_name (key)
Locator name
**type**\: str
**length:** 1..58
**config**\: False
.. attribute:: sid_opcode (key)
Sid opcode
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: sid_context
SID Context
**type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext>`
**config**\: False
.. attribute:: create_timestamp
Creation timestamp
**type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.CreateTimestamp>`
**config**\: False
.. attribute:: sid
SID
**type**\: str
**config**\: False
.. attribute:: allocation_type
Allocation Type
**type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
**config**\: False
.. attribute:: function_type
Function Type
**type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
**config**\: False
.. attribute:: state
State
**type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
**config**\: False
.. attribute:: has_forwarding
Rewrite done or not
**type**\: bool
**config**\: False
.. attribute:: locator
Associated locator
**type**\: str
**config**\: False
.. attribute:: owner
Owner
**type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.Owner>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid, self).__init__()
self.yang_name = "locator-all-stale-sid"
self.yang_parent_name = "locator-all-stale-sids"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['locator_name','sid_opcode']
self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.CreateTimestamp)), ("owner", ("owner", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.Owner))])
self._leafs = OrderedDict([
('locator_name', (YLeaf(YType.str, 'locator-name'), ['str'])),
('sid_opcode', (YLeaf(YType.uint32, 'sid-opcode'), ['int'])),
('sid', (YLeaf(YType.str, 'sid'), ['str'])),
('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
('locator', (YLeaf(YType.str, 'locator'), ['str'])),
])
self.locator_name = None
self.sid_opcode = None
self.sid = None
self.allocation_type = None
self.function_type = None
self.state = None
self.has_forwarding = None
self.locator = None
self.sid_context = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext()
self.sid_context.parent = self
self._children_name_map["sid_context"] = "sid-context"
self.create_timestamp = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.CreateTimestamp()
self.create_timestamp.parent = self
self._children_name_map["create_timestamp"] = "create-timestamp"
self.owner = YList(self)
self._segment_path = lambda: "locator-all-stale-sid" + "[locator-name='" + str(self.locator_name) + "']" + "[sid-opcode='" + str(self.sid_opcode) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/locator-all-stale-sids/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid, ['locator_name', 'sid_opcode', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)
class SidContext(_Entity_):
"""
SID Context
.. attribute:: key
SID Key
**type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key>`
**config**\: False
.. attribute:: application_data
Application opaque data
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext, self).__init__()
self.yang_name = "sid-context"
self.yang_parent_name = "locator-all-stale-sid"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("key", ("key", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key))])
self._leafs = OrderedDict([
('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
])
self.application_data = None
self.key = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key()
self.key.parent = self
self._children_name_map["key"] = "key"
self._segment_path = lambda: "sid-context"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext, ['application_data'], name, value)
class Key(_Entity_):
"""
SID Key
.. attribute:: e
End (PSP) SID context
**type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.E>`
**config**\: False
.. attribute:: x
End.X (PSP) SID context
**type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.X>`
**config**\: False
.. attribute:: dx4
End.DX4 SID context
**type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dx4>`
**config**\: False
.. attribute:: dt4
End.DT4 SID context
**type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dt4>`
**config**\: False
.. attribute:: sid_context_type
SIDContextType
**type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key, self).__init__()
self.yang_name = "key"
self.yang_parent_name = "sid-context"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("e", ("e", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.E)), ("x", ("x", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dt4))])
self._leafs = OrderedDict([
('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
])
self.sid_context_type = None
self.e = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.E()
self.e.parent = self
self._children_name_map["e"] = "e"
self.x = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.X()
self.x.parent = self
self._children_name_map["x"] = "x"
self.dx4 = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dx4()
self.dx4.parent = self
self._children_name_map["dx4"] = "dx4"
self.dt4 = Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dt4()
self.dt4.parent = self
self._children_name_map["dt4"] = "dt4"
self._segment_path = lambda: "key"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key, ['sid_context_type'], name, value)
class E(_Entity_):
"""
End (PSP) SID context
.. attribute:: table_id
Table Id
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: opaque_id
Additional differentiator \- opaque to SIDMgr
**type**\: int
**range:** 0..255
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.E, self).__init__()
self.yang_name = "e"
self.yang_parent_name = "key"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
])
self.table_id = None
self.opaque_id = None
self._segment_path = lambda: "e"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.E']['meta_info']
class X(_Entity_):
"""
End.X (PSP) SID context
.. attribute:: is_protected
Is protected?
**type**\: bool
**config**\: False
.. attribute:: opaque_id
Additional differentiator \- opaque to SIDMgr
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: interface
Nexthop interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
**config**\: False
.. attribute:: nexthop_address
Nexthop IP address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.X, self).__init__()
self.yang_name = "x"
self.yang_parent_name = "key"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
('interface', (YLeaf(YType.str, 'interface'), ['str'])),
('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
])
self.is_protected = None
self.opaque_id = None
self.interface = None
self.nexthop_address = None
self._segment_path = lambda: "x"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.X']['meta_info']
class Dx4(_Entity_):
"""
End.DX4 SID context
.. attribute:: table_id
Table ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: next_hop_set_id
Next Hop Set ID
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dx4, self).__init__()
self.yang_name = "dx4"
self.yang_parent_name = "key"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
])
self.table_id = None
self.next_hop_set_id = None
self._segment_path = lambda: "dx4"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dx4']['meta_info']
class Dt4(_Entity_):
"""
End.DT4 SID context
.. attribute:: table_id
Table ID
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dt4, self).__init__()
self.yang_name = "dt4"
self.yang_parent_name = "key"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
])
self.table_id = None
self._segment_path = lambda: "dt4"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dt4, ['table_id'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key.Dt4']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext.Key']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.SidContext']['meta_info']
class CreateTimestamp(_Entity_):
"""
Creation timestamp
.. attribute:: time_in_nano_seconds
Timestamp in nano seconds
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
**units**\: nanosecond
.. attribute:: age_in_nano_seconds
Age in nano seconds
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
**units**\: nanosecond
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.CreateTimestamp, self).__init__()
self.yang_name = "create-timestamp"
self.yang_parent_name = "locator-all-stale-sid"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
])
self.time_in_nano_seconds = None
self.age_in_nano_seconds = None
self._segment_path = lambda: "create-timestamp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.CreateTimestamp']['meta_info']
class Owner(_Entity_):
    """
    Owner

    .. attribute:: owner

        Owner

        **type**\: str

        **config**\: False
    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.Owner, self).__init__()
        self.yang_name = "owner"
        self.yang_parent_name = "locator-all-stale-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
        ])
        self.owner = None
        self._segment_path = lambda: "owner"
        # Must be set last: freezes attribute creation for __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through YDK's validating setter.
        self._perform_setattr(Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.Owner, ['owner'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid.Owner']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for the LocatorAllStaleSid class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as srv6_meta
    entry = srv6_meta._meta_table['Srv6.Active.LocatorAllStaleSids.LocatorAllStaleSid']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for the LocatorAllStaleSids class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as srv6_meta
    entry = srv6_meta._meta_table['Srv6.Active.LocatorAllStaleSids']
    return entry['meta_info']
class Manager(_Entity_):
    """
    SID Manager information

    .. attribute:: sid_mgr_params

        SID Mgr parameters

        **type**\: :py:class:`SidMgrParams <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.SidMgrParams>`

        **config**\: False

    .. attribute:: sid_mgr_summary

        SID Mgr summary info

        **type**\: :py:class:`SidMgrSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.SidMgrSummary>`

        **config**\: False

    .. attribute:: platform_capabilities

        Platform Capabilities

        **type**\: :py:class:`PlatformCapabilities <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities>`

        **config**\: False
    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.Manager, self).__init__()
        self.yang_name = "manager"
        self.yang_parent_name = "active"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # YANG child container name -> (python attribute name, child class).
        self._child_classes = OrderedDict([("sid-mgr-params", ("sid_mgr_params", Srv6.Active.Manager.SidMgrParams)), ("sid-mgr-summary", ("sid_mgr_summary", Srv6.Active.Manager.SidMgrSummary)), ("platform-capabilities", ("platform_capabilities", Srv6.Active.Manager.PlatformCapabilities))])
        self._leafs = OrderedDict()  # no direct leaves; data lives in the children
        # Instantiate and wire each child container before freezing.
        self.sid_mgr_params = Srv6.Active.Manager.SidMgrParams()
        self.sid_mgr_params.parent = self
        self._children_name_map["sid_mgr_params"] = "sid-mgr-params"
        self.sid_mgr_summary = Srv6.Active.Manager.SidMgrSummary()
        self.sid_mgr_summary.parent = self
        self._children_name_map["sid_mgr_summary"] = "sid-mgr-summary"
        self.platform_capabilities = Srv6.Active.Manager.PlatformCapabilities()
        self.platform_capabilities.parent = self
        self._children_name_map["platform_capabilities"] = "platform-capabilities"
        self._segment_path = lambda: "manager"
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/%s" % self._segment_path()
        # Must be set last: freezes attribute creation for __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through YDK's validating setter.
        self._perform_setattr(Srv6.Active.Manager, [], name, value)

    class SidMgrParams(_Entity_):
        """
        SID Mgr parameters

        .. attribute:: encap_hop_limit

            Encap Hop\-limit info

            **type**\: :py:class:`EncapHopLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.SidMgrParams.EncapHopLimit>`

            **config**\: False

        .. attribute:: srv6_enabled

            Is SRv6 enabled?

            **type**\: bool

            **config**\: False

        .. attribute:: configured_encap_source_address

            Configured Encap Source address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: default_encap_source_address

            Default Encap Source address

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: encap_ttl_propagate

            Is TTL propagate enabled?

            **type**\: bool

            **config**\: False

        .. attribute:: is_sid_holdtime_configured

            Is SID Holdtime configured?

            **type**\: bool

            **config**\: False

        .. attribute:: sid_holdtime_mins_configured

            Configured SID Holdtime in mins

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

            **units**\: minute
        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Python 2/3 compatible call into the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Active.Manager.SidMgrParams, self).__init__()
            self.yang_name = "sid-mgr-params"
            self.yang_parent_name = "manager"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("encap-hop-limit", ("encap_hop_limit", Srv6.Active.Manager.SidMgrParams.EncapHopLimit))])
            # Leaf name -> (YLeaf descriptor, accepted Python type names).
            self._leafs = OrderedDict([
                ('srv6_enabled', (YLeaf(YType.boolean, 'srv6-enabled'), ['bool'])),
                ('configured_encap_source_address', (YLeaf(YType.str, 'configured-encap-source-address'), ['str'])),
                ('default_encap_source_address', (YLeaf(YType.str, 'default-encap-source-address'), ['str'])),
                ('encap_ttl_propagate', (YLeaf(YType.boolean, 'encap-ttl-propagate'), ['bool'])),
                ('is_sid_holdtime_configured', (YLeaf(YType.boolean, 'is-sid-holdtime-configured'), ['bool'])),
                ('sid_holdtime_mins_configured', (YLeaf(YType.uint32, 'sid-holdtime-mins-configured'), ['int'])),
            ])
            self.srv6_enabled = None
            self.configured_encap_source_address = None
            self.default_encap_source_address = None
            self.encap_ttl_propagate = None
            self.is_sid_holdtime_configured = None
            self.sid_holdtime_mins_configured = None
            self.encap_hop_limit = Srv6.Active.Manager.SidMgrParams.EncapHopLimit()
            self.encap_hop_limit.parent = self
            self._children_name_map["encap_hop_limit"] = "encap-hop-limit"
            self._segment_path = lambda: "sid-mgr-params"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/%s" % self._segment_path()
            # Must be set last: freezes attribute creation for __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Active.Manager.SidMgrParams, ['srv6_enabled', 'configured_encap_source_address', 'default_encap_source_address', 'encap_ttl_propagate', 'is_sid_holdtime_configured', 'sid_holdtime_mins_configured'], name, value)

        class EncapHopLimit(_Entity_):
            """
            Encap Hop\-limit info

            .. attribute:: use_default

                Use default IPv6 hop\-limit value

                **type**\: bool

                **config**\: False

            .. attribute:: do_propagate

                Propagate IP TTL to Encap IPv6 hop\-limit

                **type**\: bool

                **config**\: False

            .. attribute:: value

                Specific value set for hop\-limit count

                **type**\: int

                **range:** 0..255

                **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible call into the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.Manager.SidMgrParams.EncapHopLimit, self).__init__()
                self.yang_name = "encap-hop-limit"
                self.yang_parent_name = "sid-mgr-params"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only node
                self._leafs = OrderedDict([
                    ('use_default', (YLeaf(YType.boolean, 'use-default'), ['bool'])),
                    ('do_propagate', (YLeaf(YType.boolean, 'do-propagate'), ['bool'])),
                    ('value', (YLeaf(YType.uint8, 'value'), ['int'])),
                ])
                self.use_default = None
                self.do_propagate = None
                self.value = None
                self._segment_path = lambda: "encap-hop-limit"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/sid-mgr-params/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Active.Manager.SidMgrParams.EncapHopLimit, ['use_default', 'do_propagate', 'value'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.Manager.SidMgrParams.EncapHopLimit']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Active.Manager.SidMgrParams']['meta_info']

    class SidMgrSummary(_Entity_):
        """
        SID Mgr summary info

        .. attribute:: sids_out_of_resource_summary

            SIDs Global Out of Resource info

            **type**\: :py:class:`SidsOutOfResourceSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.SidMgrSummary.SidsOutOfResourceSummary>`

            **config**\: False

        .. attribute:: locators_count

            Number of locators

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: oper_locators_count

            Number of operational locators

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: sids_count

            Number of SIDs

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: stale_sids_count

            Number of Stale SIDs

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: maximum_sids_count

            Global Maximum number of SIDs

            **type**\: int

            **range:** 0..4294967295

            **config**\: False
        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Python 2/3 compatible call into the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Active.Manager.SidMgrSummary, self).__init__()
            self.yang_name = "sid-mgr-summary"
            self.yang_parent_name = "manager"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("sids-out-of-resource-summary", ("sids_out_of_resource_summary", Srv6.Active.Manager.SidMgrSummary.SidsOutOfResourceSummary))])
            self._leafs = OrderedDict([
                ('locators_count', (YLeaf(YType.uint16, 'locators-count'), ['int'])),
                ('oper_locators_count', (YLeaf(YType.uint16, 'oper-locators-count'), ['int'])),
                ('sids_count', (YLeaf(YType.uint32, 'sids-count'), ['int'])),
                ('stale_sids_count', (YLeaf(YType.uint32, 'stale-sids-count'), ['int'])),
                ('maximum_sids_count', (YLeaf(YType.uint32, 'maximum-sids-count'), ['int'])),
            ])
            self.locators_count = None
            self.oper_locators_count = None
            self.sids_count = None
            self.stale_sids_count = None
            self.maximum_sids_count = None
            self.sids_out_of_resource_summary = Srv6.Active.Manager.SidMgrSummary.SidsOutOfResourceSummary()
            self.sids_out_of_resource_summary.parent = self
            self._children_name_map["sids_out_of_resource_summary"] = "sids-out-of-resource-summary"
            self._segment_path = lambda: "sid-mgr-summary"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/%s" % self._segment_path()
            # Must be set last: freezes attribute creation for __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Active.Manager.SidMgrSummary, ['locators_count', 'oper_locators_count', 'sids_count', 'stale_sids_count', 'maximum_sids_count'], name, value)

        class SidsOutOfResourceSummary(_Entity_):
            """
            SIDs Global Out of Resource info

            .. attribute:: out_of_resources_state

                Global Resources State for SIDs

                **type**\: :py:class:`Srv6OutOfResourceState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6OutOfResourceState>`

                **config**\: False

            .. attribute:: oor_yellow_free_sid_threshold

                Threshold for Number of Free SID below which OOR Yellow State is reached

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: oor_green_free_sid_threshold

                Threshold for Number of Free SID above which OOR Green State is restored

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: oor_green_count

                Number of times Resources Warning or Out of Resources state has been cleared

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: oor_yellow_count

                Number of times system went into Resources Warning state

                **type**\: int

                **range:** 0..4294967295

                **config**\: False

            .. attribute:: oor_red_count

                Number of times system went into Out of Resources state

                **type**\: int

                **range:** 0..4294967295

                **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible call into the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.Manager.SidMgrSummary.SidsOutOfResourceSummary, self).__init__()
                self.yang_name = "sids-out-of-resource-summary"
                self.yang_parent_name = "sid-mgr-summary"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only node
                # 'out-of-resources-state' is an enumeration leaf resolved via
                # the Srv6OutOfResourceState enum declared in this module.
                self._leafs = OrderedDict([
                    ('out_of_resources_state', (YLeaf(YType.enumeration, 'out-of-resources-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6OutOfResourceState', '')])),
                    ('oor_yellow_free_sid_threshold', (YLeaf(YType.uint32, 'oor-yellow-free-sid-threshold'), ['int'])),
                    ('oor_green_free_sid_threshold', (YLeaf(YType.uint32, 'oor-green-free-sid-threshold'), ['int'])),
                    ('oor_green_count', (YLeaf(YType.uint32, 'oor-green-count'), ['int'])),
                    ('oor_yellow_count', (YLeaf(YType.uint32, 'oor-yellow-count'), ['int'])),
                    ('oor_red_count', (YLeaf(YType.uint32, 'oor-red-count'), ['int'])),
                ])
                self.out_of_resources_state = None
                self.oor_yellow_free_sid_threshold = None
                self.oor_green_free_sid_threshold = None
                self.oor_green_count = None
                self.oor_yellow_count = None
                self.oor_red_count = None
                self._segment_path = lambda: "sids-out-of-resource-summary"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/sid-mgr-summary/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Active.Manager.SidMgrSummary.SidsOutOfResourceSummary, ['out_of_resources_state', 'oor_yellow_free_sid_threshold', 'oor_green_free_sid_threshold', 'oor_green_count', 'oor_yellow_count', 'oor_red_count'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.Manager.SidMgrSummary.SidsOutOfResourceSummary']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Active.Manager.SidMgrSummary']['meta_info']

    class PlatformCapabilities(_Entity_):
        """
        Platform Capabilities

        .. attribute:: support

            Feature support

            **type**\: :py:class:`Support <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities.Support>`

            **config**\: False

        .. attribute:: max_sid

            Maximum Sids

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: sid_holdtime_mins

            Freed SID holdtime in mins

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

            **units**\: minute
        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Python 2/3 compatible call into the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Active.Manager.PlatformCapabilities, self).__init__()
            self.yang_name = "platform-capabilities"
            self.yang_parent_name = "manager"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("support", ("support", Srv6.Active.Manager.PlatformCapabilities.Support))])
            self._leafs = OrderedDict([
                ('max_sid', (YLeaf(YType.uint32, 'max-sid'), ['int'])),
                ('sid_holdtime_mins', (YLeaf(YType.uint32, 'sid-holdtime-mins'), ['int'])),
            ])
            self.max_sid = None
            self.sid_holdtime_mins = None
            self.support = Srv6.Active.Manager.PlatformCapabilities.Support()
            self.support.parent = self
            self._children_name_map["support"] = "support"
            self._segment_path = lambda: "platform-capabilities"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/%s" % self._segment_path()
            # Must be set last: freezes attribute creation for __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities, ['max_sid', 'sid_holdtime_mins'], name, value)

        class Support(_Entity_):
            """
            Feature support

            .. attribute:: signaled_parameters

                Signaled Parameters

                **type**\: :py:class:`SignaledParameters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities.Support.SignaledParameters>`

                **config**\: False

            .. attribute:: srv6

                SRv6 support

                **type**\: bool

                **config**\: False

            .. attribute:: tilfa

                TI LFA support

                **type**\: bool

                **config**\: False

            .. attribute:: microloop_avoidance

                Microloop\-avoidance support

                **type**\: bool

                **config**\: False

            .. attribute:: end_func

                Supported end functions

                **type**\: list of :py:class:`EndFunc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities.Support.EndFunc>`

                **config**\: False

            .. attribute:: transit_func

                Supported Transit functions

                **type**\: list of :py:class:`TransitFunc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities.Support.TransitFunc>`

                **config**\: False

            .. attribute:: security_rule

                Security rules

                **type**\: list of :py:class:`SecurityRule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities.Support.SecurityRule>`

                **config**\: False

            .. attribute:: counter

                Counters

                **type**\: list of :py:class:`Counter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Manager.PlatformCapabilities.Support.Counter>`

                **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible call into the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.Manager.PlatformCapabilities.Support, self).__init__()
                self.yang_name = "support"
                self.yang_parent_name = "platform-capabilities"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("signaled-parameters", ("signaled_parameters", Srv6.Active.Manager.PlatformCapabilities.Support.SignaledParameters)), ("end-func", ("end_func", Srv6.Active.Manager.PlatformCapabilities.Support.EndFunc)), ("transit-func", ("transit_func", Srv6.Active.Manager.PlatformCapabilities.Support.TransitFunc)), ("security-rule", ("security_rule", Srv6.Active.Manager.PlatformCapabilities.Support.SecurityRule)), ("counter", ("counter", Srv6.Active.Manager.PlatformCapabilities.Support.Counter))])
                self._leafs = OrderedDict([
                    ('srv6', (YLeaf(YType.boolean, 'srv6'), ['bool'])),
                    ('tilfa', (YLeaf(YType.boolean, 'tilfa'), ['bool'])),
                    ('microloop_avoidance', (YLeaf(YType.boolean, 'microloop-avoidance'), ['bool'])),
                ])
                self.srv6 = None
                self.tilfa = None
                self.microloop_avoidance = None
                self.signaled_parameters = Srv6.Active.Manager.PlatformCapabilities.Support.SignaledParameters()
                self.signaled_parameters.parent = self
                self._children_name_map["signaled_parameters"] = "signaled-parameters"
                # YANG list nodes are modelled as YList containers.
                self.end_func = YList(self)
                self.transit_func = YList(self)
                self.security_rule = YList(self)
                self.counter = YList(self)
                self._segment_path = lambda: "support"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/platform-capabilities/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities.Support, ['srv6', 'tilfa', 'microloop_avoidance'], name, value)

            class SignaledParameters(_Entity_):
                """
                Signaled Parameters

                .. attribute:: max_sl

                    Max value of SegmentLeft field in received SRH

                    **type**\: int

                    **range:** 0..255

                    **config**\: False

                .. attribute:: max_end_pop_srh

                    Max num of SIDs in rcvd SRH for pop

                    **type**\: int

                    **range:** 0..255

                    **config**\: False

                .. attribute:: max_t_insert

                    Max num of SIDs for T.Insert op

                    **type**\: int

                    **range:** 0..255

                    **config**\: False

                .. attribute:: max_t_encap

                    Max num of SIDs for T.Encaps op

                    **type**\: int

                    **range:** 0..255

                    **config**\: False

                .. attribute:: max_end_d

                    Max num of SIDs in rcvd SRH for decap

                    **type**\: int

                    **range:** 0..255

                    **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Python 2/3 compatible call into the _Entity_ base initializer.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.Manager.PlatformCapabilities.Support.SignaledParameters, self).__init__()
                    self.yang_name = "signaled-parameters"
                    self.yang_parent_name = "support"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self._leafs = OrderedDict([
                        ('max_sl', (YLeaf(YType.uint8, 'max-sl'), ['int'])),
                        ('max_end_pop_srh', (YLeaf(YType.uint8, 'max-end-pop-srh'), ['int'])),
                        ('max_t_insert', (YLeaf(YType.uint8, 'max-t-insert'), ['int'])),
                        ('max_t_encap', (YLeaf(YType.uint8, 'max-t-encap'), ['int'])),
                        ('max_end_d', (YLeaf(YType.uint8, 'max-end-d'), ['int'])),
                    ])
                    self.max_sl = None
                    self.max_end_pop_srh = None
                    self.max_t_insert = None
                    self.max_t_encap = None
                    self.max_end_d = None
                    self._segment_path = lambda: "signaled-parameters"
                    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/platform-capabilities/support/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities.Support.SignaledParameters, ['max_sl', 'max_end_pop_srh', 'max_t_insert', 'max_t_encap', 'max_end_d'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities.Support.SignaledParameters']['meta_info']

            class EndFunc(_Entity_):
                """
                Supported end functions

                .. attribute:: string

                    String

                    **type**\: str

                    **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Python 2/3 compatible call into the _Entity_ base initializer.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.Manager.PlatformCapabilities.Support.EndFunc, self).__init__()
                    self.yang_name = "end-func"
                    self.yang_parent_name = "support"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self._leafs = OrderedDict([
                        ('string', (YLeaf(YType.str, 'string'), ['str'])),
                    ])
                    self.string = None
                    self._segment_path = lambda: "end-func"
                    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/platform-capabilities/support/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities.Support.EndFunc, ['string'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities.Support.EndFunc']['meta_info']

            class TransitFunc(_Entity_):
                """
                Supported Transit functions

                .. attribute:: string

                    String

                    **type**\: str

                    **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Python 2/3 compatible call into the _Entity_ base initializer.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.Manager.PlatformCapabilities.Support.TransitFunc, self).__init__()
                    self.yang_name = "transit-func"
                    self.yang_parent_name = "support"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self._leafs = OrderedDict([
                        ('string', (YLeaf(YType.str, 'string'), ['str'])),
                    ])
                    self.string = None
                    self._segment_path = lambda: "transit-func"
                    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/platform-capabilities/support/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities.Support.TransitFunc, ['string'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities.Support.TransitFunc']['meta_info']

            class SecurityRule(_Entity_):
                """
                Security rules

                .. attribute:: string

                    String

                    **type**\: str

                    **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Python 2/3 compatible call into the _Entity_ base initializer.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.Manager.PlatformCapabilities.Support.SecurityRule, self).__init__()
                    self.yang_name = "security-rule"
                    self.yang_parent_name = "support"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self._leafs = OrderedDict([
                        ('string', (YLeaf(YType.str, 'string'), ['str'])),
                    ])
                    self.string = None
                    self._segment_path = lambda: "security-rule"
                    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/platform-capabilities/support/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities.Support.SecurityRule, ['string'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities.Support.SecurityRule']['meta_info']

            class Counter(_Entity_):
                """
                Counters

                .. attribute:: string

                    String

                    **type**\: str

                    **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Python 2/3 compatible call into the _Entity_ base initializer.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.Manager.PlatformCapabilities.Support.Counter, self).__init__()
                    self.yang_name = "counter"
                    self.yang_parent_name = "support"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])  # leaf-only node
                    self._leafs = OrderedDict([
                        ('string', (YLeaf(YType.str, 'string'), ['str'])),
                    ])
                    self.string = None
                    self._segment_path = lambda: "counter"
                    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/manager/platform-capabilities/support/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Srv6.Active.Manager.PlatformCapabilities.Support.Counter, ['string'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities.Support.Counter']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities.Support']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Active.Manager.PlatformCapabilities']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.Manager']['meta_info']
class Locators(_Entity_):
"""
SRv6 locators related information
.. attribute:: locator
Operational data for given SRv6 locator
**type**\: list of :py:class:`Locator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    # Python 2/3 compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Active.Locators, self).__init__()
    self.yang_name = "locators"
    self.yang_parent_name = "active"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child: the keyed "locator" YANG list.
    self._child_classes = OrderedDict([("locator", ("locator", Srv6.Active.Locators.Locator))])
    self._leafs = OrderedDict()  # no direct leaves on this container
    self.locator = YList(self)
    self._segment_path = lambda: "locators"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/%s" % self._segment_path()
    # Must be set last: freezes attribute creation for __setattr__.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route all writes through YDK's validating setter.
    self._perform_setattr(Srv6.Active.Locators, [], name, value)
class Locator(_Entity_):
"""
Operational data for given SRv6 locator
.. attribute:: name (key)
Locator name
**type**\: str
**length:** 1..58
**config**\: False
.. attribute:: info
Operational data for given SRv6 locator
**type**\: :py:class:`Info <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Info>`
**config**\: False
.. attribute:: sids
SRv6 locator SID table
**type**\: :py:class:`Sids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    # Python 2/3 compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Active.Locators.Locator, self).__init__()
    self.yang_name = "locator"
    self.yang_parent_name = "locators"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['name']  # 'name' is the YANG list key
    self._child_classes = OrderedDict([("info", ("info", Srv6.Active.Locators.Locator.Info)), ("sids", ("sids", Srv6.Active.Locators.Locator.Sids))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None
    self.info = Srv6.Active.Locators.Locator.Info()
    self.info.parent = self
    self._children_name_map["info"] = "info"
    self.sids = Srv6.Active.Locators.Locator.Sids()
    self.sids.parent = self
    self._children_name_map["sids"] = "sids"
    # List entry path embeds the key value, e.g. locator[name='foo'].
    self._segment_path = lambda: "locator" + "[name='" + str(self.name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/locators/%s" % self._segment_path()
    # Must be set last: freezes attribute creation for __setattr__.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route all writes through YDK's validating setter.
    self._perform_setattr(Srv6.Active.Locators.Locator, ['name'], name, value)
class Info(_Entity_):
"""
Operational data for given SRv6 locator
.. attribute:: interface
Locator IM intf info
**type**\: :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Info.Interface>`
**config**\: False
.. attribute:: create_timestamp
Creation timestamp
**type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Info.CreateTimestamp>`
**config**\: False
.. attribute:: name
Locator Name
**type**\: str
**config**\: False
.. attribute:: id
Locator ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix
Locator Prefix
**type**\: str
**config**\: False
.. attribute:: is_operational
Locator status is Up or Down
**type**\: bool
**config**\: False
.. attribute:: is_default
Locator is the default locator
**type**\: bool
**config**\: False
.. attribute:: out_of_resources_state
Locator Resources State for SIDs
**type**\: :py:class:`Srv6OutOfResourceState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6OutOfResourceState>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Active.Locators.Locator.Info, self).__init__()
self.yang_name = "info"
self.yang_parent_name = "locator"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("interface", ("interface", Srv6.Active.Locators.Locator.Info.Interface)), ("create-timestamp", ("create_timestamp", Srv6.Active.Locators.Locator.Info.CreateTimestamp))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('id', (YLeaf(YType.uint32, 'id'), ['int'])),
('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
('is_operational', (YLeaf(YType.boolean, 'is-operational'), ['bool'])),
('is_default', (YLeaf(YType.boolean, 'is-default'), ['bool'])),
('out_of_resources_state', (YLeaf(YType.enumeration, 'out-of-resources-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6OutOfResourceState', '')])),
])
self.name = None
self.id = None
self.prefix = None
self.is_operational = None
self.is_default = None
self.out_of_resources_state = None
self.interface = Srv6.Active.Locators.Locator.Info.Interface()
self.interface.parent = self
self._children_name_map["interface"] = "interface"
self.create_timestamp = Srv6.Active.Locators.Locator.Info.CreateTimestamp()
self.create_timestamp.parent = self
self._children_name_map["create_timestamp"] = "create-timestamp"
self._segment_path = lambda: "info"
self._is_frozen = True
def __setattr__(self, name, value):
    # Funnel every attribute write through YDK's _perform_setattr, passing
    # this class's leaf-name list for bookkeeping/validation.
    self._perform_setattr(Srv6.Active.Locators.Locator.Info, ['name', 'id', 'prefix', 'is_operational', 'is_default', 'out_of_resources_state'], name, value)
class Interface(_Entity_):
    """
    Locator IM intf info
    .. attribute:: name
        Interface name
        **type**\: str
        **config**\: False
    .. attribute:: if_handle
        Interface handle
        **type**\: str
        **pattern:** [0\-9a\-fA\-F]{1,8}
        **config**\: False
    .. attribute:: programmed_prefix
        Interface prefix/addr programmed
        **type**\: str
        **config**\: False
    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'interface' container entity and register its YANG metadata."""
        # Python 2 requires the explicit class reference in super(); Python 3 does not.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.Locators.Locator.Info.Interface, self).__init__()
        self.yang_name = "interface"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ('if_handle', (YLeaf(YType.str, 'if-handle'), ['str'])),
            ('programmed_prefix', (YLeaf(YType.str, 'programmed-prefix'), ['str'])),
        ])
        self.name = None
        self.if_handle = None
        self.programmed_prefix = None
        self._segment_path = lambda: "interface"
        # Set last: marks the end of construction for the YDK runtime.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through YDK's validation/bookkeeping helper.
        self._perform_setattr(Srv6.Active.Locators.Locator.Info.Interface, ['name', 'if_handle', 'programmed_prefix'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta-table module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.Locators.Locator.Info.Interface']['meta_info']
class CreateTimestamp(_Entity_):
    """
    Creation timestamp
    .. attribute:: time_in_nano_seconds
        Timestamp in nano seconds
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
        **units**\: nanosecond
    .. attribute:: age_in_nano_seconds
        Age in nano seconds
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
        **units**\: nanosecond
    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'create-timestamp' container entity and register its YANG metadata."""
        # Python 2 requires the explicit class reference in super(); Python 3 does not.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.Locators.Locator.Info.CreateTimestamp, self).__init__()
        self.yang_name = "create-timestamp"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Both leaves are uint64 nanosecond counters (see class docstring).
        self._leafs = OrderedDict([
            ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
            ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
        ])
        self.time_in_nano_seconds = None
        self.age_in_nano_seconds = None
        self._segment_path = lambda: "create-timestamp"
        # Set last: marks the end of construction for the YDK runtime.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through YDK's validation/bookkeeping helper.
        self._perform_setattr(Srv6.Active.Locators.Locator.Info.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta-table module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.Locators.Locator.Info.CreateTimestamp']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import: the generated meta-table module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Active.Locators.Locator.Info']['meta_info']
class Sids(_Entity_):
    """
    SRv6 locator SID table
    .. attribute:: sid
        Operational data for given SRv6 SID
        **type**\: list of :py:class:`Sid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid>`
        **config**\: False
    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'sids' list-container entity and register its YANG metadata."""
        # Python 2 requires the explicit class reference in super(); Python 3 does not.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.Locators.Locator.Sids, self).__init__()
        self.yang_name = "sids"
        self.yang_parent_name = "locator"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("sid", ("sid", Srv6.Active.Locators.Locator.Sids.Sid))])
        self._leafs = OrderedDict()
        # Keyed YANG list of per-SID entries.
        self.sid = YList(self)
        self._segment_path = lambda: "sids"
        # Set last: marks the end of construction for the YDK runtime.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through YDK's validation/bookkeeping helper (no leaves here).
        self._perform_setattr(Srv6.Active.Locators.Locator.Sids, [], name, value)

    class Sid(_Entity_):
        """
        Operational data for given SRv6 SID
        .. attribute:: address (key)
            IPv6 address
            **type**\: union of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
            **config**\: False
        .. attribute:: sid_context
            SID Context
            **type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.SidContext>`
            **config**\: False
        .. attribute:: create_timestamp
            Creation timestamp
            **type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.CreateTimestamp>`
            **config**\: False
        .. attribute:: sid
            SID
            **type**\: str
            **config**\: False
        .. attribute:: allocation_type
            Allocation Type
            **type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
            **config**\: False
        .. attribute:: function_type
            Function Type
            **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
            **config**\: False
        .. attribute:: state
            State
            **type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
            **config**\: False
        .. attribute:: has_forwarding
            Rewrite done or not
            **type**\: bool
            **config**\: False
        .. attribute:: locator
            Associated locator
            **type**\: str
            **config**\: False
        .. attribute:: owner
            Owner
            **type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.Owner>`
            **config**\: False
        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            """Build one 'sid' list entry (keyed by 'address') and register its YANG metadata."""
            # Python 2 requires the explicit class reference in super(); Python 3 does not.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Active.Locators.Locator.Sids.Sid, self).__init__()
            self.yang_name = "sid"
            self.yang_parent_name = "sids"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # 'address' is this list entry's YANG key.
            self.ylist_key_names = ['address']
            self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Active.Locators.Locator.Sids.Sid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Active.Locators.Locator.Sids.Sid.CreateTimestamp)), ("owner", ("owner", Srv6.Active.Locators.Locator.Sids.Sid.Owner))])
            # Leaf name -> (YLeaf descriptor, accepted Python type names).
            # 'address' lists 'str' twice: one entry per member of the YANG union.
            self._leafs = OrderedDict([
                ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
                ('sid', (YLeaf(YType.str, 'sid'), ['str'])),
                ('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
                ('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
                ('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
                ('locator', (YLeaf(YType.str, 'locator'), ['str'])),
            ])
            self.address = None
            self.sid = None
            self.allocation_type = None
            self.function_type = None
            self.state = None
            self.has_forwarding = None
            self.locator = None
            # Instantiate and parent the child container entities.
            self.sid_context = Srv6.Active.Locators.Locator.Sids.Sid.SidContext()
            self.sid_context.parent = self
            self._children_name_map["sid_context"] = "sid-context"
            self.create_timestamp = Srv6.Active.Locators.Locator.Sids.Sid.CreateTimestamp()
            self.create_timestamp.parent = self
            self._children_name_map["create_timestamp"] = "create-timestamp"
            self.owner = YList(self)
            # Segment path embeds the list key predicate.
            self._segment_path = lambda: "sid" + "[address='" + str(self.address) + "']"
            # Set last: marks the end of construction for the YDK runtime.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route all writes through YDK's validation/bookkeeping helper.
            self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid, ['address', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)

        class SidContext(_Entity_):
            """
            SID Context
            .. attribute:: key
                SID Key
                **type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key>`
                **config**\: False
            .. attribute:: application_data
                Application opaque data
                **type**\: str
                **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
                **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Build the 'sid-context' container entity and register its YANG metadata."""
                # Python 2 requires the explicit class reference in super(); Python 3 does not.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.Locators.Locator.Sids.Sid.SidContext, self).__init__()
                self.yang_name = "sid-context"
                self.yang_parent_name = "sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("key", ("key", Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key))])
                self._leafs = OrderedDict([
                    ('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
                ])
                self.application_data = None
                self.key = Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key()
                self.key.parent = self
                self._children_name_map["key"] = "key"
                self._segment_path = lambda: "sid-context"
                # Set last: marks the end of construction for the YDK runtime.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route all writes through YDK's validation/bookkeeping helper.
                self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.SidContext, ['application_data'], name, value)

            class Key(_Entity_):
                """
                SID Key
                .. attribute:: e
                    End (PSP) SID context
                    **type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.E>`
                    **config**\: False
                .. attribute:: x
                    End.X (PSP) SID context
                    **type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.X>`
                    **config**\: False
                .. attribute:: dx4
                    End.DX4 SID context
                    **type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dx4>`
                    **config**\: False
                .. attribute:: dt4
                    End.DT4 SID context
                    **type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dt4>`
                    **config**\: False
                .. attribute:: sid_context_type
                    SIDContextType
                    **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
                    **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    """Build the 'key' container (per-end-function SID context union) and register its YANG metadata."""
                    # Python 2 requires the explicit class reference in super(); Python 3 does not.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key, self).__init__()
                    self.yang_name = "key"
                    self.yang_parent_name = "sid-context"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    # One child container per supported SRv6 end-function context.
                    self._child_classes = OrderedDict([("e", ("e", Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.E)), ("x", ("x", Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dt4))])
                    self._leafs = OrderedDict([
                        ('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
                    ])
                    self.sid_context_type = None
                    self.e = Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.E()
                    self.e.parent = self
                    self._children_name_map["e"] = "e"
                    self.x = Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.X()
                    self.x.parent = self
                    self._children_name_map["x"] = "x"
                    self.dx4 = Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dx4()
                    self.dx4.parent = self
                    self._children_name_map["dx4"] = "dx4"
                    self.dt4 = Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dt4()
                    self.dt4.parent = self
                    self._children_name_map["dt4"] = "dt4"
                    self._segment_path = lambda: "key"
                    # Set last: marks the end of construction for the YDK runtime.
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    # Route all writes through YDK's validation/bookkeeping helper.
                    self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key, ['sid_context_type'], name, value)

                class E(_Entity_):
                    """
                    End (PSP) SID context
                    .. attribute:: table_id
                        Table Id
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False
                    .. attribute:: opaque_id
                        Additional differentiator \- opaque to SIDMgr
                        **type**\: int
                        **range:** 0..255
                        **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Build the 'e' (End PSP) context entity and register its YANG metadata."""
                        # Python 2 requires the explicit class reference in super(); Python 3 does not.
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.E, self).__init__()
                        self.yang_name = "e"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
                        ])
                        self.table_id = None
                        self.opaque_id = None
                        self._segment_path = lambda: "e"
                        # Set last: marks the end of construction for the YDK runtime.
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Route all writes through YDK's validation/bookkeeping helper.
                        self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import: the generated meta-table module is only loaded on demand.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.E']['meta_info']

                class X(_Entity_):
                    """
                    End.X (PSP) SID context
                    .. attribute:: is_protected
                        Is protected?
                        **type**\: bool
                        **config**\: False
                    .. attribute:: opaque_id
                        Additional differentiator \- opaque to SIDMgr
                        **type**\: int
                        **range:** 0..255
                        **config**\: False
                    .. attribute:: interface
                        Nexthop interface
                        **type**\: str
                        **pattern:** [a\-zA\-Z0\-9.\_/\-]+
                        **config**\: False
                    .. attribute:: nexthop_address
                        Nexthop IP address
                        **type**\: str
                        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                        **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Build the 'x' (End.X PSP) context entity and register its YANG metadata."""
                        # Python 2 requires the explicit class reference in super(); Python 3 does not.
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.X, self).__init__()
                        self.yang_name = "x"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
                            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
                            ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
                            ('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
                        ])
                        self.is_protected = None
                        self.opaque_id = None
                        self.interface = None
                        self.nexthop_address = None
                        self._segment_path = lambda: "x"
                        # Set last: marks the end of construction for the YDK runtime.
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Route all writes through YDK's validation/bookkeeping helper.
                        self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import: the generated meta-table module is only loaded on demand.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.X']['meta_info']

                class Dx4(_Entity_):
                    """
                    End.DX4 SID context
                    .. attribute:: table_id
                        Table ID
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False
                    .. attribute:: next_hop_set_id
                        Next Hop Set ID
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Build the 'dx4' (End.DX4) context entity and register its YANG metadata."""
                        # Python 2 requires the explicit class reference in super(); Python 3 does not.
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dx4, self).__init__()
                        self.yang_name = "dx4"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                            ('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
                        ])
                        self.table_id = None
                        self.next_hop_set_id = None
                        self._segment_path = lambda: "dx4"
                        # Set last: marks the end of construction for the YDK runtime.
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Route all writes through YDK's validation/bookkeeping helper.
                        self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import: the generated meta-table module is only loaded on demand.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dx4']['meta_info']

                class Dt4(_Entity_):
                    """
                    End.DT4 SID context
                    .. attribute:: table_id
                        Table ID
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        """Build the 'dt4' (End.DT4) context entity and register its YANG metadata."""
                        # Python 2 requires the explicit class reference in super(); Python 3 does not.
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dt4, self).__init__()
                        self.yang_name = "dt4"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                        ])
                        self.table_id = None
                        self._segment_path = lambda: "dt4"
                        # Set last: marks the end of construction for the YDK runtime.
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Route all writes through YDK's validation/bookkeeping helper.
                        self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dt4, ['table_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import: the generated meta-table module is only loaded on demand.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key.Dt4']['meta_info']

                @staticmethod
                def _meta_info():
                    # Deferred import: the generated meta-table module is only loaded on demand.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.SidContext.Key']['meta_info']

            @staticmethod
            def _meta_info():
                # Deferred import: the generated meta-table module is only loaded on demand.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.SidContext']['meta_info']

        class CreateTimestamp(_Entity_):
            """
            Creation timestamp
            .. attribute:: time_in_nano_seconds
                Timestamp in nano seconds
                **type**\: int
                **range:** 0..18446744073709551615
                **config**\: False
                **units**\: nanosecond
            .. attribute:: age_in_nano_seconds
                Age in nano seconds
                **type**\: int
                **range:** 0..18446744073709551615
                **config**\: False
                **units**\: nanosecond
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Build the 'create-timestamp' container entity and register its YANG metadata."""
                # Python 2 requires the explicit class reference in super(); Python 3 does not.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.Locators.Locator.Sids.Sid.CreateTimestamp, self).__init__()
                self.yang_name = "create-timestamp"
                self.yang_parent_name = "sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
                    ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
                ])
                self.time_in_nano_seconds = None
                self.age_in_nano_seconds = None
                self._segment_path = lambda: "create-timestamp"
                # Set last: marks the end of construction for the YDK runtime.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route all writes through YDK's validation/bookkeeping helper.
                self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)

            @staticmethod
            def _meta_info():
                # Deferred import: the generated meta-table module is only loaded on demand.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.CreateTimestamp']['meta_info']

        class Owner(_Entity_):
            """
            Owner
            .. attribute:: owner
                Owner
                **type**\: str
                **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                """Build one 'owner' list entry and register its YANG metadata."""
                # Python 2 requires the explicit class reference in super(); Python 3 does not.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.Locators.Locator.Sids.Sid.Owner, self).__init__()
                self.yang_name = "owner"
                self.yang_parent_name = "sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
                ])
                self.owner = None
                self._segment_path = lambda: "owner"
                # Set last: marks the end of construction for the YDK runtime.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Route all writes through YDK's validation/bookkeeping helper.
                self._perform_setattr(Srv6.Active.Locators.Locator.Sids.Sid.Owner, ['owner'], name, value)

            @staticmethod
            def _meta_info():
                # Deferred import: the generated meta-table module is only loaded on demand.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid.Owner']['meta_info']

        @staticmethod
        def _meta_info():
            # Deferred import: the generated meta-table module is only loaded on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Active.Locators.Locator.Sids.Sid']['meta_info']

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta-table module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.Locators.Locator.Sids']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import: the generated meta-table module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Active.Locators.Locator']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import: the generated meta-table module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Active.Locators']['meta_info']
class LocatorAllSids(_Entity_):
"""
Operational container for all (Active and Stale)
SIDs across all Locators
.. attribute:: locator_all_sid
Operational data for given locator and SID opcode
**type**\: list of :py:class:`LocatorAllSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'locator-all-sids' container entity and register its YANG metadata."""
    # Python 2 requires the explicit class reference in super(); Python 3 does not.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Active.LocatorAllSids, self).__init__()
    self.yang_name = "locator-all-sids"
    self.yang_parent_name = "active"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("locator-all-sid", ("locator_all_sid", Srv6.Active.LocatorAllSids.LocatorAllSid))])
    self._leafs = OrderedDict()
    # Keyed YANG list of (locator, opcode) SID entries.
    self.locator_all_sid = YList(self)
    self._segment_path = lambda: "locator-all-sids"
    # No list ancestor, so a full absolute path from the model root is available.
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/%s" % self._segment_path()
    # Set last: marks the end of construction for the YDK runtime.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK's validation/bookkeeping helper (no leaves here).
    self._perform_setattr(Srv6.Active.LocatorAllSids, [], name, value)
class LocatorAllSid(_Entity_):
"""
Operational data for given locator and SID
opcode
.. attribute:: locator_name (key)
Locator name
**type**\: str
**length:** 1..58
**config**\: False
.. attribute:: sid_opcode (key)
Sid opcode
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: sid_context
SID Context
**type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext>`
**config**\: False
.. attribute:: create_timestamp
Creation timestamp
**type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.CreateTimestamp>`
**config**\: False
.. attribute:: sid
SID
**type**\: str
**config**\: False
.. attribute:: allocation_type
Allocation Type
**type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
**config**\: False
.. attribute:: function_type
Function Type
**type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
**config**\: False
.. attribute:: state
State
**type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
**config**\: False
.. attribute:: has_forwarding
Rewrite done or not
**type**\: bool
**config**\: False
.. attribute:: locator
Associated locator
**type**\: str
**config**\: False
.. attribute:: owner
Owner
**type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.Owner>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one 'locator-all-sid' list entry (keyed by locator name + SID opcode)."""
    # Python 2 requires the explicit class reference in super(); Python 3 does not.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Active.LocatorAllSids.LocatorAllSid, self).__init__()
    self.yang_name = "locator-all-sid"
    self.yang_parent_name = "locator-all-sids"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # Composite YANG list key: locator name plus SID opcode.
    self.ylist_key_names = ['locator_name','sid_opcode']
    self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Active.LocatorAllSids.LocatorAllSid.CreateTimestamp)), ("owner", ("owner", Srv6.Active.LocatorAllSids.LocatorAllSid.Owner))])
    # Leaf name -> (YLeaf descriptor, accepted Python type names).
    self._leafs = OrderedDict([
        ('locator_name', (YLeaf(YType.str, 'locator-name'), ['str'])),
        ('sid_opcode', (YLeaf(YType.uint32, 'sid-opcode'), ['int'])),
        ('sid', (YLeaf(YType.str, 'sid'), ['str'])),
        ('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
        ('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
        ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
        ('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
        ('locator', (YLeaf(YType.str, 'locator'), ['str'])),
    ])
    self.locator_name = None
    self.sid_opcode = None
    self.sid = None
    self.allocation_type = None
    self.function_type = None
    self.state = None
    self.has_forwarding = None
    self.locator = None
    # Instantiate and parent the child container entities.
    self.sid_context = Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext()
    self.sid_context.parent = self
    self._children_name_map["sid_context"] = "sid-context"
    self.create_timestamp = Srv6.Active.LocatorAllSids.LocatorAllSid.CreateTimestamp()
    self.create_timestamp.parent = self
    self._children_name_map["create_timestamp"] = "create-timestamp"
    self.owner = YList(self)
    # Segment path embeds both list key predicates.
    self._segment_path = lambda: "locator-all-sid" + "[locator-name='" + str(self.locator_name) + "']" + "[sid-opcode='" + str(self.sid_opcode) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/locator-all-sids/%s" % self._segment_path()
    # Set last: marks the end of construction for the YDK runtime.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK's validation/bookkeeping helper.
    self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid, ['locator_name', 'sid_opcode', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)
class SidContext(_Entity_):
"""
SID Context
.. attribute:: key
SID Key
**type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key>`
**config**\: False
.. attribute:: application_data
Application opaque data
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'sid-context' container entity and register its YANG metadata."""
    # Python 2 requires the explicit class reference in super(); Python 3 does not.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext, self).__init__()
    self.yang_name = "sid-context"
    self.yang_parent_name = "locator-all-sid"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("key", ("key", Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key))])
    self._leafs = OrderedDict([
        ('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
    ])
    self.application_data = None
    # Instantiate and parent the 'key' child container.
    self.key = Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key()
    self.key.parent = self
    self._children_name_map["key"] = "key"
    self._segment_path = lambda: "sid-context"
    # Set last: marks the end of construction for the YDK runtime.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK's validation/bookkeeping helper.
    self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext, ['application_data'], name, value)
class Key(_Entity_):
"""
SID Key
.. attribute:: e
End (PSP) SID context
**type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.E>`
**config**\: False
.. attribute:: x
End.X (PSP) SID context
**type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.X>`
**config**\: False
.. attribute:: dx4
End.DX4 SID context
**type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4>`
**config**\: False
.. attribute:: dt4
End.DT4 SID context
**type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4>`
**config**\: False
.. attribute:: sid_context_type
SIDContextType
**type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'key' container (per-end-function SID context union) and register its YANG metadata."""
    # Python 2 requires the explicit class reference in super(); Python 3 does not.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key, self).__init__()
    self.yang_name = "key"
    self.yang_parent_name = "sid-context"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # One child container per supported SRv6 end-function context.
    self._child_classes = OrderedDict([("e", ("e", Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.E)), ("x", ("x", Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4))])
    self._leafs = OrderedDict([
        ('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
    ])
    self.sid_context_type = None
    # Instantiate and parent one child entity per end-function context.
    self.e = Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.E()
    self.e.parent = self
    self._children_name_map["e"] = "e"
    self.x = Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.X()
    self.x.parent = self
    self._children_name_map["x"] = "x"
    self.dx4 = Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4()
    self.dx4.parent = self
    self._children_name_map["dx4"] = "dx4"
    self.dt4 = Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4()
    self.dt4.parent = self
    self._children_name_map["dt4"] = "dt4"
    self._segment_path = lambda: "key"
    # Set last: marks the end of construction for the YDK runtime.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK's validation/bookkeeping helper.
    self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key, ['sid_context_type'], name, value)
class E(_Entity_):
    """
    End (PSP) SID context
    .. attribute:: table_id
        Table Id
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
    .. attribute:: opaque_id
        Additional differentiator \- opaque to SIDMgr
        **type**\: int
        **range:** 0..255
        **config**\: False
    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'e' (End PSP) context entity and register its YANG metadata."""
        # Python 2 requires the explicit class reference in super(); Python 3 does not.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.E, self).__init__()
        self.yang_name = "e"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
        ])
        self.table_id = None
        self.opaque_id = None
        self._segment_path = lambda: "e"
        # Set last: marks the end of construction for the YDK runtime.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through YDK's validation/bookkeeping helper.
        self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta-table module is only loaded on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.E']['meta_info']
class X(_Entity_):
    """
    End.X (PSP) SID context
    .. attribute:: is_protected
    Is protected?
    **type**\: bool
    **config**\: False
    .. attribute:: opaque_id
    Additional differentiator \- opaque to SIDMgr
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: interface
    Nexthop interface
    **type**\: str
    **pattern:** [a\-zA\-Z0\-9.\_/\-]+
    **config**\: False
    .. attribute:: nexthop_address
    Nexthop IP address
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.X, self).__init__()
        self.yang_name = "x"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute -> (YLeaf with YANG type/name, python types).
        self._leafs = OrderedDict([
            ('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
            ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
            ('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
        ])
        self.is_protected = None
        self.opaque_id = None
        self.interface = None
        self.nexthop_address = None
        self._segment_path = lambda: "x"
        # Freeze: from here on writes are validated by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta companion module is loaded only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.X']['meta_info']
class Dx4(_Entity_):
    """
    End.DX4 SID context
    .. attribute:: table_id
    Table ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: next_hop_set_id
    Next Hop Set ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4, self).__init__()
        self.yang_name = "dx4"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute -> (YLeaf with YANG type/name, python types).
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
            ('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
        ])
        self.table_id = None
        self.next_hop_set_id = None
        self._segment_path = lambda: "dx4"
        # Freeze: from here on writes are validated by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta companion module is loaded only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4']['meta_info']
class Dt4(_Entity_):
    """
    End.DT4 SID context
    .. attribute:: table_id
    Table ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4, self).__init__()
        self.yang_name = "dt4"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute -> (YLeaf with YANG type/name, python types).
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
        ])
        self.table_id = None
        self._segment_path = lambda: "dt4"
        # Freeze: from here on writes are validated by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4, ['table_id'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta companion module is loaded only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4']['meta_info']
@staticmethod
def _meta_info():
    """Return the YDK meta information entry for SidContext.Key."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta_mod
    entry = meta_mod._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext.Key']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the YDK meta information entry for SidContext."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta_mod
    entry = meta_mod._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.SidContext']
    return entry['meta_info']
class CreateTimestamp(_Entity_):
    """
    Creation timestamp
    .. attribute:: time_in_nano_seconds
    Timestamp in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    .. attribute:: age_in_nano_seconds
    Age in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllSids.LocatorAllSid.CreateTimestamp, self).__init__()
        self.yang_name = "create-timestamp"
        self.yang_parent_name = "locator-all-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute -> (YLeaf with YANG type/name, python types).
        self._leafs = OrderedDict([
            ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
            ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
        ])
        self.time_in_nano_seconds = None
        self.age_in_nano_seconds = None
        self._segment_path = lambda: "create-timestamp"
        # Freeze: from here on writes are validated by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta companion module is loaded only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.CreateTimestamp']['meta_info']
class Owner(_Entity_):
    """
    Owner
    .. attribute:: owner
    Owner
    **type**\: str
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllSids.LocatorAllSid.Owner, self).__init__()
        self.yang_name = "owner"
        self.yang_parent_name = "locator-all-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute -> (YLeaf with YANG type/name, python types).
        self._leafs = OrderedDict([
            ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
        ])
        self.owner = None
        self._segment_path = lambda: "owner"
        # Freeze: from here on writes are validated by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Srv6.Active.LocatorAllSids.LocatorAllSid.Owner, ['owner'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta companion module is loaded only on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid.Owner']['meta_info']
@staticmethod
def _meta_info():
    """Return the YDK meta information entry for LocatorAllSid."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta_mod
    entry = meta_mod._meta_table['Srv6.Active.LocatorAllSids.LocatorAllSid']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the YDK meta information entry for LocatorAllSids."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta_mod
    entry = meta_mod._meta_table['Srv6.Active.LocatorAllSids']
    return entry['meta_info']
class LocatorAllActiveSids(_Entity_):
    """
    Operational container for Active SIDs across all
    Locators
    .. attribute:: locator_all_active_sid
    Operational data for given locator and SID opcode
    **type**\: list of :py:class:`LocatorAllActiveSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call (generated code supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Active.LocatorAllActiveSids, self).__init__()
        self.yang_name = "locator-all-active-sids"
        self.yang_parent_name = "active"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child container metadata: YANG name -> (python attribute, binding class).
        self._child_classes = OrderedDict([("locator-all-active-sid", ("locator_all_active_sid", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid))])
        self._leafs = OrderedDict()
        # YANG list of per-(locator, opcode) entries.
        self.locator_all_active_sid = YList(self)
        self._segment_path = lambda: "locator-all-active-sids"
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/%s" % self._segment_path()
        # Freeze: from here on writes are validated by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Srv6.Active.LocatorAllActiveSids, [], name, value)

    class LocatorAllActiveSid(_Entity_):
        """
        Operational data for given locator and SID
        opcode
        .. attribute:: locator_name (key)
        Locator name
        **type**\: str
        **length:** 1..58
        **config**\: False
        .. attribute:: sid_opcode (key)
        Sid opcode
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: sid_context
        SID Context
        **type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext>`
        **config**\: False
        .. attribute:: create_timestamp
        Creation timestamp
        **type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp>`
        **config**\: False
        .. attribute:: sid
        SID
        **type**\: str
        **config**\: False
        .. attribute:: allocation_type
        Allocation Type
        **type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
        **config**\: False
        .. attribute:: function_type
        Function Type
        **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
        **config**\: False
        .. attribute:: state
        State
        **type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
        **config**\: False
        .. attribute:: has_forwarding
        Rewrite done or not
        **type**\: bool
        **config**\: False
        .. attribute:: locator
        Associated locator
        **type**\: str
        **config**\: False
        .. attribute:: owner
        Owner
        **type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.Owner>`
        **config**\: False
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Python 2/3 compatible super() call (generated code supports both).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid, self).__init__()
            self.yang_name = "locator-all-active-sid"
            self.yang_parent_name = "locator-all-active-sids"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            # YANG list keys identifying one entry of this list.
            self.ylist_key_names = ['locator_name','sid_opcode']
            self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp)), ("owner", ("owner", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.Owner))])
            # Leaf metadata: python attribute -> (YLeaf with YANG type/name, python types).
            self._leafs = OrderedDict([
                ('locator_name', (YLeaf(YType.str, 'locator-name'), ['str'])),
                ('sid_opcode', (YLeaf(YType.uint32, 'sid-opcode'), ['int'])),
                ('sid', (YLeaf(YType.str, 'sid'), ['str'])),
                ('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
                ('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
                ('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
                ('locator', (YLeaf(YType.str, 'locator'), ['str'])),
            ])
            self.locator_name = None
            self.sid_opcode = None
            self.sid = None
            self.allocation_type = None
            self.function_type = None
            self.state = None
            self.has_forwarding = None
            self.locator = None
            # Child container instances, parented to this entity.
            self.sid_context = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext()
            self.sid_context.parent = self
            self._children_name_map["sid_context"] = "sid-context"
            self.create_timestamp = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp()
            self.create_timestamp.parent = self
            self._children_name_map["create_timestamp"] = "create-timestamp"
            self.owner = YList(self)
            # Segment path embeds the list-key predicates.
            self._segment_path = lambda: "locator-all-active-sid" + "[locator-name='" + str(self.locator_name) + "']" + "[sid-opcode='" + str(self.sid_opcode) + "']"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/active/locator-all-active-sids/%s" % self._segment_path()
            # Freeze: from here on writes are validated by _perform_setattr.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid, ['locator_name', 'sid_opcode', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)

        class SidContext(_Entity_):
            """
            SID Context
            .. attribute:: key
            SID Key
            **type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key>`
            **config**\: False
            .. attribute:: application_data
            Application opaque data
            **type**\: str
            **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
            **config**\: False
            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible super() call (generated code supports both).
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext, self).__init__()
                self.yang_name = "sid-context"
                self.yang_parent_name = "locator-all-active-sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("key", ("key", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key))])
                self._leafs = OrderedDict([
                    ('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
                ])
                self.application_data = None
                self.key = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key()
                self.key.parent = self
                self._children_name_map["key"] = "key"
                self._segment_path = lambda: "sid-context"
                # Freeze: from here on writes are validated by _perform_setattr.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext, ['application_data'], name, value)

            class Key(_Entity_):
                """
                SID Key
                .. attribute:: e
                End (PSP) SID context
                **type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E>`
                **config**\: False
                .. attribute:: x
                End.X (PSP) SID context
                **type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X>`
                **config**\: False
                .. attribute:: dx4
                End.DX4 SID context
                **type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4>`
                **config**\: False
                .. attribute:: dt4
                End.DT4 SID context
                **type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4>`
                **config**\: False
                .. attribute:: sid_context_type
                SIDContextType
                **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
                **config**\: False
                """

                # YANG module prefix and revision this binding was generated from.
                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Python 2/3 compatible super() call (generated code supports both).
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key, self).__init__()
                    self.yang_name = "key"
                    self.yang_parent_name = "sid-context"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("e", ("e", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E)), ("x", ("x", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4))])
                    self._leafs = OrderedDict([
                        ('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
                    ])
                    self.sid_context_type = None
                    # One child per SID context variant; the discriminator is
                    # sid_context_type.
                    self.e = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E()
                    self.e.parent = self
                    self._children_name_map["e"] = "e"
                    self.x = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X()
                    self.x.parent = self
                    self._children_name_map["x"] = "x"
                    self.dx4 = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4()
                    self.dx4.parent = self
                    self._children_name_map["dx4"] = "dx4"
                    self.dt4 = Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4()
                    self.dt4.parent = self
                    self._children_name_map["dt4"] = "dt4"
                    self._segment_path = lambda: "key"
                    # Freeze: from here on writes are validated by _perform_setattr.
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key, ['sid_context_type'], name, value)

                class E(_Entity_):
                    """
                    End (PSP) SID context
                    .. attribute:: table_id
                    Table Id
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    .. attribute:: opaque_id
                    Additional differentiator \- opaque to SIDMgr
                    **type**\: int
                    **range:** 0..255
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E, self).__init__()
                        self.yang_name = "e"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
                        ])
                        self.table_id = None
                        self.opaque_id = None
                        self._segment_path = lambda: "e"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E']['meta_info']

                class X(_Entity_):
                    """
                    End.X (PSP) SID context
                    .. attribute:: is_protected
                    Is protected?
                    **type**\: bool
                    **config**\: False
                    .. attribute:: opaque_id
                    Additional differentiator \- opaque to SIDMgr
                    **type**\: int
                    **range:** 0..255
                    **config**\: False
                    .. attribute:: interface
                    Nexthop interface
                    **type**\: str
                    **pattern:** [a\-zA\-Z0\-9.\_/\-]+
                    **config**\: False
                    .. attribute:: nexthop_address
                    Nexthop IP address
                    **type**\: str
                    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X, self).__init__()
                        self.yang_name = "x"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
                            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
                            ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
                            ('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
                        ])
                        self.is_protected = None
                        self.opaque_id = None
                        self.interface = None
                        self.nexthop_address = None
                        self._segment_path = lambda: "x"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X']['meta_info']

                class Dx4(_Entity_):
                    """
                    End.DX4 SID context
                    .. attribute:: table_id
                    Table ID
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    .. attribute:: next_hop_set_id
                    Next Hop Set ID
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4, self).__init__()
                        self.yang_name = "dx4"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                            ('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
                        ])
                        self.table_id = None
                        self.next_hop_set_id = None
                        self._segment_path = lambda: "dx4"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4']['meta_info']

                class Dt4(_Entity_):
                    """
                    End.DT4 SID context
                    .. attribute:: table_id
                    Table ID
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4, self).__init__()
                        self.yang_name = "dt4"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                        ])
                        self.table_id = None
                        self._segment_path = lambda: "dt4"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4, ['table_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.SidContext']['meta_info']

        class CreateTimestamp(_Entity_):
            """
            Creation timestamp
            .. attribute:: time_in_nano_seconds
            Timestamp in nano seconds
            **type**\: int
            **range:** 0..18446744073709551615
            **config**\: False
            **units**\: nanosecond
            .. attribute:: age_in_nano_seconds
            Age in nano seconds
            **type**\: int
            **range:** 0..18446744073709551615
            **config**\: False
            **units**\: nanosecond
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp, self).__init__()
                self.yang_name = "create-timestamp"
                self.yang_parent_name = "locator-all-active-sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
                    ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
                ])
                self.time_in_nano_seconds = None
                self.age_in_nano_seconds = None
                self._segment_path = lambda: "create-timestamp"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp']['meta_info']

        class Owner(_Entity_):
            """
            Owner
            .. attribute:: owner
            Owner
            **type**\: str
            **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.Owner, self).__init__()
                self.yang_name = "owner"
                self.yang_parent_name = "locator-all-active-sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
                ])
                self.owner = None
                self._segment_path = lambda: "owner"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.Owner, ['owner'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid.Owner']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Active.LocatorAllActiveSids.LocatorAllActiveSid']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Active.LocatorAllActiveSids']['meta_info']
@staticmethod
def _meta_info():
    """Return the YDK meta information entry for Srv6.Active."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta_mod
    entry = meta_mod._meta_table['Srv6.Active']
    return entry['meta_info']
class Standby(_Entity_):
"""
Standby SRv6 operational data
.. attribute:: manager
SID Manager information
**type**\: :py:class:`Manager <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager>`
**config**\: False
.. attribute:: locators
SRv6 locators related information
**type**\: :py:class:`Locators <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators>`
**config**\: False
.. attribute:: locator_all_sids
Operational container for all (Active and Stale) SIDs across all Locators
**type**\: :py:class:`LocatorAllSids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids>`
**config**\: False
.. attribute:: locator_all_active_sids
Operational container for Active SIDs across all Locators
**type**\: :py:class:`LocatorAllActiveSids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    # Python 2/3 compatible super() call (generated code supports both).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Standby, self).__init__()
    self.yang_name = "standby"
    self.yang_parent_name = "srv6"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child container metadata: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("manager", ("manager", Srv6.Standby.Manager)), ("locators", ("locators", Srv6.Standby.Locators)), ("locator-all-sids", ("locator_all_sids", Srv6.Standby.LocatorAllSids)), ("locator-all-active-sids", ("locator_all_active_sids", Srv6.Standby.LocatorAllActiveSids))])
    self._leafs = OrderedDict()
    # Child container instances, parented to this entity.
    self.manager = Srv6.Standby.Manager()
    self.manager.parent = self
    self._children_name_map["manager"] = "manager"
    self.locators = Srv6.Standby.Locators()
    self.locators.parent = self
    self._children_name_map["locators"] = "locators"
    self.locator_all_sids = Srv6.Standby.LocatorAllSids()
    self.locator_all_sids.parent = self
    self._children_name_map["locator_all_sids"] = "locator-all-sids"
    self.locator_all_active_sids = Srv6.Standby.LocatorAllActiveSids()
    self.locator_all_active_sids.parent = self
    self._children_name_map["locator_all_active_sids"] = "locator-all-active-sids"
    self._segment_path = lambda: "standby"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/%s" % self._segment_path()
    # Freeze: from here on writes are validated by _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Standby has no writable leafs of its own; validate any write attempt.
    self._perform_setattr(Srv6.Standby, [], name, value)
class Manager(_Entity_):
"""
SID Manager information
.. attribute:: sid_mgr_params
SID Mgr parameters
**type**\: :py:class:`SidMgrParams <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.SidMgrParams>`
**config**\: False
.. attribute:: sid_mgr_summary
SID Mgr summary info
**type**\: :py:class:`SidMgrSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.SidMgrSummary>`
**config**\: False
.. attribute:: platform_capabilities
Platform Capabilities
**type**\: :py:class:`PlatformCapabilities <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
    # Python 2/3 compatible super() call (generated code supports both).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Standby.Manager, self).__init__()
    self.yang_name = "manager"
    self.yang_parent_name = "standby"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child container metadata: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("sid-mgr-params", ("sid_mgr_params", Srv6.Standby.Manager.SidMgrParams)), ("sid-mgr-summary", ("sid_mgr_summary", Srv6.Standby.Manager.SidMgrSummary)), ("platform-capabilities", ("platform_capabilities", Srv6.Standby.Manager.PlatformCapabilities))])
    self._leafs = OrderedDict()
    # Child container instances, parented to this entity.
    self.sid_mgr_params = Srv6.Standby.Manager.SidMgrParams()
    self.sid_mgr_params.parent = self
    self._children_name_map["sid_mgr_params"] = "sid-mgr-params"
    self.sid_mgr_summary = Srv6.Standby.Manager.SidMgrSummary()
    self.sid_mgr_summary.parent = self
    self._children_name_map["sid_mgr_summary"] = "sid-mgr-summary"
    self.platform_capabilities = Srv6.Standby.Manager.PlatformCapabilities()
    self.platform_capabilities.parent = self
    self._children_name_map["platform_capabilities"] = "platform-capabilities"
    self._segment_path = lambda: "manager"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/%s" % self._segment_path()
    # Freeze: from here on writes are validated by _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Manager has no writable leafs of its own; validate any write attempt.
    self._perform_setattr(Srv6.Standby.Manager, [], name, value)
class SidMgrParams(_Entity_):
    """
    SID Mgr parameters

    .. attribute:: encap_hop_limit

        Encap Hop\-limit info

        **type**\: :py:class:`EncapHopLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.SidMgrParams.EncapHopLimit>`

        **config**\: False

    .. attribute:: srv6_enabled

        Is SRv6 enabled?

        **type**\: bool

        **config**\: False

    .. attribute:: configured_encap_source_address

        Configured Encap Source address

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: default_encap_source_address

        Default Encap Source address

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: encap_ttl_propagate

        Is TTL propagate enabled?

        **type**\: bool

        **config**\: False

    .. attribute:: is_sid_holdtime_configured

        Is SID Holdtime configured?

        **type**\: bool

        **config**\: False

    .. attribute:: sid_holdtime_mins_configured

        Configured SID Holdtime in mins

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        **units**\: minute

    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated YDK initializer: metadata, children, leaves, freeze.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Manager.SidMgrParams, self).__init__()

        self.yang_name = "sid-mgr-params"
        self.yang_parent_name = "manager"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("encap-hop-limit", ("encap_hop_limit", Srv6.Standby.Manager.SidMgrParams.EncapHopLimit))])
        # Leaf descriptors in YANG schema order:
        # python name -> (YLeaf(yang type, yang name), [python type names]).
        self._leafs = OrderedDict([
            ('srv6_enabled', (YLeaf(YType.boolean, 'srv6-enabled'), ['bool'])),
            ('configured_encap_source_address', (YLeaf(YType.str, 'configured-encap-source-address'), ['str'])),
            ('default_encap_source_address', (YLeaf(YType.str, 'default-encap-source-address'), ['str'])),
            ('encap_ttl_propagate', (YLeaf(YType.boolean, 'encap-ttl-propagate'), ['bool'])),
            ('is_sid_holdtime_configured', (YLeaf(YType.boolean, 'is-sid-holdtime-configured'), ['bool'])),
            ('sid_holdtime_mins_configured', (YLeaf(YType.uint32, 'sid-holdtime-mins-configured'), ['int'])),
        ])
        self.srv6_enabled = None
        self.configured_encap_source_address = None
        self.default_encap_source_address = None
        self.encap_ttl_propagate = None
        self.is_sid_holdtime_configured = None
        self.sid_holdtime_mins_configured = None

        self.encap_hop_limit = Srv6.Standby.Manager.SidMgrParams.EncapHopLimit()
        self.encap_hop_limit.parent = self
        self._children_name_map["encap_hop_limit"] = "encap-hop-limit"

        self._segment_path = lambda: "sid-mgr-params"
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/%s" % self._segment_path()
        # Keep last: freezes the entity (handled by _perform_setattr).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes against the allowed leaf names via YDK core.
        self._perform_setattr(Srv6.Standby.Manager.SidMgrParams, ['srv6_enabled', 'configured_encap_source_address', 'default_encap_source_address', 'encap_ttl_propagate', 'is_sid_holdtime_configured', 'sid_holdtime_mins_configured'], name, value)


    class EncapHopLimit(_Entity_):
        """
        Encap Hop\-limit info

        .. attribute:: use_default

            Use default IPv6 hop\-limit value

            **type**\: bool

            **config**\: False

        .. attribute:: do_propagate

            Propagate IP TTL to Encap IPv6 hop\-limit

            **type**\: bool

            **config**\: False

        .. attribute:: value

            Specific value set for hop\-limit count

            **type**\: int

            **range:** 0..255

            **config**\: False

        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Standby.Manager.SidMgrParams.EncapHopLimit, self).__init__()

            self.yang_name = "encap-hop-limit"
            self.yang_parent_name = "sid-mgr-params"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            self._leafs = OrderedDict([
                ('use_default', (YLeaf(YType.boolean, 'use-default'), ['bool'])),
                ('do_propagate', (YLeaf(YType.boolean, 'do-propagate'), ['bool'])),
                ('value', (YLeaf(YType.uint8, 'value'), ['int'])),
            ])
            self.use_default = None
            self.do_propagate = None
            self.value = None

            self._segment_path = lambda: "encap-hop-limit"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/sid-mgr-params/%s" % self._segment_path()
            # Keep last: freezes the entity (handled by _perform_setattr).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Standby.Manager.SidMgrParams.EncapHopLimit, ['use_default', 'do_propagate', 'value'], name, value)

        @staticmethod
        def _meta_info():
            # Imported lazily here, as in all generated _meta_info helpers.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Standby.Manager.SidMgrParams.EncapHopLimit']['meta_info']

    @staticmethod
    def _meta_info():
        # Imported lazily here, as in all generated _meta_info helpers.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Manager.SidMgrParams']['meta_info']
class SidMgrSummary(_Entity_):
    """
    SID Mgr summary info

    .. attribute:: sids_out_of_resource_summary

        SIDs Global Out of Resource info

        **type**\: :py:class:`SidsOutOfResourceSummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.SidMgrSummary.SidsOutOfResourceSummary>`

        **config**\: False

    .. attribute:: locators_count

        Number of locators

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: oper_locators_count

        Number of operational locators

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: sids_count

        Number of SIDs

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: stale_sids_count

        Number of Stale SIDs

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: maximum_sids_count

        Global Maximum number of SIDs

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated YDK initializer: metadata, children, leaves, freeze.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Manager.SidMgrSummary, self).__init__()

        self.yang_name = "sid-mgr-summary"
        self.yang_parent_name = "manager"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("sids-out-of-resource-summary", ("sids_out_of_resource_summary", Srv6.Standby.Manager.SidMgrSummary.SidsOutOfResourceSummary))])
        # Leaf descriptors in YANG schema order.
        self._leafs = OrderedDict([
            ('locators_count', (YLeaf(YType.uint16, 'locators-count'), ['int'])),
            ('oper_locators_count', (YLeaf(YType.uint16, 'oper-locators-count'), ['int'])),
            ('sids_count', (YLeaf(YType.uint32, 'sids-count'), ['int'])),
            ('stale_sids_count', (YLeaf(YType.uint32, 'stale-sids-count'), ['int'])),
            ('maximum_sids_count', (YLeaf(YType.uint32, 'maximum-sids-count'), ['int'])),
        ])
        self.locators_count = None
        self.oper_locators_count = None
        self.sids_count = None
        self.stale_sids_count = None
        self.maximum_sids_count = None

        self.sids_out_of_resource_summary = Srv6.Standby.Manager.SidMgrSummary.SidsOutOfResourceSummary()
        self.sids_out_of_resource_summary.parent = self
        self._children_name_map["sids_out_of_resource_summary"] = "sids-out-of-resource-summary"

        self._segment_path = lambda: "sid-mgr-summary"
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/%s" % self._segment_path()
        # Keep last: freezes the entity (handled by _perform_setattr).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes against the allowed leaf names via YDK core.
        self._perform_setattr(Srv6.Standby.Manager.SidMgrSummary, ['locators_count', 'oper_locators_count', 'sids_count', 'stale_sids_count', 'maximum_sids_count'], name, value)


    class SidsOutOfResourceSummary(_Entity_):
        """
        SIDs Global Out of Resource info

        .. attribute:: out_of_resources_state

            Global Resources State for SIDs

            **type**\: :py:class:`Srv6OutOfResourceState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6OutOfResourceState>`

            **config**\: False

        .. attribute:: oor_yellow_free_sid_threshold

            Threshold for Number of Free SID below which OOR Yellow State is reached

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: oor_green_free_sid_threshold

            Threshold for Number of Free SID above which OOR Green State is restored

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: oor_green_count

            Number of times Resources Warning or Out of Resources state has been cleared

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: oor_yellow_count

            Number of times system went into Resources Warning state

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: oor_red_count

            Number of times system went into Out of Resources state

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Standby.Manager.SidMgrSummary.SidsOutOfResourceSummary, self).__init__()

            self.yang_name = "sids-out-of-resource-summary"
            self.yang_parent_name = "sid-mgr-summary"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            self._leafs = OrderedDict([
                ('out_of_resources_state', (YLeaf(YType.enumeration, 'out-of-resources-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6OutOfResourceState', '')])),
                ('oor_yellow_free_sid_threshold', (YLeaf(YType.uint32, 'oor-yellow-free-sid-threshold'), ['int'])),
                ('oor_green_free_sid_threshold', (YLeaf(YType.uint32, 'oor-green-free-sid-threshold'), ['int'])),
                ('oor_green_count', (YLeaf(YType.uint32, 'oor-green-count'), ['int'])),
                ('oor_yellow_count', (YLeaf(YType.uint32, 'oor-yellow-count'), ['int'])),
                ('oor_red_count', (YLeaf(YType.uint32, 'oor-red-count'), ['int'])),
            ])
            self.out_of_resources_state = None
            self.oor_yellow_free_sid_threshold = None
            self.oor_green_free_sid_threshold = None
            self.oor_green_count = None
            self.oor_yellow_count = None
            self.oor_red_count = None

            self._segment_path = lambda: "sids-out-of-resource-summary"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/sid-mgr-summary/%s" % self._segment_path()
            # Keep last: freezes the entity (handled by _perform_setattr).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Standby.Manager.SidMgrSummary.SidsOutOfResourceSummary, ['out_of_resources_state', 'oor_yellow_free_sid_threshold', 'oor_green_free_sid_threshold', 'oor_green_count', 'oor_yellow_count', 'oor_red_count'], name, value)

        @staticmethod
        def _meta_info():
            # Imported lazily here, as in all generated _meta_info helpers.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Standby.Manager.SidMgrSummary.SidsOutOfResourceSummary']['meta_info']

    @staticmethod
    def _meta_info():
        # Imported lazily here, as in all generated _meta_info helpers.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Manager.SidMgrSummary']['meta_info']
class PlatformCapabilities(_Entity_):
    """
    Platform Capabilities

    .. attribute:: support

        Feature support

        **type**\: :py:class:`Support <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities.Support>`

        **config**\: False

    .. attribute:: max_sid

        Maximum Sids

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: sid_holdtime_mins

        Freed SID holdtime in mins

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        **units**\: minute

    """

    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated YDK initializer: metadata, children, leaves, freeze.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Manager.PlatformCapabilities, self).__init__()

        self.yang_name = "platform-capabilities"
        self.yang_parent_name = "manager"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("support", ("support", Srv6.Standby.Manager.PlatformCapabilities.Support))])
        # Leaf descriptors in YANG schema order.
        self._leafs = OrderedDict([
            ('max_sid', (YLeaf(YType.uint32, 'max-sid'), ['int'])),
            ('sid_holdtime_mins', (YLeaf(YType.uint32, 'sid-holdtime-mins'), ['int'])),
        ])
        self.max_sid = None
        self.sid_holdtime_mins = None

        self.support = Srv6.Standby.Manager.PlatformCapabilities.Support()
        self.support.parent = self
        self._children_name_map["support"] = "support"

        self._segment_path = lambda: "platform-capabilities"
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/%s" % self._segment_path()
        # Keep last: freezes the entity (handled by _perform_setattr).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes against the allowed leaf names via YDK core.
        self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities, ['max_sid', 'sid_holdtime_mins'], name, value)


    class Support(_Entity_):
        """
        Feature support

        .. attribute:: signaled_parameters

            Signaled Parameters

            **type**\: :py:class:`SignaledParameters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities.Support.SignaledParameters>`

            **config**\: False

        .. attribute:: srv6

            SRv6 support

            **type**\: bool

            **config**\: False

        .. attribute:: tilfa

            TI LFA support

            **type**\: bool

            **config**\: False

        .. attribute:: microloop_avoidance

            Microloop\-avoidance support

            **type**\: bool

            **config**\: False

        .. attribute:: end_func

            Supported end functions

            **type**\: list of :py:class:`EndFunc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities.Support.EndFunc>`

            **config**\: False

        .. attribute:: transit_func

            Supported Transit functions

            **type**\: list of :py:class:`TransitFunc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities.Support.TransitFunc>`

            **config**\: False

        .. attribute:: security_rule

            Security rules

            **type**\: list of :py:class:`SecurityRule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities.Support.SecurityRule>`

            **config**\: False

        .. attribute:: counter

            Counters

            **type**\: list of :py:class:`Counter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Manager.PlatformCapabilities.Support.Counter>`

            **config**\: False

        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Generated YDK initializer: metadata, children, leaves, freeze.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Standby.Manager.PlatformCapabilities.Support, self).__init__()

            self.yang_name = "support"
            self.yang_parent_name = "platform-capabilities"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("signaled-parameters", ("signaled_parameters", Srv6.Standby.Manager.PlatformCapabilities.Support.SignaledParameters)), ("end-func", ("end_func", Srv6.Standby.Manager.PlatformCapabilities.Support.EndFunc)), ("transit-func", ("transit_func", Srv6.Standby.Manager.PlatformCapabilities.Support.TransitFunc)), ("security-rule", ("security_rule", Srv6.Standby.Manager.PlatformCapabilities.Support.SecurityRule)), ("counter", ("counter", Srv6.Standby.Manager.PlatformCapabilities.Support.Counter))])
            self._leafs = OrderedDict([
                ('srv6', (YLeaf(YType.boolean, 'srv6'), ['bool'])),
                ('tilfa', (YLeaf(YType.boolean, 'tilfa'), ['bool'])),
                ('microloop_avoidance', (YLeaf(YType.boolean, 'microloop-avoidance'), ['bool'])),
            ])
            self.srv6 = None
            self.tilfa = None
            self.microloop_avoidance = None

            self.signaled_parameters = Srv6.Standby.Manager.PlatformCapabilities.Support.SignaledParameters()
            self.signaled_parameters.parent = self
            self._children_name_map["signaled_parameters"] = "signaled-parameters"

            # YANG list nodes are modeled as YList containers.
            self.end_func = YList(self)
            self.transit_func = YList(self)
            self.security_rule = YList(self)
            self.counter = YList(self)

            self._segment_path = lambda: "support"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/platform-capabilities/%s" % self._segment_path()
            # Keep last: freezes the entity (handled by _perform_setattr).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities.Support, ['srv6', 'tilfa', 'microloop_avoidance'], name, value)


        class SignaledParameters(_Entity_):
            """
            Signaled Parameters

            .. attribute:: max_sl

                Max value of SegmentLeft field in received SRH

                **type**\: int

                **range:** 0..255

                **config**\: False

            .. attribute:: max_end_pop_srh

                Max num of SIDs in rcvd SRH for pop

                **type**\: int

                **range:** 0..255

                **config**\: False

            .. attribute:: max_t_insert

                Max num of SIDs for T.Insert op

                **type**\: int

                **range:** 0..255

                **config**\: False

            .. attribute:: max_t_encap

                Max num of SIDs for T.Encaps op

                **type**\: int

                **range:** 0..255

                **config**\: False

            .. attribute:: max_end_d

                Max num of SIDs in rcvd SRH for decap

                **type**\: int

                **range:** 0..255

                **config**\: False

            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.Manager.PlatformCapabilities.Support.SignaledParameters, self).__init__()

                self.yang_name = "signaled-parameters"
                self.yang_parent_name = "support"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('max_sl', (YLeaf(YType.uint8, 'max-sl'), ['int'])),
                    ('max_end_pop_srh', (YLeaf(YType.uint8, 'max-end-pop-srh'), ['int'])),
                    ('max_t_insert', (YLeaf(YType.uint8, 'max-t-insert'), ['int'])),
                    ('max_t_encap', (YLeaf(YType.uint8, 'max-t-encap'), ['int'])),
                    ('max_end_d', (YLeaf(YType.uint8, 'max-end-d'), ['int'])),
                ])
                self.max_sl = None
                self.max_end_pop_srh = None
                self.max_t_insert = None
                self.max_t_encap = None
                self.max_end_d = None

                self._segment_path = lambda: "signaled-parameters"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/platform-capabilities/support/%s" % self._segment_path()
                # Keep last: freezes the entity (handled by _perform_setattr).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities.Support.SignaledParameters, ['max_sl', 'max_end_pop_srh', 'max_t_insert', 'max_t_encap', 'max_end_d'], name, value)

            @staticmethod
            def _meta_info():
                # Imported lazily here, as in all generated _meta_info helpers.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities.Support.SignaledParameters']['meta_info']


        class EndFunc(_Entity_):
            """
            Supported end functions

            .. attribute:: string

                String

                **type**\: str

                **config**\: False

            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.Manager.PlatformCapabilities.Support.EndFunc, self).__init__()

                self.yang_name = "end-func"
                self.yang_parent_name = "support"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Single string leaf (one entry of the end-func list).
                self._leafs = OrderedDict([
                    ('string', (YLeaf(YType.str, 'string'), ['str'])),
                ])
                self.string = None

                self._segment_path = lambda: "end-func"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/platform-capabilities/support/%s" % self._segment_path()
                # Keep last: freezes the entity (handled by _perform_setattr).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities.Support.EndFunc, ['string'], name, value)

            @staticmethod
            def _meta_info():
                # Imported lazily here, as in all generated _meta_info helpers.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities.Support.EndFunc']['meta_info']


        class TransitFunc(_Entity_):
            """
            Supported Transit functions

            .. attribute:: string

                String

                **type**\: str

                **config**\: False

            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.Manager.PlatformCapabilities.Support.TransitFunc, self).__init__()

                self.yang_name = "transit-func"
                self.yang_parent_name = "support"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Single string leaf (one entry of the transit-func list).
                self._leafs = OrderedDict([
                    ('string', (YLeaf(YType.str, 'string'), ['str'])),
                ])
                self.string = None

                self._segment_path = lambda: "transit-func"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/platform-capabilities/support/%s" % self._segment_path()
                # Keep last: freezes the entity (handled by _perform_setattr).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities.Support.TransitFunc, ['string'], name, value)

            @staticmethod
            def _meta_info():
                # Imported lazily here, as in all generated _meta_info helpers.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities.Support.TransitFunc']['meta_info']


        class SecurityRule(_Entity_):
            """
            Security rules

            .. attribute:: string

                String

                **type**\: str

                **config**\: False

            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.Manager.PlatformCapabilities.Support.SecurityRule, self).__init__()

                self.yang_name = "security-rule"
                self.yang_parent_name = "support"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Single string leaf (one entry of the security-rule list).
                self._leafs = OrderedDict([
                    ('string', (YLeaf(YType.str, 'string'), ['str'])),
                ])
                self.string = None

                self._segment_path = lambda: "security-rule"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/platform-capabilities/support/%s" % self._segment_path()
                # Keep last: freezes the entity (handled by _perform_setattr).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities.Support.SecurityRule, ['string'], name, value)

            @staticmethod
            def _meta_info():
                # Imported lazily here, as in all generated _meta_info helpers.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities.Support.SecurityRule']['meta_info']


        class Counter(_Entity_):
            """
            Counters

            .. attribute:: string

                String

                **type**\: str

                **config**\: False

            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.Manager.PlatformCapabilities.Support.Counter, self).__init__()

                self.yang_name = "counter"
                self.yang_parent_name = "support"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Single string leaf (one entry of the counter list).
                self._leafs = OrderedDict([
                    ('string', (YLeaf(YType.str, 'string'), ['str'])),
                ])
                self.string = None

                self._segment_path = lambda: "counter"
                self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/manager/platform-capabilities/support/%s" % self._segment_path()
                # Keep last: freezes the entity (handled by _perform_setattr).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Srv6.Standby.Manager.PlatformCapabilities.Support.Counter, ['string'], name, value)

            @staticmethod
            def _meta_info():
                # Imported lazily here, as in all generated _meta_info helpers.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities.Support.Counter']['meta_info']

        @staticmethod
        def _meta_info():
            # Imported lazily here, as in all generated _meta_info helpers.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities.Support']['meta_info']

    @staticmethod
    def _meta_info():
        # Imported lazily here, as in all generated _meta_info helpers.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Manager.PlatformCapabilities']['meta_info']
@staticmethod
def _meta_info():
    # Imported lazily here, as in all generated _meta_info helpers;
    # returns the schema meta-info entry for Srv6.Standby.Manager.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Manager']['meta_info']
class Locators(_Entity_):
"""
SRv6 locators related information
.. attribute:: locator
Operational data for given SRv6 locator
**type**\: list of :py:class:`Locator <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'

def __init__(self):
    # Generated YDK initializer for the "locators" container, which
    # holds only the keyed "locator" YANG list.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Standby.Locators, self).__init__()

    self.yang_name = "locators"
    self.yang_parent_name = "standby"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("locator", ("locator", Srv6.Standby.Locators.Locator))])
    self._leafs = OrderedDict()  # pure container node: no leaves of its own
    self.locator = YList(self)   # YANG list node

    self._segment_path = lambda: "locators"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/%s" % self._segment_path()
    # Keep last: freezes the entity (handled by _perform_setattr).
    self._is_frozen = True

def __setattr__(self, name, value):
    # Route all attribute writes through YDK's validating setter.
    self._perform_setattr(Srv6.Standby.Locators, [], name, value)
class Locator(_Entity_):
"""
Operational data for given SRv6 locator
.. attribute:: name (key)
Locator name
**type**\: str
**length:** 1..58
**config**\: False
.. attribute:: info
Operational data for given SRv6 locator
**type**\: :py:class:`Info <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Info>`
**config**\: False
.. attribute:: sids
SRv6 locator SID table
**type**\: :py:class:`Sids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'

def __init__(self):
    # Generated YDK initializer for one entry of the "locator" YANG
    # list, keyed by the locator name.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Srv6.Standby.Locators.Locator, self).__init__()

    self.yang_name = "locator"
    self.yang_parent_name = "locators"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['name']  # YANG list key
    self._child_classes = OrderedDict([("info", ("info", Srv6.Standby.Locators.Locator.Info)), ("sids", ("sids", Srv6.Standby.Locators.Locator.Sids))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
    ])
    self.name = None

    self.info = Srv6.Standby.Locators.Locator.Info()
    self.info.parent = self
    self._children_name_map["info"] = "info"

    self.sids = Srv6.Standby.Locators.Locator.Sids()
    self.sids.parent = self
    self._children_name_map["sids"] = "sids"

    # Segment path embeds the list key, e.g. locator[name='foo'].
    self._segment_path = lambda: "locator" + "[name='" + str(self.name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/locators/%s" % self._segment_path()
    # Keep last: freezes the entity (handled by _perform_setattr).
    self._is_frozen = True

def __setattr__(self, name, value):
    # Validate writes against the allowed leaf names via YDK core.
    self._perform_setattr(Srv6.Standby.Locators.Locator, ['name'], name, value)
class Info(_Entity_):
"""
Operational data for given SRv6 locator
.. attribute:: interface
Locator IM intf info
**type**\: :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Info.Interface>`
**config**\: False
.. attribute:: create_timestamp
Creation timestamp
**type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Info.CreateTimestamp>`
**config**\: False
.. attribute:: name
Locator Name
**type**\: str
**config**\: False
.. attribute:: id
Locator ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix
Locator Prefix
**type**\: str
**config**\: False
.. attribute:: is_operational
Locator status is Up or Down
**type**\: bool
**config**\: False
.. attribute:: is_default
Locator is the default locator
**type**\: bool
**config**\: False
.. attribute:: out_of_resources_state
Locator Resources State for SIDs
**type**\: :py:class:`Srv6OutOfResourceState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6OutOfResourceState>`
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Standby.Locators.Locator.Info, self).__init__()
self.yang_name = "info"
self.yang_parent_name = "locator"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("interface", ("interface", Srv6.Standby.Locators.Locator.Info.Interface)), ("create-timestamp", ("create_timestamp", Srv6.Standby.Locators.Locator.Info.CreateTimestamp))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('id', (YLeaf(YType.uint32, 'id'), ['int'])),
('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
('is_operational', (YLeaf(YType.boolean, 'is-operational'), ['bool'])),
('is_default', (YLeaf(YType.boolean, 'is-default'), ['bool'])),
('out_of_resources_state', (YLeaf(YType.enumeration, 'out-of-resources-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6OutOfResourceState', '')])),
])
self.name = None
self.id = None
self.prefix = None
self.is_operational = None
self.is_default = None
self.out_of_resources_state = None
self.interface = Srv6.Standby.Locators.Locator.Info.Interface()
self.interface.parent = self
self._children_name_map["interface"] = "interface"
self.create_timestamp = Srv6.Standby.Locators.Locator.Info.CreateTimestamp()
self.create_timestamp.parent = self
self._children_name_map["create_timestamp"] = "create-timestamp"
self._segment_path = lambda: "info"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Standby.Locators.Locator.Info, ['name', 'id', 'prefix', 'is_operational', 'is_default', 'out_of_resources_state'], name, value)
class Interface(_Entity_):
"""
Locator IM intf info
.. attribute:: name
Interface name
**type**\: str
**config**\: False
.. attribute:: if_handle
Interface handle
**type**\: str
**pattern:** [0\-9a\-fA\-F]{1,8}
**config**\: False
.. attribute:: programmed_prefix
Interface prefix/addr programmed
**type**\: str
**config**\: False
"""
_prefix = 'segment-routing-srv6-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Srv6.Standby.Locators.Locator.Info.Interface, self).__init__()
self.yang_name = "interface"
self.yang_parent_name = "info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('if_handle', (YLeaf(YType.str, 'if-handle'), ['str'])),
('programmed_prefix', (YLeaf(YType.str, 'programmed-prefix'), ['str'])),
])
self.name = None
self.if_handle = None
self.programmed_prefix = None
self._segment_path = lambda: "interface"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Srv6.Standby.Locators.Locator.Info.Interface, ['name', 'if_handle', 'programmed_prefix'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
return meta._meta_table['Srv6.Standby.Locators.Locator.Info.Interface']['meta_info']
class CreateTimestamp(_Entity_):
    """
    Creation timestamp
    .. attribute:: time_in_nano_seconds
    Timestamp in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    .. attribute:: age_in_nano_seconds
    Age in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Info.CreateTimestamp, self).__init__()
        self.yang_name = "create-timestamp"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
            ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
        ])
        self.time_in_nano_seconds = None
        self.age_in_nano_seconds = None
        self._segment_path = lambda: "create-timestamp"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Info.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Info.CreateTimestamp']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators.Locator.Info']['meta_info']
class Sids(_Entity_):
    """
    SRv6 locator SID table
    .. attribute:: sid
    Operational data for given SRv6 SID
    **type**\: list of :py:class:`Sid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid>`
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids, self).__init__()
        self.yang_name = "sids"
        self.yang_parent_name = "locator"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child: the keyed "sid" list entries.
        self._child_classes = OrderedDict([("sid", ("sid", Srv6.Standby.Locators.Locator.Sids.Sid))])
        self._leafs = OrderedDict()  # container has no leaves of its own
        self.sid = YList(self)
        self._segment_path = lambda: "sids"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # No leaves on this container, hence the empty allowed-name list.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids, [], name, value)
class Sid(_Entity_):
    """
    Operational data for given SRv6 SID
    .. attribute:: address (key)
    IPv6 address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: sid_context
    SID Context
    **type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.SidContext>`
    **config**\: False
    .. attribute:: create_timestamp
    Creation timestamp
    **type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.CreateTimestamp>`
    **config**\: False
    .. attribute:: sid
    SID
    **type**\: str
    **config**\: False
    .. attribute:: allocation_type
    Allocation Type
    **type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
    **config**\: False
    .. attribute:: function_type
    Function Type
    **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
    **config**\: False
    .. attribute:: state
    State
    **type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
    **config**\: False
    .. attribute:: has_forwarding
    Rewrite done or not
    **type**\: bool
    **config**\: False
    .. attribute:: locator
    Associated locator
    **type**\: str
    **config**\: False
    .. attribute:: owner
    Owner
    **type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.Owner>`
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid, self).__init__()
        self.yang_name = "sid"
        self.yang_parent_name = "sids"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # List entries are keyed by the SID address leaf.
        self.ylist_key_names = ['address']
        self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Standby.Locators.Locator.Sids.Sid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Standby.Locators.Locator.Sids.Sid.CreateTimestamp)), ("owner", ("owner", Srv6.Standby.Locators.Locator.Sids.Sid.Owner))])
        self._leafs = OrderedDict([
            ('address', (YLeaf(YType.str, 'address'), ['str','str'])),  # union: IPv4 or IPv6 string form
            ('sid', (YLeaf(YType.str, 'sid'), ['str'])),
            ('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
            ('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
            ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
            ('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
            ('locator', (YLeaf(YType.str, 'locator'), ['str'])),
        ])
        self.address = None
        self.sid = None
        self.allocation_type = None
        self.function_type = None
        self.state = None
        self.has_forwarding = None
        self.locator = None
        # Child containers are instantiated eagerly and back-linked via .parent.
        self.sid_context = Srv6.Standby.Locators.Locator.Sids.Sid.SidContext()
        self.sid_context.parent = self
        self._children_name_map["sid_context"] = "sid-context"
        self.create_timestamp = Srv6.Standby.Locators.Locator.Sids.Sid.CreateTimestamp()
        self.create_timestamp.parent = self
        self._children_name_map["create_timestamp"] = "create-timestamp"
        self.owner = YList(self)
        # Keyed segment path; address is embedded as the list-key predicate.
        self._segment_path = lambda: "sid" + "[address='" + str(self.address) + "']"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid, ['address', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)
class SidContext(_Entity_):
    """
    SID Context
    .. attribute:: key
    SID Key
    **type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key>`
    **config**\: False
    .. attribute:: application_data
    Application opaque data
    **type**\: str
    **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext, self).__init__()
        self.yang_name = "sid-context"
        self.yang_parent_name = "sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("key", ("key", Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key))])
        self._leafs = OrderedDict([
            ('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
        ])
        self.application_data = None
        # Child container instantiated eagerly and back-linked via .parent.
        self.key = Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key()
        self.key.parent = self
        self._children_name_map["key"] = "key"
        self._segment_path = lambda: "sid-context"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext, ['application_data'], name, value)
class Key(_Entity_):
    """
    SID Key
    .. attribute:: e
    End (PSP) SID context
    **type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.E>`
    **config**\: False
    .. attribute:: x
    End.X (PSP) SID context
    **type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.X>`
    **config**\: False
    .. attribute:: dx4
    End.DX4 SID context
    **type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dx4>`
    **config**\: False
    .. attribute:: dt4
    End.DT4 SID context
    **type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dt4>`
    **config**\: False
    .. attribute:: sid_context_type
    SIDContextType
    **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key, self).__init__()
        self.yang_name = "key"
        self.yang_parent_name = "sid-context"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # One child container per SID end-function variant (E/X/DX4/DT4).
        self._child_classes = OrderedDict([("e", ("e", Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.E)), ("x", ("x", Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dt4))])
        self._leafs = OrderedDict([
            ('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
        ])
        self.sid_context_type = None
        # Child containers are instantiated eagerly and back-linked via .parent.
        self.e = Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.E()
        self.e.parent = self
        self._children_name_map["e"] = "e"
        self.x = Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.X()
        self.x.parent = self
        self._children_name_map["x"] = "x"
        self.dx4 = Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dx4()
        self.dx4.parent = self
        self._children_name_map["dx4"] = "dx4"
        self.dt4 = Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dt4()
        self.dt4.parent = self
        self._children_name_map["dt4"] = "dt4"
        self._segment_path = lambda: "key"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key, ['sid_context_type'], name, value)
class E(_Entity_):
    """
    End (PSP) SID context
    .. attribute:: table_id
    Table Id
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: opaque_id
    Additional differentiator \- opaque to SIDMgr
    **type**\: int
    **range:** 0..255
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.E, self).__init__()
        self.yang_name = "e"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
        ])
        self.table_id = None
        self.opaque_id = None
        self._segment_path = lambda: "e"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.E']['meta_info']
class X(_Entity_):
    """
    End.X (PSP) SID context
    .. attribute:: is_protected
    Is protected?
    **type**\: bool
    **config**\: False
    .. attribute:: opaque_id
    Additional differentiator \- opaque to SIDMgr
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: interface
    Nexthop interface
    **type**\: str
    **pattern:** [a\-zA\-Z0\-9.\_/\-]+
    **config**\: False
    .. attribute:: nexthop_address
    Nexthop IP address
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.X, self).__init__()
        self.yang_name = "x"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
            ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
            ('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
        ])
        self.is_protected = None
        self.opaque_id = None
        self.interface = None
        self.nexthop_address = None
        self._segment_path = lambda: "x"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.X']['meta_info']
class Dx4(_Entity_):
    """
    End.DX4 SID context
    .. attribute:: table_id
    Table ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: next_hop_set_id
    Next Hop Set ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dx4, self).__init__()
        self.yang_name = "dx4"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
            ('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
        ])
        self.table_id = None
        self.next_hop_set_id = None
        self._segment_path = lambda: "dx4"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dx4']['meta_info']
class Dt4(_Entity_):
    """
    End.DT4 SID context
    .. attribute:: table_id
    Table ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dt4, self).__init__()
        self.yang_name = "dt4"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
        ])
        self.table_id = None
        self._segment_path = lambda: "dt4"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dt4, ['table_id'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key.Dt4']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.SidContext.Key']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.SidContext']['meta_info']
class CreateTimestamp(_Entity_):
    """
    Creation timestamp
    .. attribute:: time_in_nano_seconds
    Timestamp in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    .. attribute:: age_in_nano_seconds
    Age in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.CreateTimestamp, self).__init__()
        self.yang_name = "create-timestamp"
        self.yang_parent_name = "sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
            ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
        ])
        self.time_in_nano_seconds = None
        self.age_in_nano_seconds = None
        self._segment_path = lambda: "create-timestamp"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.CreateTimestamp']['meta_info']
class Owner(_Entity_):
    """
    Owner
    .. attribute:: owner
    Owner
    **type**\: str
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.Locators.Locator.Sids.Sid.Owner, self).__init__()
        self.yang_name = "owner"
        self.yang_parent_name = "sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
        ])
        self.owner = None
        self._segment_path = lambda: "owner"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.Locators.Locator.Sids.Sid.Owner, ['owner'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid.Owner']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators.Locator.Sids.Sid']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators.Locator.Sids']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators.Locator']['meta_info']
@staticmethod
def _meta_info():
    # Deferred import keeps the large generated meta tables off the import path.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.Locators']['meta_info']
class LocatorAllSids(_Entity_):
    """
    Operational container for all (Active and Stale)
    SIDs across all Locators
    .. attribute:: locator_all_sid
    Operational data for given locator and SID opcode
    **type**\: list of :py:class:`LocatorAllSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid>`
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids, self).__init__()
        self.yang_name = "locator-all-sids"
        self.yang_parent_name = "standby"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Single child: the keyed "locator-all-sid" list entries.
        self._child_classes = OrderedDict([("locator-all-sid", ("locator_all_sid", Srv6.Standby.LocatorAllSids.LocatorAllSid))])
        self._leafs = OrderedDict()  # container has no leaves of its own
        self.locator_all_sid = YList(self)
        self._segment_path = lambda: "locator-all-sids"
        # No list ancestor here, so the absolute path can be computed statically.
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/%s" % self._segment_path()
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # No leaves on this container, hence the empty allowed-name list.
        self._perform_setattr(Srv6.Standby.LocatorAllSids, [], name, value)
class LocatorAllSid(_Entity_):
    """
    Operational data for given locator and SID
    opcode
    .. attribute:: locator_name (key)
    Locator name
    **type**\: str
    **length:** 1..58
    **config**\: False
    .. attribute:: sid_opcode (key)
    Sid opcode
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: sid_context
    SID Context
    **type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext>`
    **config**\: False
    .. attribute:: create_timestamp
    Creation timestamp
    **type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.CreateTimestamp>`
    **config**\: False
    .. attribute:: sid
    SID
    **type**\: str
    **config**\: False
    .. attribute:: allocation_type
    Allocation Type
    **type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
    **config**\: False
    .. attribute:: function_type
    Function Type
    **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
    **config**\: False
    .. attribute:: state
    State
    **type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
    **config**\: False
    .. attribute:: has_forwarding
    Rewrite done or not
    **type**\: bool
    **config**\: False
    .. attribute:: locator
    Associated locator
    **type**\: str
    **config**\: False
    .. attribute:: owner
    Owner
    **type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.Owner>`
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid, self).__init__()
        self.yang_name = "locator-all-sid"
        self.yang_parent_name = "locator-all-sids"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        # Composite list key: locator name plus SID opcode.
        self.ylist_key_names = ['locator_name','sid_opcode']
        self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Standby.LocatorAllSids.LocatorAllSid.CreateTimestamp)), ("owner", ("owner", Srv6.Standby.LocatorAllSids.LocatorAllSid.Owner))])
        self._leafs = OrderedDict([
            ('locator_name', (YLeaf(YType.str, 'locator-name'), ['str'])),
            ('sid_opcode', (YLeaf(YType.uint32, 'sid-opcode'), ['int'])),
            ('sid', (YLeaf(YType.str, 'sid'), ['str'])),
            ('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
            ('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
            ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
            ('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
            ('locator', (YLeaf(YType.str, 'locator'), ['str'])),
        ])
        self.locator_name = None
        self.sid_opcode = None
        self.sid = None
        self.allocation_type = None
        self.function_type = None
        self.state = None
        self.has_forwarding = None
        self.locator = None
        # Child containers are instantiated eagerly and back-linked via .parent.
        self.sid_context = Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext()
        self.sid_context.parent = self
        self._children_name_map["sid_context"] = "sid-context"
        self.create_timestamp = Srv6.Standby.LocatorAllSids.LocatorAllSid.CreateTimestamp()
        self.create_timestamp.parent = self
        self._children_name_map["create_timestamp"] = "create-timestamp"
        self.owner = YList(self)
        # Keyed segment path; both list keys appear as predicates.
        self._segment_path = lambda: "locator-all-sid" + "[locator-name='" + str(self.locator_name) + "']" + "[sid-opcode='" + str(self.sid_opcode) + "']"
        # No list ancestor, so the absolute path can be computed statically.
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/locator-all-sids/%s" % self._segment_path()
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid, ['locator_name', 'sid_opcode', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)
class SidContext(_Entity_):
    """
    SID Context
    .. attribute:: key
    SID Key
    **type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key>`
    **config**\: False
    .. attribute:: application_data
    Application opaque data
    **type**\: str
    **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext, self).__init__()
        self.yang_name = "sid-context"
        self.yang_parent_name = "locator-all-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("key", ("key", Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key))])
        self._leafs = OrderedDict([
            ('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
        ])
        self.application_data = None
        # Child container instantiated eagerly and back-linked via .parent.
        self.key = Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key()
        self.key.parent = self
        self._children_name_map["key"] = "key"
        self._segment_path = lambda: "sid-context"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext, ['application_data'], name, value)
class Key(_Entity_):
    """
    SID Key
    .. attribute:: e
    End (PSP) SID context
    **type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.E>`
    **config**\: False
    .. attribute:: x
    End.X (PSP) SID context
    **type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.X>`
    **config**\: False
    .. attribute:: dx4
    End.DX4 SID context
    **type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4>`
    **config**\: False
    .. attribute:: dt4
    End.DT4 SID context
    **type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4>`
    **config**\: False
    .. attribute:: sid_context_type
    SIDContextType
    **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key, self).__init__()
        self.yang_name = "key"
        self.yang_parent_name = "sid-context"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # One child container per SID end-function variant (E/X/DX4/DT4).
        self._child_classes = OrderedDict([("e", ("e", Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.E)), ("x", ("x", Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4))])
        self._leafs = OrderedDict([
            ('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
        ])
        self.sid_context_type = None
        # Child containers are instantiated eagerly and back-linked via .parent.
        self.e = Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.E()
        self.e.parent = self
        self._children_name_map["e"] = "e"
        self.x = Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.X()
        self.x.parent = self
        self._children_name_map["x"] = "x"
        self.dx4 = Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4()
        self.dx4.parent = self
        self._children_name_map["dx4"] = "dx4"
        self.dt4 = Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4()
        self.dt4.parent = self
        self._children_name_map["dt4"] = "dt4"
        self._segment_path = lambda: "key"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key, ['sid_context_type'], name, value)
class E(_Entity_):
    """
    End (PSP) SID context
    .. attribute:: table_id
    Table Id
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: opaque_id
    Additional differentiator \- opaque to SIDMgr
    **type**\: int
    **range:** 0..255
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.E, self).__init__()
        self.yang_name = "e"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
        ])
        self.table_id = None
        self.opaque_id = None
        self._segment_path = lambda: "e"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.E']['meta_info']
class X(_Entity_):
    """
    End.X (PSP) SID context
    .. attribute:: is_protected
    Is protected?
    **type**\: bool
    **config**\: False
    .. attribute:: opaque_id
    Additional differentiator \- opaque to SIDMgr
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: interface
    Nexthop interface
    **type**\: str
    **pattern:** [a\-zA\-Z0\-9.\_/\-]+
    **config**\: False
    .. attribute:: nexthop_address
    Nexthop IP address
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.X, self).__init__()
        self.yang_name = "x"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
            ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
            ('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
        ])
        self.is_protected = None
        self.opaque_id = None
        self.interface = None
        self.nexthop_address = None
        self._segment_path = lambda: "x"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.X']['meta_info']
class Dx4(_Entity_):
    """
    End.DX4 SID context
    .. attribute:: table_id
    Table ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: next_hop_set_id
    Next Hop Set ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """
    # YANG module prefix and revision this generated node belongs to.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Python 2/3 compatible call of the _Entity_ initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4, self).__init__()
        self.yang_name = "dx4"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
            ('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
        ])
        self.table_id = None
        self.next_hop_set_id = None
        self._segment_path = lambda: "dx4"
        # Freeze last: from here on __setattr__ validates attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through YDK validation against the declared leaf names.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)
    @staticmethod
    def _meta_info():
        # Deferred import keeps the large generated meta tables off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dx4']['meta_info']
class Dt4(_Entity_):
    """
    End.DT4 SID context
    .. attribute:: table_id
    Table ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # Auto-generated YDK container binding for YANG node "dt4".
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4, self).__init__()
        self.yang_name = "dt4"
        self.yang_parent_name = "key"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
        ])
        self.table_id = None
        self._segment_path = lambda: "dt4"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validating setter for the frozen entity.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4, ['table_id'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids a circular dependency with the _meta package.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key.Dt4']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing Key container (generated code).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext.Key']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing SidContext container (generated code).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.SidContext']['meta_info']
class CreateTimestamp(_Entity_):
    """
    Creation timestamp
    .. attribute:: time_in_nano_seconds
    Timestamp in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    .. attribute:: age_in_nano_seconds
    Age in nano seconds
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    **units**\: nanosecond
    """

    # Auto-generated YDK container binding for YANG node "create-timestamp".
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.CreateTimestamp, self).__init__()
        self.yang_name = "create-timestamp"
        self.yang_parent_name = "locator-all-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
            ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
        ])
        self.time_in_nano_seconds = None
        self.age_in_nano_seconds = None
        self._segment_path = lambda: "create-timestamp"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validating setter for the frozen entity.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids a circular dependency with the _meta package.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.CreateTimestamp']['meta_info']
class Owner(_Entity_):
    """
    Owner
    .. attribute:: owner
    Owner
    **type**\: str
    **config**\: False
    """

    # Auto-generated YDK list-entry binding for YANG node "owner".
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllSids.LocatorAllSid.Owner, self).__init__()
        self.yang_name = "owner"
        self.yang_parent_name = "locator-all-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
        ])
        self.owner = None
        self._segment_path = lambda: "owner"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validating setter for the frozen entity.
        self._perform_setattr(Srv6.Standby.LocatorAllSids.LocatorAllSid.Owner, ['owner'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids a circular dependency with the _meta package.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid.Owner']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing LocatorAllSid list entry (generated code).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.LocatorAllSids.LocatorAllSid']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing LocatorAllSids container (generated code).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby.LocatorAllSids']['meta_info']
class LocatorAllActiveSids(_Entity_):
    """
    Operational container for Active SIDs across all
    Locators
    .. attribute:: locator_all_active_sid
    Operational data for given locator and SID opcode
    **type**\: list of :py:class:`LocatorAllActiveSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid>`
    **config**\: False
    """

    # Auto-generated YDK bindings mirroring the
    # Cisco-IOS-XR-segment-routing-srv6-oper YANG module. Leaf/child
    # registration order and all path strings are load-bearing for YDK's
    # encode/decode machinery; do not hand-edit the logic.
    _prefix = 'segment-routing-srv6-oper'
    _revision = '2015-11-09'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Srv6.Standby.LocatorAllActiveSids, self).__init__()
        self.yang_name = "locator-all-active-sids"
        self.yang_parent_name = "standby"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("locator-all-active-sid", ("locator_all_active_sid", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid))])
        self._leafs = OrderedDict()
        # YANG list of per-(locator, opcode) SID entries.
        self.locator_all_active_sid = YList(self)
        self._segment_path = lambda: "locator-all-active-sids"
        self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validating setter for the frozen entity.
        self._perform_setattr(Srv6.Standby.LocatorAllActiveSids, [], name, value)


    class LocatorAllActiveSid(_Entity_):
        """
        Operational data for given locator and SID
        opcode
        .. attribute:: locator_name  (key)
        Locator name
        **type**\: str
        **length:** 1..58
        **config**\: False
        .. attribute:: sid_opcode  (key)
        Sid opcode
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: sid_context
        SID Context
        **type**\: :py:class:`SidContext <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext>`
        **config**\: False
        .. attribute:: create_timestamp
        Creation timestamp
        **type**\: :py:class:`CreateTimestamp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp>`
        **config**\: False
        .. attribute:: sid
        SID
        **type**\: str
        **config**\: False
        .. attribute:: allocation_type
        Allocation Type
        **type**\: :py:class:`SidAllocation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidAllocation>`
        **config**\: False
        .. attribute:: function_type
        Function Type
        **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
        **config**\: False
        .. attribute:: state
        State
        **type**\: :py:class:`SidState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.SidState>`
        **config**\: False
        .. attribute:: has_forwarding
        Rewrite done or not
        **type**\: bool
        **config**\: False
        .. attribute:: locator
        Associated locator
        **type**\: str
        **config**\: False
        .. attribute:: owner
        Owner
        **type**\: list of :py:class:`Owner <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.Owner>`
        **config**\: False
        """

        _prefix = 'segment-routing-srv6-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # List entry keyed by (locator_name, sid_opcode); the segment
            # path below embeds both keys as YANG list predicates.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid, self).__init__()
            self.yang_name = "locator-all-active-sid"
            self.yang_parent_name = "locator-all-active-sids"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = ['locator_name','sid_opcode']
            self._child_classes = OrderedDict([("sid-context", ("sid_context", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext)), ("create-timestamp", ("create_timestamp", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp)), ("owner", ("owner", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.Owner))])
            self._leafs = OrderedDict([
                ('locator_name', (YLeaf(YType.str, 'locator-name'), ['str'])),
                ('sid_opcode', (YLeaf(YType.uint32, 'sid-opcode'), ['int'])),
                ('sid', (YLeaf(YType.str, 'sid'), ['str'])),
                ('allocation_type', (YLeaf(YType.enumeration, 'allocation-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidAllocation', '')])),
                ('function_type', (YLeaf(YType.enumeration, 'function-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'SidState', '')])),
                ('has_forwarding', (YLeaf(YType.boolean, 'has-forwarding'), ['bool'])),
                ('locator', (YLeaf(YType.str, 'locator'), ['str'])),
            ])
            self.locator_name = None
            self.sid_opcode = None
            self.sid = None
            self.allocation_type = None
            self.function_type = None
            self.state = None
            self.has_forwarding = None
            self.locator = None
            # Presence child containers are instantiated eagerly and parented here.
            self.sid_context = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext()
            self.sid_context.parent = self
            self._children_name_map["sid_context"] = "sid-context"
            self.create_timestamp = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp()
            self.create_timestamp.parent = self
            self._children_name_map["create_timestamp"] = "create-timestamp"
            self.owner = YList(self)
            self._segment_path = lambda: "locator-all-active-sid" + "[locator-name='" + str(self.locator_name) + "']" + "[sid-opcode='" + str(self.sid_opcode) + "']"
            self._absolute_path = lambda: "Cisco-IOS-XR-segment-routing-srv6-oper:srv6/standby/locator-all-active-sids/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validating setter for the frozen entity.
            self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid, ['locator_name', 'sid_opcode', 'sid', 'allocation_type', 'function_type', 'state', 'has_forwarding', 'locator'], name, value)


        class SidContext(_Entity_):
            """
            SID Context
            .. attribute:: key
            SID Key
            **type**\: :py:class:`Key <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key>`
            **config**\: False
            .. attribute:: application_data
            Application opaque data
            **type**\: str
            **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
            **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext, self).__init__()
                self.yang_name = "sid-context"
                self.yang_parent_name = "locator-all-active-sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("key", ("key", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key))])
                self._leafs = OrderedDict([
                    ('application_data', (YLeaf(YType.str, 'application-data'), ['str'])),
                ])
                self.application_data = None
                self.key = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key()
                self.key.parent = self
                self._children_name_map["key"] = "key"
                self._segment_path = lambda: "sid-context"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Validating setter for the frozen entity.
                self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext, ['application_data'], name, value)


            class Key(_Entity_):
                """
                SID Key
                .. attribute:: e
                End (PSP) SID context
                **type**\: :py:class:`E <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E>`
                **config**\: False
                .. attribute:: x
                End.X (PSP) SID context
                **type**\: :py:class:`X <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X>`
                **config**\: False
                .. attribute:: dx4
                End.DX4 SID context
                **type**\: :py:class:`Dx4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4>`
                **config**\: False
                .. attribute:: dt4
                End.DT4 SID context
                **type**\: :py:class:`Dt4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4>`
                **config**\: False
                .. attribute:: sid_context_type
                SIDContextType
                **type**\: :py:class:`Srv6EndFunction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper.Srv6EndFunction>`
                **config**\: False
                """

                _prefix = 'segment-routing-srv6-oper'
                _revision = '2015-11-09'

                def __init__(self):
                    # Union-like node: sid_context_type discriminates which of
                    # the e/x/dx4/dt4 children carries the context.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key, self).__init__()
                    self.yang_name = "key"
                    self.yang_parent_name = "sid-context"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("e", ("e", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E)), ("x", ("x", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X)), ("dx4", ("dx4", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4)), ("dt4", ("dt4", Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4))])
                    self._leafs = OrderedDict([
                        ('sid_context_type', (YLeaf(YType.enumeration, 'sid-context-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_segment_routing_srv6_oper', 'Srv6EndFunction', '')])),
                    ])
                    self.sid_context_type = None
                    self.e = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E()
                    self.e.parent = self
                    self._children_name_map["e"] = "e"
                    self.x = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X()
                    self.x.parent = self
                    self._children_name_map["x"] = "x"
                    self.dx4 = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4()
                    self.dx4.parent = self
                    self._children_name_map["dx4"] = "dx4"
                    self.dt4 = Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4()
                    self.dt4.parent = self
                    self._children_name_map["dt4"] = "dt4"
                    self._segment_path = lambda: "key"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    # Validating setter for the frozen entity.
                    self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key, ['sid_context_type'], name, value)


                class E(_Entity_):
                    """
                    End (PSP) SID context
                    .. attribute:: table_id
                    Table Id
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    .. attribute:: opaque_id
                    Additional differentiator \- opaque to SIDMgr
                    **type**\: int
                    **range:** 0..255
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E, self).__init__()
                        self.yang_name = "e"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
                        ])
                        self.table_id = None
                        self.opaque_id = None
                        self._segment_path = lambda: "e"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Validating setter for the frozen entity.
                        self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E, ['table_id', 'opaque_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Lazy import avoids a circular dependency with _meta.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.E']['meta_info']


                class X(_Entity_):
                    """
                    End.X (PSP) SID context
                    .. attribute:: is_protected
                    Is protected?
                    **type**\: bool
                    **config**\: False
                    .. attribute:: opaque_id
                    Additional differentiator \- opaque to SIDMgr
                    **type**\: int
                    **range:** 0..255
                    **config**\: False
                    .. attribute:: interface
                    Nexthop interface
                    **type**\: str
                    **pattern:** [a\-zA\-Z0\-9.\_/\-]+
                    **config**\: False
                    .. attribute:: nexthop_address
                    Nexthop IP address
                    **type**\: str
                    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X, self).__init__()
                        self.yang_name = "x"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('is_protected', (YLeaf(YType.boolean, 'is-protected'), ['bool'])),
                            ('opaque_id', (YLeaf(YType.uint8, 'opaque-id'), ['int'])),
                            ('interface', (YLeaf(YType.str, 'interface'), ['str'])),
                            ('nexthop_address', (YLeaf(YType.str, 'nexthop-address'), ['str'])),
                        ])
                        self.is_protected = None
                        self.opaque_id = None
                        self.interface = None
                        self.nexthop_address = None
                        self._segment_path = lambda: "x"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Validating setter for the frozen entity.
                        self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X, ['is_protected', 'opaque_id', 'interface', 'nexthop_address'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Lazy import avoids a circular dependency with _meta.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.X']['meta_info']


                class Dx4(_Entity_):
                    """
                    End.DX4 SID context
                    .. attribute:: table_id
                    Table ID
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    .. attribute:: next_hop_set_id
                    Next Hop Set ID
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4, self).__init__()
                        self.yang_name = "dx4"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                            ('next_hop_set_id', (YLeaf(YType.uint32, 'next-hop-set-id'), ['int'])),
                        ])
                        self.table_id = None
                        self.next_hop_set_id = None
                        self._segment_path = lambda: "dx4"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Validating setter for the frozen entity.
                        self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4, ['table_id', 'next_hop_set_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Lazy import avoids a circular dependency with _meta.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dx4']['meta_info']


                class Dt4(_Entity_):
                    """
                    End.DT4 SID context
                    .. attribute:: table_id
                    Table ID
                    **type**\: int
                    **range:** 0..4294967295
                    **config**\: False
                    """

                    _prefix = 'segment-routing-srv6-oper'
                    _revision = '2015-11-09'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4, self).__init__()
                        self.yang_name = "dt4"
                        self.yang_parent_name = "key"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('table_id', (YLeaf(YType.uint32, 'table-id'), ['int'])),
                        ])
                        self.table_id = None
                        self._segment_path = lambda: "dt4"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        # Validating setter for the frozen entity.
                        self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4, ['table_id'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Lazy import avoids a circular dependency with _meta.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                        return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key.Dt4']['meta_info']

                @staticmethod
                def _meta_info():
                    # Metadata accessor for the Key container (generated code).
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                    return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext.Key']['meta_info']

            @staticmethod
            def _meta_info():
                # Metadata accessor for the SidContext container (generated code).
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.SidContext']['meta_info']


        class CreateTimestamp(_Entity_):
            """
            Creation timestamp
            .. attribute:: time_in_nano_seconds
            Timestamp in nano seconds
            **type**\: int
            **range:** 0..18446744073709551615
            **config**\: False
            **units**\: nanosecond
            .. attribute:: age_in_nano_seconds
            Age in nano seconds
            **type**\: int
            **range:** 0..18446744073709551615
            **config**\: False
            **units**\: nanosecond
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp, self).__init__()
                self.yang_name = "create-timestamp"
                self.yang_parent_name = "locator-all-active-sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('time_in_nano_seconds', (YLeaf(YType.uint64, 'time-in-nano-seconds'), ['int'])),
                    ('age_in_nano_seconds', (YLeaf(YType.uint64, 'age-in-nano-seconds'), ['int'])),
                ])
                self.time_in_nano_seconds = None
                self.age_in_nano_seconds = None
                self._segment_path = lambda: "create-timestamp"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Validating setter for the frozen entity.
                self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp, ['time_in_nano_seconds', 'age_in_nano_seconds'], name, value)

            @staticmethod
            def _meta_info():
                # Lazy import avoids a circular dependency with _meta.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.CreateTimestamp']['meta_info']


        class Owner(_Entity_):
            """
            Owner
            .. attribute:: owner
            Owner
            **type**\: str
            **config**\: False
            """

            _prefix = 'segment-routing-srv6-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.Owner, self).__init__()
                self.yang_name = "owner"
                self.yang_parent_name = "locator-all-active-sid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('owner', (YLeaf(YType.str, 'owner'), ['str'])),
                ])
                self.owner = None
                self._segment_path = lambda: "owner"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Validating setter for the frozen entity.
                self._perform_setattr(Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.Owner, ['owner'], name, value)

            @staticmethod
            def _meta_info():
                # Lazy import avoids a circular dependency with _meta.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
                return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid.Owner']['meta_info']

        @staticmethod
        def _meta_info():
            # Metadata accessor for the LocatorAllActiveSid list entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
            return meta._meta_table['Srv6.Standby.LocatorAllActiveSids.LocatorAllActiveSid']['meta_info']

    @staticmethod
    def _meta_info():
        # Metadata accessor for the LocatorAllActiveSids container.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
        return meta._meta_table['Srv6.Standby.LocatorAllActiveSids']['meta_info']
@staticmethod
def _meta_info():
    # Metadata accessor for the enclosing Standby container (generated code).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6.Standby']['meta_info']
def clone_ptr(self):
    # Hand YDK's CRUD machinery a fresh top-level Srv6 entity to populate.
    self._top_entity = Srv6()
    return self._top_entity
@staticmethod
def _meta_info():
    # Metadata accessor for the top-level Srv6 class (generated code).
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_segment_routing_srv6_oper as meta
    return meta._meta_table['Srv6']['meta_info']
| 47.224205
| 552
| 0.424741
| 29,643
| 375,763
| 5.067672
| 0.011976
| 0.032272
| 0.040341
| 0.06312
| 0.980822
| 0.976415
| 0.972607
| 0.965098
| 0.953489
| 0.94602
| 0
| 0.024163
| 0.479041
| 375,763
| 7,956
| 553
| 47.230141
| 0.743222
| 0.181894
| 0
| 0.841567
| 0
| 0.008477
| 0.131708
| 0.065438
| 0.001462
| 0
| 0
| 0
| 0
| 1
| 0.095294
| false
| 0
| 0.034785
| 0
| 0.206665
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4128c5458426802b6b59bc7366bb5ec99123c24
| 5,973
|
py
|
Python
|
src/genie/libs/parser/sros/tests/ShowRouterIsisAdjacencyDetail/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/sros/tests/ShowRouterIsisAdjacencyDetail/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/sros/tests/ShowRouterIsisAdjacencyDetail/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden (expected) parsed output for `show router isis adjacency detail` on
# Nokia SR OS, keyed instance -> level -> interface -> adjacent system-id.
# NOTE(review): the key "restart_supressed" is spelled this way to match the
# parser's schema key -- do not "correct" it here without changing the parser.
expected_output = {
    "instance": {
        "0": {
            "level": {
                "L2": {
                    "interfaces": {
                        "To-GENIE01R07-LAG-7": {
                            "system_id": {
                                "0691.58ff.79a2": {
                                    "hold_time": 22,
                                    "hostname": "GENIE01R07",
                                    "ipv4_adj_sid": "Label 524213",
                                    "ipv4_neighbor": "10.11.97.22",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "00:23:3e:ff:a6:27",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "58d 03:24:48",
                                }
                            }
                        },
                        "To-GENIE04XR1-LAG-4": {
                            "system_id": {
                                "0670.70ff.b258": {
                                    "hold_time": 23,
                                    "hostname": "GENIE04XR1",
                                    "ipv4_adj_sid": "Label 524127",
                                    "ipv4_neighbor": "10.11.79.245",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "84:26:2b:ff:e9:9e",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "36d 23:21:57",
                                }
                            }
                        },
                        "To-GENIE03R07-LAG-9": {
                            "system_id": {
                                "0691.58ff.79aa": {
                                    "hold_time": 22,
                                    "hostname": "GENIE03R07",
                                    "ipv4_adj_sid": "Label 524214",
                                    "ipv4_neighbor": "10.11.79.242",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "00:23:3e:ff:bc:27",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "58d 03:24:48",
                                }
                            }
                        },
                    }
                }
            }
        },
        "1": {
            "level": {
                "L2": {
                    "interfaces": {
                        "To-GENIE01R07-LAG-7": {
                            "system_id": {
                                "0691.58ff.79a2": {
                                    "hold_time": 22,
                                    "hostname": "GENIE01R07",
                                    "ipv4_adj_sid": "Label 524213",
                                    "ipv4_neighbor": "10.11.97.22",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "00:23:3e:ff:a6:27",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "58d 03:24:48",
                                }
                            }
                        }
                    }
                }
            }
        },
    }
}
| 49.363636
| 83
| 0.249288
| 331
| 5,973
| 4.253776
| 0.283988
| 0.028409
| 0.028409
| 0.042614
| 0.841619
| 0.816051
| 0.796165
| 0.796165
| 0.796165
| 0.796165
| 0
| 0.113181
| 0.649422
| 5,973
| 120
| 84
| 49.775
| 0.559217
| 0
| 0
| 0.666667
| 0
| 0
| 0.257492
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f43a4b1cfd51ebd0e493f3eae1642e826267da57
| 16,366
|
py
|
Python
|
models/datasets.py
|
AK391/SciCo
|
f16af4f579fdc3bdafdb5021ee528cbc7bf2f716
|
[
"Apache-2.0"
] | 16
|
2021-04-24T07:18:54.000Z
|
2022-02-24T09:36:53.000Z
|
models/datasets.py
|
AK391/SciCo
|
f16af4f579fdc3bdafdb5021ee528cbc7bf2f716
|
[
"Apache-2.0"
] | 4
|
2021-05-18T09:32:19.000Z
|
2022-02-08T09:06:15.000Z
|
models/datasets.py
|
AK391/SciCo
|
f16af4f579fdc3bdafdb5021ee528cbc7bf2f716
|
[
"Apache-2.0"
] | 3
|
2021-09-17T18:22:27.000Z
|
2022-01-11T02:48:45.000Z
|
import torch
from torch.utils import data
import collections
from itertools import product, combinations
import numpy as np
import jsonlines
class CrossEncoderDataset(data.Dataset):
def __init__(self, data_path,
             full_doc=True,
             multiclass='multiclass',
             sep_token='</s>',
             is_training=True,
             cdlm=False):
    """Load topics from *data_path* (jsonlines) and precompute pair inputs.

    For every topic, builds the cross-encoder input strings, their labels,
    and (topic, first, second) index triples, according to *multiclass*
    ('coref' -> binary, 'hypernym' -> 3-way, 'multiclass' -> 4-way).
    Raises ValueError for any other *multiclass* value.
    """
    super().__init__()
    with jsonlines.open(data_path, 'r') as f:
        self.data = [topic for topic in f]

    # Precompute the surface text of every mention of every topic.
    for idx, topic in enumerate(self.data):
        flat = topic['flatten_tokens']
        texts = [' '.join(flat[start:end + 1])
                 for start, end, _ in topic['flatten_mentions']]
        self.data[idx]['mention_text'] = np.array(texts)

    self.sep = sep_token
    self.cdlm = cdlm
    self.full_doc = full_doc
    if multiclass not in {'coref', 'hypernym', 'multiclass'}:
        raise ValueError(f"The multiclass value needs to be in (coref, hypernym, multiclass), got {multiclass}.")
    self.multiclass = multiclass
    self.is_training = is_training

    self.pairs, self.labels = [], []
    self.first, self.second = [], []
    self.info_pairs = []
    for idx, topic in enumerate(self.data):
        # Pick the pair builder that matches the classification scheme.
        if self.multiclass == 'multiclass':
            build = self.get_topic_pairs
        elif self.multiclass == 'hypernym':
            build = self.get_topic_pair_for_hypernym
        else:
            build = self.get_topic_pairs_for_binary_classification
        inputs, labels, info_pairs = build(topic)

        self.pairs.extend(inputs)
        self.labels.extend(labels)
        # Prefix each (first, second) pair with its topic index.
        pair_nums = len(info_pairs)
        topic_col = np.array([idx] * pair_nums).reshape(pair_nums, 1)
        self.info_pairs.extend(np.concatenate((topic_col, info_pairs), axis=1))

    # Multi-way schemes need integer class ids; binary coref uses floats.
    label_dtype = torch.long if self.multiclass in ('multiclass', 'hypernym') else torch.float
    self.labels = torch.tensor(self.labels, dtype=label_dtype)
def __len__(self):
return len(self.labels)
def __getitem__(self, idx):
return self.pairs[idx], self.labels[idx]
def get_topic_pairs_for_binary_classification(self, topic):
    """Build binary coref pairs for one topic.

    Training uses unordered combinations; evaluation uses the full ordered
    product (including self-pairs). Returns (inputs, labels, pairs) where
    each input is "<ctx_a><sep><ctx_b>" and each label is True iff the two
    mentions share a cluster id.
    """
    if self.full_doc:
        contexts = [self.get_full_doc_mention(m, topic['tokens'])
                    for m in topic['mentions']]
    else:
        contexts = [self.get_sentence_context(m, topic['tokens'], topic['sentences'])
                    for m in topic['mentions']]
    contexts = np.array(contexts)

    indices = range(len(contexts))
    pair_iter = combinations(indices, r=2) if self.is_training else product(indices, repeat=2)
    left, right = (np.array(side) for side in zip(*pair_iter))

    separators = np.array([self.sep] * len(left))
    inputs = np.char.add(np.char.add(contexts[left], separators), contexts[right]).tolist()

    clusters = topic['mentions']
    labels = [clusters[a][-1] == clusters[b][-1] for a, b in zip(left, right)]
    return inputs, labels, list(zip(left, right))
def get_topic_pair_for_hypernym(self, topic):
    """Build joined inputs for all ordered pairs of distinct mentions.

    Labels: 1 when (cluster_x, cluster_y) appears in topic['relations'],
    2 when the reversed pair appears, 0 otherwise.
    Returns (inputs, labels, index pairs).
    """
    relation_pairs = [(a, b) for a, b in topic['relations']]
    if self.full_doc:
        reps = [self.get_full_doc_mention(m, topic['tokens'])
                for m in topic['mentions']]
    else:
        reps = [self.get_sentence_context(m, topic['tokens'], topic['sentences'])
                for m in topic['mentions']]
    reps = np.array(reps)
    ordered = [(a, b) for a, b in product(range(len(reps)), repeat=2) if a != b]
    first, second = (np.array(side) for side in zip(*ordered))
    sep_col = np.array([self.sep] * len(first))
    inputs = np.char.add(np.char.add(reps[first], sep_col), reps[second]).tolist()
    clusters = [m[-1] for m in topic['mentions']]
    labels = []
    for a, b in zip(first, second):
        ca, cb = clusters[a], clusters[b]
        if (ca, cb) in relation_pairs:
            labels.append(1)
        elif (cb, ca) in relation_pairs:
            labels.append(2)
        else:
            labels.append(0)
    return inputs, labels, list(zip(first, second))
def get_topic_pairs(self, topic):
    """Build joined inputs for all ordered pairs with 4-way labels.

    Labels: 1 = same cluster, 2 = (cluster_x, cluster_y) appears in
    topic['relations'], 3 = the reversed pair appears, 0 = unrelated.
    Returns (inputs, labels, index pairs).
    """
    relation_pairs = [(a, b) for a, b in topic['relations']]
    if self.full_doc:
        reps = [self.get_full_doc_mention(m, topic['tokens'])
                for m in topic['mentions']]
    else:
        reps = [self.get_sentence_context(m, topic['tokens'], topic['sentences'])
                for m in topic['mentions']]
    reps = np.array(reps)
    ordered = [(a, b) for a, b in product(range(len(reps)), repeat=2) if a != b]
    first, second = (np.array(side) for side in zip(*ordered))
    sep_col = np.array([self.sep] * len(first))
    inputs = np.char.add(np.char.add(reps[first], sep_col), reps[second]).tolist()
    clusters = [m[-1] for m in topic['mentions']]
    labels = []
    for a, b in zip(first, second):
        ca, cb = clusters[a], clusters[b]
        if ca == cb:
            labels.append(1)
        elif (ca, cb) in relation_pairs:
            labels.append(2)
        elif (cb, ca) in relation_pairs:
            labels.append(3)
        else:
            labels.append(0)
    return inputs, labels, list(zip(first, second))
def get_full_doc_mention(self, mention, tokens):
    """Render the whole document with <m>...</m> around the mention span.

    ``mention`` is (doc_id, start, end, cluster_id) with an inclusive end
    index.  The separator token is appended after the document; when
    ``self.cdlm`` is set the text is wrapped in <doc-s>...</doc-s>.
    """
    doc_id, start, end, _ = mention
    doc = tokens[doc_id]
    pieces = (doc[:start] + ['<m>'] + doc[start:end + 1] + ['</m>']
              + doc[end + 1:] + [self.sep])
    if self.cdlm:
        pieces = ['<doc-s>'] + pieces + ['</doc-s>']
    return ' '.join(pieces)
def get_sentence_context(self, mention, tokens, sentences):
    """Render the mention's containing sentence with <m>...</m> markers.

    ``mention`` is (doc_id, start, end, cluster_id); ``sentences[doc_id]``
    holds (sent_start, sent_end) token spans, scanned in order for the
    first span containing the mention.  The separator token is appended;
    CDLM models additionally wrap the text in <doc-s>...</doc-s>.
    """
    doc_id, start, end, _ = mention
    # Fallback bounds used only if no sentence span matches.
    # NOTE(review): len(tokens) counts documents, not tokens of this
    # document -- looks suspicious; confirm the intended fallback.
    sent_start, sent_end = 0, len(tokens) - 1
    i = 0
    while i < len(sentences[doc_id]):
        sent_start, sent_end = sentences[doc_id][i]
        if start >= sent_start and end <= sent_end:
            break
        i += 1
    mention_rep = tokens[doc_id][sent_start:start] + ['<m>']
    mention_rep += tokens[doc_id][start:end + 1] + ['</m>']
    # NOTE(review): the slice stops at sent_end (exclusive); if sent_end is
    # an inclusive index this drops the sentence's last token -- verify.
    mention_rep += tokens[doc_id][end + 1:sent_end] + [self.sep]
    if self.cdlm:
        mention_rep = ['<doc-s>'] + mention_rep + ['</doc-s>']
    return ' '.join(mention_rep)
class BiEncoderDataset(data.Dataset):
    """Mention-pair dataset for bi-encoder models.

    Each example keeps the two mention contexts as separate strings
    (``first`` / ``second``) so the two sides can be encoded
    independently, instead of joining them with a separator token.

    Label scheme, selected by ``multiclass``:
      * 'coref'      -> float 0/1 (mentions share a cluster or not)
      * 'hypernym'   -> long 0/1/2 (none / relation / reversed relation)
      * 'multiclass' -> long 0/1/2/3 (none / coref / relation / reversed)
    """

    def __init__(self, data_path, full_doc=True, multiclass='multiclass', sep_token='</s>', is_training=True):
        # data_path: jsonlines file, one topic dict per line with keys
        # 'mentions', 'tokens', 'sentences', 'relations',
        # 'flatten_tokens' and 'flatten_mentions'.
        super(BiEncoderDataset, self).__init__()
        with jsonlines.open(data_path, 'r') as f:
            self.data = [topic for topic in f]
        # Pre-compute every mention's surface form (end index is inclusive).
        for i, topic in enumerate(self.data):
            self.data[i]['mention_text'] = np.array([' '.join(topic['flatten_tokens'][start:end + 1])
                                                     for start, end, _ in topic['flatten_mentions']])
        self.sep = sep_token
        self.full_doc = full_doc
        if multiclass not in {'coref', 'hypernym', 'multiclass'}:
            raise ValueError(f"The multiclass value needs to be in (coref, hypernym, multiclass), got {multiclass}.")
        self.multiclass = multiclass
        self.is_training = is_training
        self.pairs, self.labels = [], []
        self.first, self.second = [], []
        # Rows of (topic_index, first_mention_index, second_mention_index).
        self.info_pairs = []
        for i, topic in enumerate(self.data):
            if self.multiclass == 'multiclass':
                m1, m2, labels, info_pairs = self.get_topic_pairs(topic)
            elif self.multiclass == 'hypernym':
                m1, m2, labels, info_pairs = self.get_topic_pair_for_hypernym(topic)
            else:
                m1, m2, labels, info_pairs = self.get_topic_pairs_for_binary_classification(topic)
            self.first.extend(m1)
            self.second.extend(m2)
            # self.pairs.extend(inputs)
            self.labels.extend(labels)
            # Prefix every (first, second) pair with its topic index.
            pair_nums = len(info_pairs)
            info_pairs = np.concatenate((np.array([i] * pair_nums).reshape(pair_nums, 1),
                                         info_pairs), axis=1)
            self.info_pairs.extend(info_pairs)
        # Class-id labels for multiclass/hypernym; binary floats for coref.
        if self.multiclass == 'multiclass' or self.multiclass == 'hypernym':
            self.labels = torch.tensor(self.labels, dtype=torch.long)
        else:
            self.labels = torch.tensor(self.labels, dtype=torch.float)

    def __len__(self):
        # One item per mention pair.
        return len(self.labels)

    def __getitem__(self, idx):
        # (first mention context, second mention context, label).
        return self.first[idx], self.second[idx], self.labels[idx]

    def get_topic_pairs_for_binary_classification(self, topic):
        """Coreference pairs: label True when both mentions share a cluster id.

        Training uses unordered pairs (combinations); evaluation uses all
        ordered pairs, including self-pairs (product).
        Returns (first contexts, second contexts, labels, index pairs).
        """
        mentions = []
        for mention in topic['mentions']:
            if self.full_doc:
                mentions.append(self.get_full_doc_mention(mention, topic['tokens']))
            else:
                mentions.append(self.get_sentence_context(mention, topic['tokens'], topic['sentences']))
        mentions = np.array(mentions)
        if self.is_training:
            first, second = zip(*combinations(range(len(mentions)), r=2))
        else:
            first, second = zip(*product(range(len(mentions)), repeat=2))
        first, second = np.array(first), np.array(second)
        m1 = mentions[first]
        m2 = mentions[second]
        #
        # seps = np.array([self.sep] * len(first))
        # inputs = np.char.add(np.char.add(mentions[first], seps), mentions[second]).tolist()
        # Cluster id is the last element of each mention tuple.
        labels = [topic['mentions'][x][-1] == topic['mentions'][y][-1]
                  for x, y in zip(first, second)]
        return m1, m2, labels, list(zip(first, second))

    def get_topic_pair_for_hypernym(self, topic):
        '''All ordered pairs of distinct mentions with directional labels.

        :param topic: topic dict (see __init__).
        :return: (first contexts, second contexts, labels, index pairs);
                 label 1 when (cluster_x, cluster_y) appears in
                 topic['relations'], 2 for the reversed pair, 0 otherwise.
        '''
        relations = [(x, y) for x, y in topic['relations']]
        mentions = []
        for mention in topic['mentions']:
            if self.full_doc:
                mentions.append(self.get_full_doc_mention(mention, topic['tokens']))
            else:
                mentions.append(self.get_sentence_context(mention, topic['tokens'], topic['sentences']))
        mentions = np.array(mentions)
        first, second = zip(*[(x, y) for x, y in product(range(len(mentions)), repeat=2) if x != y])
        first, second = np.array(first), np.array(second)
        m1 = mentions[first]
        m2 = mentions[second]
        # seps = np.array([self.sep] * len(first))
        # inputs = np.char.add(np.char.add(mentions[first], seps), mentions[second]).tolist()
        labels = []
        for x, y in zip(first, second):
            cluster_x, cluster_y = topic['mentions'][x][-1], topic['mentions'][y][-1]
            if (cluster_x, cluster_y) in relations:
                labels.append(1)
            elif (cluster_y, cluster_x) in relations:
                labels.append(2)
            else:
                labels.append(0)
        return m1, m2, labels, list(zip(first, second))

    def get_topic_pairs(self, topic):
        '''All ordered pairs of distinct mentions with 4-way labels.

        :param topic: topic dict (see __init__).
        :return: (first contexts, second contexts, labels, index pairs);
                 label 1 = same cluster, 2 = relation, 3 = reversed
                 relation, 0 = unrelated.
        '''
        relations = [(x, y) for x, y in topic['relations']]
        mentions = []
        for mention in topic['mentions']:
            if self.full_doc:
                mentions.append(self.get_full_doc_mention(mention, topic['tokens']))
            else:
                mentions.append(self.get_sentence_context(mention, topic['tokens'], topic['sentences']))
        mentions = np.array(mentions)
        first, second = zip(*[(x, y) for x, y in product(range(len(mentions)), repeat=2) if x != y])
        first, second = np.array(first), np.array(second)
        m1 = mentions[first]
        m2 = mentions[second]
        # seps = np.array([self.sep] * len(first))
        # inputs = np.char.add(np.char.add(mentions[first], seps), mentions[second]).tolist()
        labels = []
        for x, y in zip(first, second):
            cluster_x, cluster_y = topic['mentions'][x][-1], topic['mentions'][y][-1]
            if cluster_x == cluster_y:
                labels.append(1)
            elif (cluster_x, cluster_y) in relations:
                labels.append(2)
            elif (cluster_y, cluster_x) in relations:
                labels.append(3)
            else:
                labels.append(0)
        return m1, m2, labels, list(zip(first, second))

    def get_full_doc_mention(self, mention, tokens):
        # Whole document with <m>...</m> around the (inclusive) mention span.
        # Unlike the cross-encoder variant, no separator token is appended.
        doc_id, start, end, _ = mention
        mention_rep = tokens[doc_id][:start] + ['<m>']
        mention_rep += tokens[doc_id][start:end + 1] + ['</m>']
        mention_rep += tokens[doc_id][end + 1:]
        return ' '.join(mention_rep)

    def get_sentence_context(self, mention, tokens, sentences):
        # Containing sentence with <m>...</m> markers plus the sep token.
        doc_id, start, end, _ = mention
        # Fallback bounds used only if no sentence span matches.
        # NOTE(review): len(tokens) counts documents, not tokens of this
        # document -- looks suspicious; confirm the intended fallback.
        sent_start, sent_end = 0, len(tokens) - 1
        i = 0
        while i < len(sentences[doc_id]):
            sent_start, sent_end = sentences[doc_id][i]
            if start >= sent_start and end <= sent_end:
                break
            i += 1
        mention_rep = tokens[doc_id][sent_start:start] + ['<m>']
        mention_rep += tokens[doc_id][start:end + 1] + ['</m>']
        # NOTE(review): slice ends at sent_end (exclusive); if sent_end is an
        # inclusive index this drops the sentence's last token -- verify.
        mention_rep += tokens[doc_id][end + 1:sent_end] + [self.sep]
        return ' '.join(mention_rep)
class ClusterForHypernymDataset(data.Dataset):
    """Pairs of entity clusters for hypernymy (NLI-style) classification.

    Every ordered pair of distinct clusters in a topic becomes one
    (premise, hypothesis) example; a cluster is rendered as a
    comma-separated random sample of up to 10 of its mention strings.

    labels:
        0: not related
        1: neutral == hypernym
        2: entailment == hyponym
    """

    def __init__(self, data_path, with_context=False):
        super(ClusterForHypernymDataset, self).__init__()
        with jsonlines.open(data_path, 'r') as reader:
            self.data = list(reader)
        self.with_context = with_context
        self.premise, self.hypothesis, self.labels = [], [], []
        # Pre-compute every mention's surface form (end index is inclusive).
        for idx, topic in enumerate(self.data):
            texts = [' '.join(topic['flatten_tokens'][s:e + 1])
                     for s, e, _ in topic['flatten_mentions']]
            self.data[idx]['mention_text'] = np.array(texts)
        for topic in self.data:
            prem, hypo, labs = self.get_topic_candidates(topic)
            self.premise.extend(prem)
            self.hypothesis.extend(hypo)
            self.labels.extend(labs)

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        return self.premise[idx], self.hypothesis[idx], self.labels[idx]

    def get_topic_candidates(self, topic):
        """Render every cluster and label each ordered cluster pair.

        NOTE(review): labels compare *positional* cluster indices against
        topic['relations']; this is only correct if cluster ids coincide
        with their order of first appearance -- verify upstream.
        """
        members = collections.defaultdict(list)
        for pos, (_, _, _, c_id) in enumerate(topic['mentions']):
            members[c_id].append(pos)
        relation_pairs = [(a, b) for a, b in topic['relations']]
        cluster_ids, rendered = [], []
        for c_id, positions in members.items():
            texts = topic['mention_text'][positions]
            # Randomly sample at most 10 mention strings per cluster.
            sampled = np.random.choice(texts, min(len(texts), 10), replace=False)
            rendered.append(', '.join(sampled))
            cluster_ids.append(c_id)
        ordered = [(a, b) for a, b in product(range(len(rendered)), repeat=2) if a != b]
        labels = []
        for a, b in ordered:
            if (a, b) in relation_pairs:
                labels.append(1)
            elif (b, a) in relation_pairs:
                labels.append(2)
            else:
                labels.append(0)
        first, second = zip(*ordered)
        first, second = torch.tensor(first), torch.tensor(second)
        rendered = np.array(rendered)
        return rendered[first], rendered[second], labels
| 34.167015
| 117
| 0.565746
| 1,967
| 16,366
| 4.551093
| 0.074733
| 0.007373
| 0.008043
| 0.013293
| 0.861819
| 0.850983
| 0.850983
| 0.844057
| 0.84227
| 0.835121
| 0
| 0.007532
| 0.302334
| 16,366
| 479
| 118
| 34.167015
| 0.776493
| 0.041855
| 0
| 0.796774
| 0
| 0
| 0.054647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.019355
| 0.019355
| 0.148387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be2ed178c725e4810d768cd0d341238ab0e87966
| 7,840
|
py
|
Python
|
test/test_cases__yb_chunk_dml_by_integer_yyyymmdd.py
|
eloemosynator/YbEasyCli
|
b35ebe03da07898cfa06ff687cba29cd83268c31
|
[
"MIT"
] | null | null | null |
test/test_cases__yb_chunk_dml_by_integer_yyyymmdd.py
|
eloemosynator/YbEasyCli
|
b35ebe03da07898cfa06ff687cba29cd83268c31
|
[
"MIT"
] | 4
|
2020-06-03T18:11:29.000Z
|
2022-03-07T20:41:16.000Z
|
test/test_cases__yb_chunk_dml_by_integer_yyyymmdd.py
|
eloemosynator/YbEasyCli
|
b35ebe03da07898cfa06ff687cba29cd83268c31
|
[
"MIT"
] | 2
|
2020-05-27T23:43:03.000Z
|
2022-03-03T23:16:15.000Z
|
map_out = {
r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{1,6}(-|\+)\d{2}' : 'YYYY-MM-DD HH:MM:SS.FFFFFF-TZ'
, r'\d{2}:\d{2}:\d{2}.\d{1,6}' : 'HH:MM:SS.FFFFFF'
, r'\d{4}-\d{2}-\d{2}' : 'YYYY-MM-DD'}
test_cases = [
test_case(
cmd=('yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 '
'--execute_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Starting YYYYMMDD Integer Date Chunking, first calculating date group counts
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Build Chunk DMLs
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 1, Rows: 166582, Range 20200101 <= col19 < 20200111
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 2, Rows: 100018, Range 20200111 <= col19 < 20200902
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 3, Rows: 101800, Range 20200902 <= col19 < 20210426
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 4, Rows: 100376, Range 20210426 <= col19 < 20211215
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 5, Rows: 100212, Range 20211215 <= col19 < 20220727
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 6, Rows: 100988, Range 20220727 <= col19 < 20230415
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 7, Rows: 102860, Range 20230415 <= col19 < 20240222
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 8, Rows: 100266, Range 20240222 <= col19 < 20250401
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 9, Rows: 100036, Range 20250401 <= col19 < 20320311
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 10, Rows: 26862, Range 20320311 <= col19 < 20420307
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 11, Rows: 0, col19 IS NULL
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Completed YYYYMMDD Integer Date Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : HH:MM:SS.FFFFFF
--Overhead duration : HH:MM:SS.FFFFFF
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 166582
--Average chunk size : 90909
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
, test_case(
cmd=('yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 '
'--print_chunk_dml --null_chunk_off --verbose_chunk_off')
, exit_code=0
, stdout="""-- Running DML chunking.
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 166582) >>>*/ 20200101 <= col19 AND col19 < 20200111 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100018) >>>*/ 20200111 <= col19 AND col19 < 20200902 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 101800) >>>*/ 20200902 <= col19 AND col19 < 20210426 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100376) >>>*/ 20210426 <= col19 AND col19 < 20211215 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100212) >>>*/ 20211215 <= col19 AND col19 < 20220727 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100988) >>>*/ 20220727 <= col19 AND col19 < 20230415 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 102860) >>>*/ 20230415 <= col19 AND col19 < 20240222 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100266) >>>*/ 20240222 <= col19 AND col19 < 20250401 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100036) >>>*/ 20250401 <= col19 AND col19 < 20320311 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 26862) >>>*/ 20320311 <= col19 AND col19 < 20420307 /*<<< chunk_clause */;
-- Completed DML chunking."""
, stderr='')
, test_case(
cmd=('yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 '
'--print_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-08-22 23:04:57.77992-06: Starting YYYYMMDD Integer Date Chunking, first calculating date group counts
--2020-08-22 23:04:58.202254-06: Build Chunk DMLs
--2020-08-22 23:04:58.202609-06: Chunk: 1, Rows: 166582, Range 20200101 <= col19 < 20200111
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 166582) >>>*/ 20200101 <= col19 AND col19 < 20200111 /*<<< chunk_clause */;
--2020-08-22 23:04:58.203502-06: Chunk: 2, Rows: 100018, Range 20200111 <= col19 < 20200902
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100018) >>>*/ 20200111 <= col19 AND col19 < 20200902 /*<<< chunk_clause */;
--2020-08-22 23:04:58.203782-06: Chunk: 3, Rows: 101800, Range 20200902 <= col19 < 20210426
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 101800) >>>*/ 20200902 <= col19 AND col19 < 20210426 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204023-06: Chunk: 4, Rows: 100376, Range 20210426 <= col19 < 20211215
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100376) >>>*/ 20210426 <= col19 AND col19 < 20211215 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204269-06: Chunk: 5, Rows: 100212, Range 20211215 <= col19 < 20220727
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100212) >>>*/ 20211215 <= col19 AND col19 < 20220727 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204521-06: Chunk: 6, Rows: 100988, Range 20220727 <= col19 < 20230415
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100988) >>>*/ 20220727 <= col19 AND col19 < 20230415 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204862-06: Chunk: 7, Rows: 102860, Range 20230415 <= col19 < 20240222
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 102860) >>>*/ 20230415 <= col19 AND col19 < 20240222 /*<<< chunk_clause */;
--2020-08-22 23:04:58.205211-06: Chunk: 8, Rows: 100266, Range 20240222 <= col19 < 20250401
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100266) >>>*/ 20240222 <= col19 AND col19 < 20250401 /*<<< chunk_clause */;
--2020-08-22 23:04:58.207026-06: Chunk: 9, Rows: 100036, Range 20250401 <= col19 < 20320311
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100036) >>>*/ 20250401 <= col19 AND col19 < 20320311 /*<<< chunk_clause */;
--2020-08-22 23:04:58.207984-06: Chunk: 10, Rows: 26862, Range 20320311 <= col19 < 20420307
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 26862) >>>*/ 20320311 <= col19 AND col19 < 20420307 /*<<< chunk_clause */;
--2020-08-22 23:04:58.208485-06: Chunk: 11, Rows: 0, col19 IS NULL
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE col19 IS NULL;
--2020-08-22 23:04:58.208789-06: Completed YYYYMMDD Integer Date Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:00.430099
--Overhead duration : 00:00:00.430176
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 166582
--Average chunk size : 90909
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
]
| 79.191919
| 180
| 0.687245
| 1,210
| 7,840
| 4.302479
| 0.110744
| 0.084518
| 0.052439
| 0.080676
| 0.944679
| 0.934307
| 0.926239
| 0.910104
| 0.71456
| 0.63907
| 0
| 0.223143
| 0.153444
| 7,840
| 99
| 181
| 79.191919
| 0.561248
| 0
| 0
| 0.541667
| 0
| 0.489583
| 0.939549
| 0.099222
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.020833
| 0
| 0
| 0
| 0.020833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
be36fc28793ced7a4eaab578f30c6d8ae5dbf2e9
| 180
|
py
|
Python
|
autogalaxy/pipeline/phase/interferometer/__init__.py
|
jonathanfrawley/PyAutoGalaxy_copy
|
1cedbfdcf65020538128163f7d8a7f8e169646e0
|
[
"MIT"
] | null | null | null |
autogalaxy/pipeline/phase/interferometer/__init__.py
|
jonathanfrawley/PyAutoGalaxy_copy
|
1cedbfdcf65020538128163f7d8a7f8e169646e0
|
[
"MIT"
] | null | null | null |
autogalaxy/pipeline/phase/interferometer/__init__.py
|
jonathanfrawley/PyAutoGalaxy_copy
|
1cedbfdcf65020538128163f7d8a7f8e169646e0
|
[
"MIT"
] | null | null | null |
from .phase import PhaseInterferometer
from autogalaxy.pipeline.phase.interferometer.result import Result
from autogalaxy.pipeline.phase.interferometer.analysis import Analysis
| 45
| 71
| 0.866667
| 20
| 180
| 7.8
| 0.45
| 0.179487
| 0.282051
| 0.346154
| 0.525641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 180
| 3
| 72
| 60
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be372335774e02a099493ac92c527a96a09dd81c
| 5,455
|
py
|
Python
|
src/bmcAPI.py
|
ciscops/Project_Federate
|
af1cd5c1a069772e9a6fba5c488e5c51124a25dc
|
[
"RSA-MD"
] | 1
|
2021-12-08T06:18:20.000Z
|
2021-12-08T06:18:20.000Z
|
src/bmcAPI.py
|
ciscops/Project_Federate
|
af1cd5c1a069772e9a6fba5c488e5c51124a25dc
|
[
"RSA-MD"
] | null | null | null |
src/bmcAPI.py
|
ciscops/Project_Federate
|
af1cd5c1a069772e9a6fba5c488e5c51124a25dc
|
[
"RSA-MD"
] | 1
|
2021-12-08T06:18:27.000Z
|
2021-12-08T06:18:27.000Z
|
#!/usr/bin/env python3
import requests
import urllib3
import json
#commented out portions make the API request and formats the response in JSON
def get_Bmc_Token(bmc):
    """Return a BMC AR-JWT login token (stubbed).

    The real requests.post login call is disabled; this only prints the
    request that would be sent and returns a placeholder token string.
    Expects bmc keys: 'bmc_host', 'bmc_username', 'bmc_password'.
    """
    url = 'http://{}/api/jwt/login'.format(bmc['bmc_host'])
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # Real call (disabled):
    #   resp = requests.post(url, auth=(bmc['bmc_username'], bmc['bmc_password']),
    #                        headers=headers, verify=False)
    #   then validate resp.json() and extract 'Token'.
    print('BMC token API call: \n URL: {} \n Headers: {} \n Authorization: {}\n\n'.format(url, headers, (bmc['bmc_username'], bmc['bmc_password'])))
    return 'TOKEN GOES HERE'
def create_Bmc_Incident_Dnac(bmc, event):
    """Build (and log) a BMC incident payload for a DNAC event.

    The real requests.post is disabled; returns a dict describing the URL,
    headers and body that would have been sent.  Expects bmc keys
    'bmc_host' and 'bmc_Token'; event keys 'severity' and 'description'.
    """
    url = 'http://{}/api/arsys/v1/entry/HPD:IncidentInterface_Create?fields=values(Incident Number)'.format(bmc['bmc_host'])
    headers = {'Authorization': 'AR-JWT ' + bmc['bmc_Token'],
               'Content-Type': 'application/json'}
    body = {'values': {'First_Name': 'First Name Here',
                       'Last_Name': 'Last Name Here',
                       'Description': event['description'],
                       'Impact': 'Impact Here',
                       'Urgency': event['severity'],
                       'Status': 'Status Here',
                       'Reported Source': 'Source Here',
                       'Service_Type': 'Service Type Here'}}
    # Real call (disabled): requests.post(url, headers=headers, data=body, verify=False)
    resp = {'URL': url, 'Headers': headers, 'Body': body}
    print('BMC incident API response with DNAC event: \n {}'.format(resp))
    return resp
def create_Bmc_Incident_Prime(bmc, event):
    """Build (and log) a BMC incident payload for a Prime event.

    Kept separate from the DNAC variant because Prime's severity field
    uses different values.  The real requests.post is disabled; returns a
    dict describing the URL, headers and body that would have been sent.
    """
    url = 'http://{}/api/arsys/v1/entry/HPD:IncidentInterface_Create?fields=values(Incident Number)'.format(bmc['bmc_host'])
    headers = {'Authorization': 'AR-JWT ' + bmc['bmc_Token'],
               'Content-Type': 'application/json'}
    body = {'values': {'First_Name': 'First Name Here',
                       'Last_Name': 'Last Name Here',
                       'Description': event['description'],
                       'Impact': 'Impact Here',
                       'Urgency': event['severity'],
                       'Status': 'Status Here',
                       'Reported Source': 'Source Here',
                       'Service_Type': 'Service Type Here'}}
    # Real call (disabled): requests.post(url, headers=headers, data=body, verify=False)
    resp = {'URL': url, 'Headers': headers, 'Body': body}
    print('BMC incident API response with Prime event: \n {}'.format(resp))
    return resp
def create_Bmc_Incident_Epnm(bmc, event):
    """Build (and log) a BMC incident payload for an EPNM event.

    Kept separate from the DNAC variant because EPNM's severity field
    uses different values.  The real requests.post is disabled; returns a
    dict describing the URL, headers and body that would have been sent.
    """
    url = 'http://{}/api/arsys/v1/entry/HPD:IncidentInterface_Create?fields=values(Incident Number)'.format(bmc['bmc_host'])
    headers = {'Authorization': 'AR-JWT ' + bmc['bmc_Token'],
               'Content-Type': 'application/json'}
    body = {'values': {'First_Name': 'First Name Here',
                       'Last_Name': 'Last Name Here',
                       'Description': event['description'],
                       'Impact': 'Impact Here',
                       'Urgency': event['severity'],
                       'Status': 'Status Here',
                       'Reported Source': 'Source Here',
                       'Service_Type': 'Service Type Here'}}
    # Real call (disabled): requests.post(url, headers=headers, data=body, verify=False)
    resp = {'URL': url, 'Headers': headers, 'Body': body}
    print('BMC incident API response with EPNM event: \n {}'.format(resp))
    return resp
def create_Bmc_Incident_Sdwan(bmc, event):
    """Build (and log) a BMC incident payload for an SDWAN event.

    (The original header comment said "EPNM" -- copy/paste slip; this is
    the SDWAN variant, separate because its severity values differ.)
    The real requests.post is disabled; returns a dict describing the
    URL, headers and body that would have been sent.
    """
    url = 'http://{}/api/arsys/v1/entry/HPD:IncidentInterface_Create?fields=values(Incident Number)'.format(bmc['bmc_host'])
    headers = {'Authorization': 'AR-JWT ' + bmc['bmc_Token'],
               'Content-Type': 'application/json'}
    body = {'values': {'First_Name': 'First Name Here',
                       'Last_Name': 'Last Name Here',
                       'Description': event['description'],
                       'Impact': 'Impact Here',
                       'Urgency': event['severity'],
                       'Status': 'Status Here',
                       'Reported Source': 'Source Here',
                       'Service_Type': 'Service Type Here'}}
    # Real call (disabled): requests.post(url, headers=headers, data=body, verify=False)
    resp = {'URL': url, 'Headers': headers, 'Body': body}
    print('BMC incident API response with SDWAN event: \n {}'.format(resp))
    return resp
| 33.060606
| 148
| 0.596334
| 624
| 5,455
| 5.144231
| 0.174679
| 0.024299
| 0.042368
| 0.024922
| 0.839564
| 0.832399
| 0.806854
| 0.806854
| 0.794393
| 0.794393
| 0
| 0.001498
| 0.265811
| 5,455
| 164
| 149
| 33.262195
| 0.8
| 0.210082
| 0
| 0.707965
| 0
| 0.044248
| 0.434152
| 0.008108
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044248
| false
| 0.00885
| 0.026549
| 0
| 0.115044
| 0.044248
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be4fc2d1208c1ae53a00ab2c26a0b489a599bcf0
| 10,901
|
py
|
Python
|
ue_eval_scripts/calibration.py
|
chryssa-zrv/UA_COMET
|
527e7c86bd0a0d8ff90efda58e820108a5666b92
|
[
"Apache-2.0"
] | null | null | null |
ue_eval_scripts/calibration.py
|
chryssa-zrv/UA_COMET
|
527e7c86bd0a0d8ff90efda58e820108a5666b92
|
[
"Apache-2.0"
] | null | null | null |
ue_eval_scripts/calibration.py
|
chryssa-zrv/UA_COMET
|
527e7c86bd0a0d8ff90efda58e820108a5666b92
|
[
"Apache-2.0"
] | null | null | null |
from scipy.special import erfinv
import numpy as np
import itertools
from tqdm import tqdm
import math
NUM_BINS = 100


def compute_calibration_error_non_parametric(target, scores, num_bins=NUM_BINS//5, scaling_val=1, scaling_sum=0):
    """Non-parametric calibration error from sampled score distributions.

    For each confidence level gamma, builds an interval around the per-row
    median of `scores` whose half-width is the central inter-quantile range
    affinely adjusted by scaling_val/scaling_sum, and measures the fraction
    of `target` values it covers.  Returns (mean |gamma - coverage|,
    gammas, coverages).
    """
    levels = np.linspace(0, 1, num_bins)
    centers = np.median(scores, axis=1)
    coverage = []
    for level in levels:
        half_width = (np.quantile(scores, (1 + level) / 2, axis=1)
                      - np.quantile(scores, (1 - level) / 2, axis=1)) * scaling_val + scaling_sum
        lo = centers - half_width
        hi = centers + half_width
        coverage.append(np.logical_and(lo <= target, target <= hi).sum() / len(target))
    error = (np.abs(levels - coverage)).mean()
    return error, levels, coverage
def optimize_calibration_error_non_parametric(target, scores, scaling_vals, scaling_sums, num_bins=NUM_BINS//5):
    """Grid-search (scaling_sum, scaling_val) minimizing the non-parametric
    calibration error.

    Returns (best_error, best_scaling_val, best_scaling_sum).
    """
    best_err, best_scale, best_offset = np.inf, np.nan, np.nan
    for offset, scale in tqdm(itertools.product(scaling_sums, scaling_vals)):
        err, _, _ = compute_calibration_error_non_parametric(
            target, scores, num_bins, scale, offset)
        if err < best_err:
            best_err, best_scale, best_offset = err, scale, offset
    return best_err, best_scale, best_offset
def compute_calibration_error_non_parametric_base(target, mean, s, num_bins=NUM_BINS//5):
    """Baseline calibration error with fixed-width intervals.

    At each confidence level gamma, the interval is mean +/- s*gamma/2;
    coverage of `target` is compared to gamma.
    Returns (mean |gamma - coverage|, gammas, coverages).
    """
    levels = np.linspace(0, 1, num_bins)
    coverage = []
    for level in levels:
        lo = mean - s * level / 2
        hi = mean + s * level / 2
        coverage.append(np.logical_and(lo <= target, target <= hi).sum() / len(target))
    error = (np.abs(levels - np.array(coverage))).mean()
    return error, levels, coverage
def optimize_calibration_error_non_parametric_base(target, mean, s_vals, num_bins=NUM_BINS//5):
    """Pick the interval width from s_vals minimizing the baseline
    non-parametric calibration error.

    Returns (best_error, best_s).
    """
    best_err, best_width = np.inf, np.nan
    for width in tqdm(s_vals):
        err, _, _ = compute_calibration_error_non_parametric_base(
            target, mean, width, num_bins)
        if err < best_err:
            best_err, best_width = err, width
    return best_err, best_width
def probit(p):
    """Quantile function (inverse CDF) of the standard normal at p."""
    return erfinv(2 * p - 1) * np.sqrt(2)
def compute_calibration_error(target, mean, std, std_sum=0, std_scale=1,
                              num_bins=NUM_BINS):
    """Gaussian (parametric) calibration error.

    Treats each prediction as N(mean, std') with
    std' = sqrt(std_sum**2 + (std_scale*std)**2), then compares the nominal
    central-interval confidence gamma with the empirical coverage of
    `target`.  Returns (mean |gamma - coverage|, gammas, coverages).
    """
    adj_std = np.sqrt(std_sum**2 + (std_scale * std)**2)
    levels = np.linspace(0, 1, num_bins)
    coverage = []
    for level in levels:
        lo = mean + adj_std * probit((1 - level) / 2)
        hi = mean + adj_std * probit((1 + level) / 2)
        coverage.append(np.logical_and(lo <= target, target <= hi).sum() / len(target))
    error = (np.abs(levels - coverage)).mean()
    return error, levels, coverage
def optimize_calibration_error(target, mean, std, std_sums, std_scales,
                               num_bins=NUM_BINS):
    """Grid-search (std_sum, std_scale) minimizing the Gaussian calibration
    error.

    Returns (best_error, best_std_sum, best_std_scale).
    """
    best_err, best_offset, best_scale = np.inf, np.nan, np.nan
    for offset, scale in tqdm(itertools.product(std_sums, std_scales)):
        err, _, _ = compute_calibration_error(
            target, mean, std, offset, scale, num_bins)
        if err < best_err:
            best_err, best_offset, best_scale = err, offset, scale
    return best_err, best_offset, best_scale
# from https://arxiv.org/pdf/2005.12496.pdf
def compute_sharpness(std, std_sum=0, std_scale=1):
    """Sharpness: mean predicted variance after the affine std adjustment
    std' = sqrt(std_sum**2 + (std_scale*std)**2)."""
    adj_std = np.sqrt(std_sum**2 + (std_scale * std)**2)
    return np.mean(adj_std**2)
# from https://openreview.net/pdf?id=ryg8wpEtvB
def compute_ence(target, mean, std, std_sum=0, std_scale=1,
                 num_bins=100):
    """Expected Normalized Calibration Error (normalized by predicted std).

    Sorts predictions by adjusted std, splits them into num_bins equal-size
    bins, and per bin compares the empirical RMSE of (mean - target)
    against the root-mean predicted variance; the per-bin
    |mvar - rmse| / mvar terms are averaged.
    Returns (ence, bin positions 1..100, per-bin terms).
    """
    matches = []
    # Bin edges expressed as (fractional) positions into the sorted arrays.
    gammas = np.linspace(0, len(target) , num_bins+1)
    std_transformed = np.sqrt(std_sum**2 + (std_scale*std)**2)
    sorted_idxs = np.argsort(std_transformed)
    std_sorted = [std_transformed[i] for i in sorted_idxs]
    mean_sorted = [mean[i] for i in sorted_idxs]
    target_sorted = [target[i] for i in sorted_idxs]
    for i,_ in enumerate(gammas):
        if i+1<len(gammas):
            lower = math.floor(gammas[i])
            upper = math.floor(gammas[i+1])
            bin_mean = np.asarray(mean_sorted[lower:upper])
            bin_target = np.asarray(target_sorted[lower:upper])
            bin_std = np.asarray(std_sorted[lower:upper])
            width = upper-lower
            # Guard against empty bins (possible when num_bins > len(target)).
            epsilon=0.001
            if not width>0.0:
                width = epsilon
            rmse = np.sqrt(1/width * np.sum((bin_mean-bin_target)**2))
            mvar = np.sqrt(1/width * np.sum(bin_std**2))
            # Normalized by predicted uncertainty; divides by zero (-> nan)
            # if a bin's predicted stds are all exactly zero.
            nse = np.abs((mvar-rmse)/mvar)
            matches.append(nse)
    ence = np.mean(matches)
    return ence, np.linspace(1, 100 , num_bins), matches
# from https://openreview.net/pdf?id=ryg8wpEtvB
def compute_ence_rn(target, mean, std, std_sum=0, std_scale=1,
                    num_bins=100):
    """ENCE variant normalized by the empirical RMSE instead of the
    predicted std: averages per-bin |mvar - rmse| / rmse over equal-size
    bins of predictions sorted by adjusted std.
    Returns (ence_rn, bin positions 1..100, per-bin terms).
    """
    matches = []
    # Bin edges expressed as (fractional) positions into the sorted arrays.
    gammas = np.linspace(0, len(target) , num_bins+1)
    std_transformed = np.sqrt(std_sum**2 + (std_scale*std)**2)
    sorted_idxs = np.argsort(std_transformed)
    std_sorted = [std_transformed[i] for i in sorted_idxs]
    mean_sorted = [mean[i] for i in sorted_idxs]
    target_sorted = [target[i] for i in sorted_idxs]
    for i,_ in enumerate(gammas):
        if i+1<len(gammas):
            lower = math.floor(gammas[i])
            upper = math.floor(gammas[i+1])
            bin_mean = np.asarray(mean_sorted[lower:upper])
            bin_target = np.asarray(target_sorted[lower:upper])
            bin_std = np.asarray(std_sorted[lower:upper])
            width = upper-lower
            # Guard against empty bins (possible when num_bins > len(target)).
            epsilon=0.001
            if not width>0.0:
                width = epsilon
            rmse = np.sqrt(1/width * np.sum((bin_mean-bin_target)**2))
            mvar = np.sqrt(1/width * np.sum(bin_std**2))
            # Normalized by empirical error; divides by zero (-> nan) if a
            # bin's predictions match its targets exactly.
            nse = np.abs((mvar-rmse)/rmse)
            matches.append(nse)
    ence_rn = np.mean(matches)
    return ence_rn, np.linspace(1, 100 , num_bins), matches
# from https://openreview.net/pdf?id=ryg8wpEtvB
def compute_ence_nn(target, mean, std, std_sum=0, std_scale=1,
                    num_bins=100):
    """Non-normalized ENCE variant: averages |RMV - RMSE| per bin.

    Identical binning to ``compute_ence`` (sort by recalibrated std, split
    into ``num_bins`` equal-count bins), but the per-bin deviation is the
    absolute difference with no normalization.

    Returns:
        (ence_nn, bin positions 1..100, per-bin error list).
    """
    std_cal = np.sqrt(std_sum ** 2 + (std_scale * std) ** 2)
    order = np.argsort(std_cal)
    std_by_conf = np.asarray(std_cal)[order]
    mean_by_conf = np.asarray(mean)[order]
    target_by_conf = np.asarray(target)[order]

    edges = np.linspace(0, len(target), num_bins + 1)
    per_bin = []
    for lo_edge, hi_edge in zip(edges[:-1], edges[1:]):
        lo, hi = math.floor(lo_edge), math.floor(hi_edge)
        count = hi - lo
        if not count > 0.0:
            count = 0.001  # avoid division by zero on empty bins
        err = mean_by_conf[lo:hi] - target_by_conf[lo:hi]
        bin_rmse = np.sqrt(np.sum(err ** 2) / count)
        bin_rmv = np.sqrt(np.sum(std_by_conf[lo:hi] ** 2) / count)
        per_bin.append(np.abs(bin_rmv - bin_rmse))
    return np.mean(per_bin), np.linspace(1, 100, num_bins), per_bin
# From https://arxiv.org/pdf/2006.10255.pdf
def compute_ecpe(target, mean, std, std_sum=0, std_scale=1,
                 num_bins=100):
    """Expected Coverage Probability Error.

    Thin wrapper that delegates to ``compute_calibration_error`` with the
    same arguments and forwards its (error, gammas, matches) result
    unchanged.
    """
    return compute_calibration_error(
        target, mean, std, std_sum, std_scale, num_bins)
def compute_mcpe(target, mean, std, std_sum=0, std_scale=1,
                 num_bins=100):
    """Maximum Coverage Probability Error.

    For each nominal coverage level gamma in [0, 1], builds the central
    Gaussian prediction interval from the recalibrated std (via the
    ``probit`` helper) and measures the empirical coverage; returns the
    largest absolute gap between nominal and empirical coverage.

    Returns:
        (max_error, gammas, empirical coverage list).
    """
    std_cal = np.sqrt(std_sum ** 2 + (std_scale * std) ** 2)
    gammas = np.linspace(0, 1, num_bins)
    coverages = []
    for gamma in gammas:
        # Central interval holding nominal probability mass `gamma`.
        lo = mean + std_cal * probit((1 - gamma) / 2)
        hi = mean + std_cal * probit((1 + gamma) / 2)
        inside = np.logical_and(lo <= target, target <= hi)
        coverages.append(inside.sum() / len(target))
    worst = np.max(np.abs(gammas - coverages))
    return worst, gammas, coverages
# EPIW sharpness for parametric approach
def compute_epiw(mean, std, std_sum=0, std_scale=1, num_bins=100):
    """Expected Prediction Interval Width (sharpness), parametric case.

    Averages the width of the central Gaussian prediction interval over
    nominal coverage levels gamma in [0, 1], using the recalibrated std.

    Returns:
        (sharpness, gammas, mean width per gamma).
    """
    std_cal = np.sqrt(std_sum ** 2 + (std_scale * std) ** 2)
    gammas = np.linspace(0, 1, num_bins)
    widths = []
    for gamma in gammas:
        lo = mean + std_cal * probit((1 - gamma) / 2)
        hi = mean + std_cal * probit((1 + gamma) / 2)
        w = hi - lo
        # Non-finite widths are replaced by a fixed value of 10.
        # NOTE(review): 10 looks like an ad-hoc penalty -- confirm.
        w = np.ma.filled(np.ma.masked_invalid(w), fill_value=10)
        widths.append(w.mean())
    sharpness = np.ma.masked_invalid(widths).mean()
    return sharpness, gammas, widths
# sharpness for non-parametric approach
def compute_epiw_np(scores, std_sum=0, std_scale=1, num_bins=20):
    """Sharpness (mean interval width) for a sample-based predictor.

    For each nominal level gamma, the interval half-width per row is the
    recalibrated inter-quantile range of that row's samples, centred on
    the row median.

    Parameters:
        scores: 2-D array of samples, one row per prediction
            (quantiles/medians are taken along axis=1).
        std_sum, std_scale: affine recalibration of the quantile range.
        num_bins: number of gamma levels in [0, 1].

    Returns:
        (sharpness, gammas, mean width per gamma).
    """
    gammas = np.linspace(0, 1, num_bins)
    med = np.median(scores, axis=1)
    widths = []
    for gamma in gammas:
        hi_q = np.quantile(scores, (1 + gamma) / 2, axis=1)
        lo_q = np.quantile(scores, (1 - gamma) / 2, axis=1)
        half = (hi_q - lo_q) * std_scale + std_sum
        w = (med + half) - (med - half)
        # Non-finite widths are replaced by a fixed value of 10.
        w = np.ma.filled(np.ma.masked_invalid(w), fill_value=10)
        widths.append(w.mean())
    sharpness = np.ma.masked_invalid(widths).mean()
    return sharpness, gammas, widths
def compute_epiw_np_base(mean, s, num_bins=20):
    """Baseline sharpness where interval width grows linearly as s*gamma.

    The interval around each prediction is [mean - s*gamma/2,
    mean + s*gamma/2], so its width is exactly s*gamma at every level.

    Parameters:
        mean: predicted centres (array-like).
        s: width scale (scalar or array broadcastable with mean).
        num_bins: number of gamma levels in [0, 1].

    Returns:
        (sharpness, gammas, mean width per gamma).
    """
    gammas = np.linspace(0, 1, num_bins)
    widths = []
    for gamma in gammas:
        half = s * gamma / 2
        w = np.array(mean + half) - np.array(mean - half)
        # Non-finite widths are replaced by a fixed value of 10.
        w = np.ma.filled(np.ma.masked_invalid(w), fill_value=10)
        widths.append(w.mean())
    sharpness = np.ma.masked_invalid(widths).mean()
    return sharpness, gammas, widths
def compute_mpiw(mean, std, std_sum=0, std_scale=1,
                 num_bins=100):
    """Maximum Prediction Interval Width, parametric case.

    Like ``compute_epiw`` but takes the maximum width per gamma and then
    the maximum over all gamma levels, so the result is the widest central
    Gaussian prediction interval seen anywhere.

    Returns:
        (max_width, gammas, max width per gamma).
    """
    std_cal = np.sqrt(std_sum ** 2 + (std_scale * std) ** 2)
    gammas = np.linspace(0, 1, num_bins)
    widths = []
    for gamma in gammas:
        lo = mean + std_cal * probit((1 - gamma) / 2)
        hi = mean + std_cal * probit((1 + gamma) / 2)
        w = hi - lo
        # Non-finite widths are replaced by a fixed value of 10.
        w = np.ma.filled(np.ma.masked_invalid(w), fill_value=10)
        widths.append(w.max())
    sharpness = np.ma.masked_invalid(widths).max()
    return sharpness, gammas, widths
| 37.850694
| 125
| 0.62453
| 1,528
| 10,901
| 4.26178
| 0.073953
| 0.042998
| 0.020885
| 0.038851
| 0.8707
| 0.81895
| 0.787162
| 0.766892
| 0.752457
| 0.721284
| 0
| 0.025559
| 0.257041
| 10,901
| 287
| 126
| 37.982578
| 0.778491
| 0.027429
| 0
| 0.725322
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072961
| false
| 0
| 0.021459
| 0.004292
| 0.167382
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be51208ae77daf2a279fd4f0027253f617531f29
| 134
|
py
|
Python
|
utils/__init__.py
|
sajith-rahim/papyrus
|
1f027274670b6492caaeb09e6ad6f80d2ebff390
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
sajith-rahim/papyrus
|
1f027274670b6492caaeb09e6ad6f80d2ebff390
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
sajith-rahim/papyrus
|
1f027274670b6492caaeb09e6ad6f80d2ebff390
|
[
"Apache-2.0"
] | null | null | null |
from .data_utils import *
from .device_utils import *
from .tracker_utils import *
from .os_utils import *
from .config_utils import *
| 26.8
| 28
| 0.783582
| 20
| 134
| 5
| 0.4
| 0.55
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141791
| 134
| 5
| 29
| 26.8
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
feb71db55174638cee1006116ccfd6e4a1b52d6f
| 17,555
|
py
|
Python
|
DCGANs.py
|
AidasLiaudanskas/Experiments_on_GANs
|
3047d44185d4e311e8a51dc7389737fca4436b94
|
[
"MIT"
] | null | null | null |
DCGANs.py
|
AidasLiaudanskas/Experiments_on_GANs
|
3047d44185d4e311e8a51dc7389737fca4436b94
|
[
"MIT"
] | null | null | null |
DCGANs.py
|
AidasLiaudanskas/Experiments_on_GANs
|
3047d44185d4e311e8a51dc7389737fca4436b94
|
[
"MIT"
] | null | null | null |
"""
File containing different DCGAN architectures
"""
import tensorflow as tf
import tflib as lib
from functools import partial
from helpers import Batchnorm, ResidualBlock
FLAGS = tf.app.flags.FLAGS
class DCGAN:
    """
    Class containing all the parameters necessary for G or D network construction.
    Useful for training many models in a row.

    Generator methods return a flattened image tensor; discriminator
    methods return (logits, pre_output features). All layer weights are
    created through the project's ``lib.ops`` wrappers, and most methods
    temporarily set the weight-init stdev to 0.02 around layer creation.
    """

    def __init__(self):
        # All widths default to the command-line flag; G and D can be
        # re-sized independently afterwards via the setters below.
        self.model_dim = self.G_dim = self.D_dim = FLAGS.model_dim
        self.OUTPUT_DIM = FLAGS.output_dim  # flattened output size (presumably H*W*C -- confirm)
        self.N_CH = FLAGS.n_ch  # number of image channels
        self.height = self.width = FLAGS.height  # square images assumed

    def set_dim(self, dim):
        # Set generator and discriminator base widths in one call.
        self.model_dim = self.G_dim = self.D_dim = dim

    def set_G_dim(self, dim):
        # Base channel width of the generator.
        self.G_dim = dim

    def get_G_dim(self):
        return self.G_dim

    def get_D_dim(self):
        return self.D_dim

    def set_D_dim(self, dim):
        # Base channel width of the discriminator.
        self.D_dim = dim

    def DCGANG_Mnist(self, n_samples, noise=None, bn=True, nonlinearity=tf.nn.relu):
        """
        Describes the Generator architecture for TF model.

        4x4x(4*dim) seed -> three deconvs (4d -> 2d -> d -> N_CH) -> tanh.
        Uses tf.layers batch_normalization (single-axis) when bn is True.
        Returns a [n_samples, OUTPUT_DIM] flattened image batch.
        """
        dim = self.G_dim
        # Layers created between set_/unset_weights_stdev use stdev 0.02.
        lib.ops.conv2d.set_weights_stdev(0.02)
        lib.ops.deconv2d.set_weights_stdev(0.02)
        lib.ops.linear.set_weights_stdev(0.02)
        batchnorm_tf = partial(
            tf.layers.batch_normalization, reuse=tf.AUTO_REUSE)
        if noise is None:
            noise = tf.random_normal([n_samples, 128])
        output = lib.ops.linear.Linear(
            'Generator.Input', 128, 4 * 4 * 4 * dim, noise)
        if FLAGS.data_format == "NHWC":
            bn_axis = 3
            output = tf.reshape(output, [-1, 4, 4, 4 * dim])
            bn_axes = [0, 1, 2]  # unused here; kept for symmetry with DCGANG_1
        else:
            output = tf.reshape(output, [-1, 4 * dim, 4, 4])
            bn_axes = [0, 2, 3]  # unused here; kept for symmetry with DCGANG_1
            bn_axis = 1
        print("Shape before batch_norm: ", output.shape)
        if bn:
            output = batchnorm_tf(
                output, name='Generator.BN1', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        print("Shape before Generator2: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.2', 4 * dim, 2 * dim, 5, output)
        if bn:
            # Batchnorm
            output = batchnorm_tf(
                output, name='Generator.BN2', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        print("Shape before Generator3: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.3', 2 * dim, 1 * dim, 5, output)
        if bn:
            output = batchnorm_tf(
                output, name='Generator.BN3', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        print("Shape before Generator4: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.4', 1 * dim, self.N_CH, 5, output)
        if bn:
            output = batchnorm_tf(
                output, name='Generator.BN4', fused=True, axis=bn_axis)
        print("Shape after Generator4: ", output.shape)
        # NOTE(review): nonlinearity (default relu) is applied right before
        # tanh here, unlike DCGANG_1 where the last deconv output goes
        # straight to tanh -- confirm this is intended.
        output = nonlinearity(output)
        output = tf.tanh(output)
        lib.ops.conv2d.unset_weights_stdev()
        lib.ops.deconv2d.unset_weights_stdev()
        lib.ops.linear.unset_weights_stdev()
        return tf.reshape(output, [-1, self.OUTPUT_DIM])

    def DCGAND_Mnist(self, inputs, bn=True, nonlinearity=tf.nn.relu):
        """
        Describes the Discriminator architecture for TF model.

        Four stride-2 convs (N_CH -> d -> 2d -> 4d -> 4d), then a linear
        head to a single logit. Returns (flat logits, pre_output features).
        """
        dim = self.D_dim
        batchnorm_tf = partial(tf.layers.batch_normalization,
                               reuse=tf.AUTO_REUSE)
        print("Discriminator inputs shape=", inputs.shape)
        if FLAGS.data_format == "NHWC":
            output = tf.reshape(
                inputs, [-1, self.height, self.width, self.N_CH])
            bn_axes = [0, 1, 2]  # unused; tf.layers BN takes a single axis
            bn_axis = 3
        else:
            output = tf.reshape(
                inputs, [-1, self.N_CH, self.height, self.width])
            bn_axes = [0, 2, 3]  # unused; tf.layers BN takes a single axis
            bn_axis = 1
        lib.ops.conv2d.set_weights_stdev(0.02)
        lib.ops.deconv2d.set_weights_stdev(0.02)
        lib.ops.linear.set_weights_stdev(0.02)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.1', self.N_CH, dim, 5, output, stride=2)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.2', dim, 2 * dim, 5, output, stride=2)
        if bn:
            output = batchnorm_tf(
                output, name='Discriminator.BN2', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.3', 2 * dim, 4 * dim, 5, output, stride=2)
        if bn:
            output = batchnorm_tf(
                output, name='Discriminator.BN3', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.4', 4 * dim, 4 * dim, 5, output, stride=2)
        if bn:
            output = batchnorm_tf(
                output, name='Discriminator.BN4', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        print("Disc output shape: ", output.shape)
        # Flattened features are kept as pre_output for downstream use.
        pre_output = output = tf.reshape(
            output, [FLAGS.batch_size, 4 * 4 * dim])
        output = lib.ops.linear.Linear(
            'Discriminator.Output', 4 * 4 * dim, 1, output)
        lib.ops.conv2d.unset_weights_stdev()
        lib.ops.deconv2d.unset_weights_stdev()
        lib.ops.linear.unset_weights_stdev()
        print("Discriminator output shape: ", tf.reshape(output, [-1]).shape)
        return tf.reshape(output, [-1]), pre_output

    def DCGANG_1(self, n_samples, noise=None, bn=True, nonlinearity=tf.nn.relu, verbose=False):
        """
        Describes the Generator architecture for TF model.

        4x4x(8*dim) seed -> four deconvs (8d -> 4d -> 2d -> d -> N_CH) -> tanh.
        Uses the project's custom Batchnorm helper (multi-axis bn_axes).
        Set verbose=True to print intermediate shapes.
        """
        dim = self.G_dim
        lib.ops.conv2d.set_weights_stdev(0.02)
        lib.ops.deconv2d.set_weights_stdev(0.02)
        lib.ops.linear.set_weights_stdev(0.02)
        # batchnorm_tf is created but the BN calls below use the custom
        # Batchnorm helper with bn_axes.
        batchnorm_tf = partial(
            tf.layers.batch_normalization, reuse=tf.AUTO_REUSE)
        if noise is None:
            noise = tf.random_normal([n_samples, 128])
        output = lib.ops.linear.Linear(
            'Generator.Input', 128, 4 * 4 * 8 * dim, noise)
        if FLAGS.data_format == "NHWC":
            bn_axis = 3
            output = tf.reshape(output, [-1, 4, 4, 8 * dim])
            bn_axes = [0, 1, 2]
        else:
            output = tf.reshape(output, [-1, 8 * dim, 4, 4])
            bn_axes = [0, 2, 3]
            bn_axis = 1
        if verbose:
            print("Shape before batch_norm: ", output.shape)
        if bn:
            output = Batchnorm('Generator.BN1', bn_axes, output)
        output = nonlinearity(output)
        if verbose:
            print("Shape before Generator2: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.2', 8 * dim, 4 * dim, 5, output)
        if bn:
            # Batchnorm
            output = Batchnorm('Generator.BN2', bn_axes, output)
        output = nonlinearity(output)
        if verbose:
            print("Shape before Generator3: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.3', 4 * dim, 2 * dim, 5, output)
        if bn:
            output = Batchnorm('Generator.BN3', bn_axes, output)
        output = nonlinearity(output)
        if verbose:
            print("Shape before Generator4: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.4', 2 * dim, dim, 5, output)
        if bn:
            output = Batchnorm('Generator.BN4', bn_axes, output)
        if verbose:
            print("Shape after Generator4: ", output.shape)
        output = nonlinearity(output)
        if verbose:
            print("Shape before Generator5: ", output.shape)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.5', dim, self.N_CH, 5, output)
        if verbose:
            print("Shape after Generator5: ", output.shape)
        output = tf.tanh(output)
        lib.ops.conv2d.unset_weights_stdev()
        lib.ops.deconv2d.unset_weights_stdev()
        lib.ops.linear.unset_weights_stdev()
        return tf.reshape(output, [-1, self.OUTPUT_DIM])

    def DCGANG_2(self, n_samples, noise=None, bn=True, nonlinearity=tf.nn.relu):
        """
        Describes the Generator architecture for TF model. Added an extra layer at the input
        Be aware of potential bug with batchn_norm axes
        """
        dim = self.G_dim
        lib.ops.conv2d.set_weights_stdev(0.02)
        lib.ops.deconv2d.set_weights_stdev(0.02)
        lib.ops.linear.set_weights_stdev(0.02)
        if noise is None:
            noise = tf.random_normal([n_samples, 128])
        # Wider 16*dim seed than DCGANG_1, with one extra deconv stage.
        output = lib.ops.linear.Linear(
            'Generator.Input', 128, 4 * 4 * 16 * dim, noise)
        if FLAGS.data_format == "NHWC":
            output = tf.reshape(output, [-1, 4, 4, 16 * dim])
            bn_axes = [0, 1, 2]
        else:
            output = tf.reshape(output, [-1, 16 * dim, 4, 4])
            bn_axes = [0, 2, 3]
        if bn:
            output = Batchnorm('Generator.BN1', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.1', 16 * dim, 8 * dim, 5, output)
        if bn:
            output = Batchnorm('Generator.BN1.1', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.2', 8 * dim, 4 * dim, 5, output)
        if bn:
            output = Batchnorm('Generator.BN2', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.3', 4 * dim, 2 * dim, 5, output)
        if bn:
            output = Batchnorm('Generator.BN3', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.4', 2 * dim, dim, 5, output)
        if bn:
            output = Batchnorm('Generator.BN4', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.deconv2d.Deconv2D(
            'Generator.5', dim, self.N_CH, 5, output)
        output = tf.tanh(output)
        lib.ops.conv2d.unset_weights_stdev()
        lib.ops.deconv2d.unset_weights_stdev()
        lib.ops.linear.unset_weights_stdev()
        return tf.reshape(output, [-1, self.OUTPUT_DIM])

    def GoodGenerator(self, n_samples, noise=None, dim=FLAGS.model_dim, nonlinearity=tf.nn.relu):
        """
        Taken directly from the code of WGAN-GP paper and modified a bit

        Residual generator: four up-sampling ResidualBlocks, then BN, relu,
        a 3-channel output conv and tanh.
        NOTE(review): the ``dim`` keyword argument is immediately
        overridden by self.G_dim below -- confirm intended.
        """
        batchnorm_tf = partial(tf.layers.batch_normalization,
                               reuse=tf.AUTO_REUSE)
        n_samples = int(n_samples)
        dim = self.G_dim
        if noise is None:
            noise = tf.random_normal([n_samples, 128])
        output = lib.ops.linear.Linear(
            'Generator.Input', 128, 4 * 4 * 8 * dim, noise)
        if FLAGS.data_format == "NHWC":
            bn_axis = 3
            output = tf.reshape(output, [-1, 4, 4, 8 * dim])
            bn_axes = [0, 1, 2]  # unused; BN below uses bn_axis
        else:
            output = tf.reshape(output, [-1, 8 * dim, 4, 4])
            bn_axes = [0, 2, 3]  # unused; BN below uses bn_axis
            bn_axis = 1
        output = ResidualBlock('Generator.Res1', 8 * dim,
                               8 * dim, 3, output, resample='up')
        output = ResidualBlock('Generator.Res2', 8 * dim,
                               4 * dim, 3, output, resample='up')
        output = ResidualBlock('Generator.Res3', 4 * dim,
                               2 * dim, 3, output, resample='up')
        output = ResidualBlock('Generator.Res4', 2 * dim,
                               1 * dim, 3, output, resample='up')
        # output = Batchnorm('Generator.OutputN', bn_axes, output)
        output = batchnorm_tf(
            output, name='Generator.OutputN', fused=True, axis=bn_axis)
        output = tf.nn.relu(output)
        output = lib.ops.conv2d.Conv2D(
            'Generator.Output', 1 * dim, 3, 3, output)
        output = tf.tanh(output)
        return tf.reshape(output, [-1, FLAGS.output_dim])

    def GoodDiscriminator(self, inputs):
        """Residual discriminator matching GoodGenerator.

        Four down-sampling ResidualBlocks, then a linear head to one logit.
        Returns (flat logits, pre_output features).
        NOTE(review): the input conv channel count is hard-coded to 3
        here rather than self.N_CH -- confirm intended.
        """
        dim = self.D_dim
        if FLAGS.data_format == "NHWC":
            output = tf.reshape(
                inputs, [-1, self.height, self.width, self.N_CH])
        else:
            output = tf.reshape(
                inputs, [-1, self.N_CH, self.height, self.width])
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.Input', 3, dim, 3, output, he_init=False)
        output = ResidualBlock('Discriminator.Res1', dim,
                               2 * dim, 3, output, resample='down')
        output = ResidualBlock('Discriminator.Res2', 2 * dim,
                               4 * dim, 3, output, resample='down')
        output = ResidualBlock('Discriminator.Res3', 4 * dim,
                               8 * dim, 3, output, resample='down')
        output = ResidualBlock('Discriminator.Res4', 8 * dim,
                               8 * dim, 3, output, resample='down')
        pre_output = output = tf.reshape(output, [-1, 4 * 4 * 8 * dim])
        output = lib.ops.linear.Linear(
            'Discriminator.Output', 4 * 4 * 8 * dim, 1, output)
        return tf.reshape(output, [-1]), pre_output

    def DCGAND_1(self, inputs, bn=True, nonlinearity=tf.nn.relu):
        """
        Describes the Discriminator architecture for TF model.

        Four stride-2 convs (N_CH -> d -> 2d -> 4d -> 8d); the linear head
        infers its input width from the flattened feature shape, so this
        works for different image sizes. Returns (flat logits, pre_output).
        """
        dim = self.D_dim
        batchnorm_tf = partial(tf.layers.batch_normalization,
                               reuse=tf.AUTO_REUSE)
        if FLAGS.data_format == "NHWC":
            output = tf.reshape(
                inputs, [-1, self.height, self.width, self.N_CH])
            bn_axes = [0, 1, 2]  # unused; tf.layers BN takes a single axis
            bn_axis = 3
        else:
            output = tf.reshape(
                inputs, [-1, self.N_CH, self.height, self.width])
            bn_axes = [0, 2, 3]  # unused; tf.layers BN takes a single axis
            bn_axis = 1
        lib.ops.conv2d.set_weights_stdev(0.02)
        lib.ops.deconv2d.set_weights_stdev(0.02)
        lib.ops.linear.set_weights_stdev(0.02)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.1', self.N_CH, dim, 5, output, stride=2)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.2', dim, 2 * dim, 5, output, stride=2)
        if bn:
            output = batchnorm_tf(
                output, name='Discriminator.BN2', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.3', 2 * dim, 4 * dim, 5, output, stride=2)
        if bn:
            output = batchnorm_tf(
                output, name='Discriminator.BN3', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.4', 4 * dim, 8 * dim, 5, output, stride=2)
        if bn:
            output = batchnorm_tf(
                output, name='Discriminator.BN4', fused=True, axis=bn_axis)
        output = nonlinearity(output)
        pre_output = output = tf.reshape(output, [FLAGS.batch_size, -1])
        # Static flattened width drives the linear layer's input size.
        width = output.shape.as_list()[1]
        output = lib.ops.linear.Linear(
            'Discriminator.Output', width, 1, output)
        lib.ops.conv2d.unset_weights_stdev()
        lib.ops.deconv2d.unset_weights_stdev()
        lib.ops.linear.unset_weights_stdev()
        return tf.reshape(output, [-1]), pre_output

    def DCGAND_2(self, inputs, bn=True, nonlinearity=tf.nn.relu):
        """
        Describes the Discriminator architecture for TF model. Added an extra layer at the output

        Five stride-2 convs (N_CH -> d -> 2d -> 4d -> 8d -> 16d) with the
        custom Batchnorm helper, then a linear head to a single logit.
        NOTE(review): the final conv has 16*dim channels but the features
        are reshaped to [-1, 4*4*dim] before the linear head, which folds
        the remaining factor into the batch axis -- confirm intended.
        """
        dim = self.D_dim
        if FLAGS.data_format == "NHWC":
            output = tf.reshape(
                inputs, [-1, self.height, self.width, self.N_CH])
            bn_axes = [0, 1, 2]
        else:
            output = tf.reshape(
                inputs, [-1, self.N_CH, self.height, self.width])
            bn_axes = [0, 2, 3]
        lib.ops.conv2d.set_weights_stdev(0.02)
        lib.ops.deconv2d.set_weights_stdev(0.02)
        lib.ops.linear.set_weights_stdev(0.02)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.1', self.N_CH, dim, 5, output, stride=2)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.2', dim, 2 * dim, 5, output, stride=2)
        if bn:
            output = Batchnorm('Discriminator.BN2', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.3', 2 * dim, 4 * dim, 5, output, stride=2)
        if bn:
            output = Batchnorm('Discriminator.BN3', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.4', 4 * dim, 8 * dim, 5, output, stride=2)
        if bn:
            output = Batchnorm('Discriminator.BN4', bn_axes, output)
        output = nonlinearity(output)
        output = lib.ops.conv2d.Conv2D(
            'Discriminator.5', 8 * dim, 16 * dim, 5, output, stride=2)
        if bn:
            output = Batchnorm('Discriminator.BN5', bn_axes, output)
        output = nonlinearity(output)
        pre_output = output = tf.reshape(output, [-1, 4 * 4 * dim])
        output = lib.ops.linear.Linear(
            'Discriminator.Output', 4 * 4 * dim, 1, output)
        lib.ops.conv2d.unset_weights_stdev()
        lib.ops.deconv2d.unset_weights_stdev()
        lib.ops.linear.unset_weights_stdev()
        return tf.reshape(output, [-1]), pre_output
| 38.163043
| 97
| 0.564227
| 2,176
| 17,555
| 4.439338
| 0.072151
| 0.044099
| 0.050932
| 0.041304
| 0.881574
| 0.87619
| 0.841097
| 0.829607
| 0.774017
| 0.731884
| 0
| 0.038862
| 0.315466
| 17,555
| 459
| 98
| 38.246187
| 0.765
| 0.042381
| 0
| 0.773842
| 0
| 0
| 0.085174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038147
| false
| 0
| 0.010899
| 0.00545
| 0.079019
| 0.040872
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2299ab39ae62895b22bd5afe9434785b649ae968
| 6,657
|
py
|
Python
|
graphtheory/eulerian/tests/test_euler.py
|
gitter-badger/graphs-dict
|
2be1a5b140feb050eec799d6cadf6de5eef01745
|
[
"BSD-3-Clause"
] | 36
|
2015-09-20T20:55:39.000Z
|
2021-09-20T05:49:03.000Z
|
graphtheory/eulerian/tests/test_euler.py
|
gitter-badger/graphs-dict
|
2be1a5b140feb050eec799d6cadf6de5eef01745
|
[
"BSD-3-Clause"
] | 6
|
2016-03-25T21:41:46.000Z
|
2020-02-12T03:18:59.000Z
|
graphtheory/eulerian/tests/test_euler.py
|
gitter-badger/graphs-dict
|
2be1a5b140feb050eec799d6cadf6de5eef01745
|
[
"BSD-3-Clause"
] | 9
|
2016-09-12T07:57:27.000Z
|
2022-03-21T16:15:39.000Z
|
#!/usr/bin/python
import unittest
from graphtheory.structures.edges import Edge
from graphtheory.structures.graphs import Graph
from graphtheory.eulerian.euler import EulerianCycleDFS
from graphtheory.eulerian.euler import EulerianCycleDFSWithEdges
# 0 --- 1 2
# | | / |
# | | / |
# | | / |
# 3 --- 4 --- 5
class TestEulerianCycleUndirected(unittest.TestCase):
    """Eulerian-cycle algorithms on a 6-node undirected graph."""

    def setUp(self):
        self.N = 6          # number of nodes
        self.G = Graph(self.N, directed=False)
        self.nodes = range(self.N)
        self.edges = [
            Edge(0, 1), Edge(0, 3), Edge(1, 4), Edge(3, 4), Edge(4, 2),
            Edge(4, 5), Edge(2, 5)]
        for vertex in self.nodes:
            self.G.add_node(vertex)
        for link in self.edges:
            self.G.add_edge(link)

    def test_euler_dfs(self):
        algo = EulerianCycleDFS(self.G)
        algo.run(0)
        # A closed walk visiting every edge once has |E| + 1 nodes.
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges) + 1)
        self.assertEqual(algo.eulerian_cycle, [0, 1, 4, 2, 5, 4, 3, 0])

    def test_euler_dfs_with_edges(self):
        algo = EulerianCycleDFSWithEdges(self.G)
        algo.run(0)
        # Same cycle as above, expressed as a list of edges.
        expected = [
            Edge(0, 1), Edge(1, 4), Edge(4, 2), Edge(2, 5), Edge(5, 4),
            Edge(4, 3), Edge(3, 0)]
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges))
        self.assertEqual(algo.eulerian_cycle, expected)

    def test_eulerian(self):
        # One extra edge creates odd-degree vertices -> not Eulerian.
        self.G.add_edge(Edge(1, 2))
        self.assertRaises(ValueError, EulerianCycleDFS, self.G)
        self.assertRaises(ValueError, EulerianCycleDFSWithEdges, self.G)

    def tearDown(self):
        pass
# 0 --- 1 2 --- 3
# \ | | /
# \ | | /
# \ | | /
# 4 --- 5
# | /
# | /
# | /
# 6
class TestEulerianCycleUndirected2(unittest.TestCase):
    """Eulerian-cycle algorithms on a 7-node undirected graph."""

    def setUp(self):
        self.N = 7          # number of nodes
        self.G = Graph(self.N, directed=False)
        self.nodes = range(self.N)
        self.edges = [
            Edge(0, 1), Edge(0, 4), Edge(1, 4), Edge(2, 3), Edge(2, 5),
            Edge(3, 5), Edge(4, 5), Edge(4, 6), Edge(5, 6)]
        for vertex in self.nodes:
            self.G.add_node(vertex)
        for link in self.edges:
            self.G.add_edge(link)

    def test_euler_dfs(self):
        algo = EulerianCycleDFS(self.G)
        algo.run(6)
        # A closed walk visiting every edge once has |E| + 1 nodes.
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges) + 1)
        self.assertEqual(algo.eulerian_cycle, [6, 4, 0, 1, 4, 5, 2, 3, 5, 6])

    def test_euler_dfs_with_edges(self):
        algo = EulerianCycleDFSWithEdges(self.G)
        algo.run(6)
        # Same cycle as above, expressed as a list of edges.
        expected = [
            Edge(6, 4), Edge(4, 0), Edge(0, 1), Edge(1, 4),
            Edge(4, 5), Edge(5, 2), Edge(2, 3), Edge(3, 5), Edge(5, 6)]
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges))
        self.assertEqual(algo.eulerian_cycle, expected)

    def test_eulerian(self):
        # One extra edge creates odd-degree vertices -> not Eulerian.
        self.G.add_edge(Edge(1, 2))
        self.assertRaises(ValueError, EulerianCycleDFS, self.G)
        self.assertRaises(ValueError, EulerianCycleDFSWithEdges, self.G)

    def tearDown(self):
        pass
# 0 --o 1 2
# o | / o
# | | / |
# | o o |
# 3 o-- 4 --o 5
class TestEulerianCycleDirected(unittest.TestCase):
    """Eulerian-cycle algorithms on a 6-node directed graph."""

    def setUp(self):
        self.N = 6          # number of nodes
        self.G = Graph(self.N, directed=True)
        self.nodes = range(self.N)
        self.edges = [
            Edge(0, 1), Edge(3, 0), Edge(1, 4), Edge(4, 3), Edge(2, 4),
            Edge(4, 5), Edge(5, 2)]
        for vertex in self.nodes:
            self.G.add_node(vertex)
        for link in self.edges:
            self.G.add_edge(link)

    def test_euler_dfs(self):
        algo = EulerianCycleDFS(self.G)
        algo.run(0)
        # A closed walk visiting every edge once has |E| + 1 nodes.
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges) + 1)
        self.assertEqual(algo.eulerian_cycle, [0, 1, 4, 5, 2, 4, 3, 0])

    def test_euler_dfs_with_edges(self):
        algo = EulerianCycleDFSWithEdges(self.G)
        algo.run(0)
        # Same cycle as above, expressed as a list of edges.
        expected = [
            Edge(0, 1), Edge(1, 4), Edge(4, 5), Edge(5, 2), Edge(2, 4),
            Edge(4, 3), Edge(3, 0)]
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges))
        self.assertEqual(algo.eulerian_cycle, expected)

    def test_eulerian(self):
        # One extra arc unbalances in/out degrees -> not Eulerian.
        self.G.add_edge(Edge(1, 2))
        self.assertRaises(ValueError, EulerianCycleDFS, self.G)
        self.assertRaises(ValueError, EulerianCycleDFSWithEdges, self.G)

    def tearDown(self):
        pass
# 0 --o 1 2 --o 3
# o | o /
# \ | | /
# \ o | o
# 4 --o 5
# o /
# | /
# | o
# 6
class TestEulerianCycleDirected2(unittest.TestCase):
    """Eulerian-cycle algorithms on a 7-node directed graph."""

    def setUp(self):
        self.N = 7          # number of nodes
        self.G = Graph(self.N, directed=True)
        self.nodes = range(self.N)
        self.edges = [
            Edge(0, 1), Edge(4, 0), Edge(1, 4), Edge(2, 3), Edge(5, 2),
            Edge(3, 5), Edge(4, 5), Edge(6, 4), Edge(5, 6)]
        for vertex in self.nodes:
            self.G.add_node(vertex)
        for link in self.edges:
            self.G.add_edge(link)

    def test_euler_dfs(self):
        algo = EulerianCycleDFS(self.G)
        algo.run(6)
        # A closed walk visiting every edge once has |E| + 1 nodes.
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges) + 1)
        self.assertEqual(algo.eulerian_cycle, [6, 4, 0, 1, 4, 5, 2, 3, 5, 6])

    def test_euler_dfs_with_edges(self):
        algo = EulerianCycleDFSWithEdges(self.G)
        algo.run(6)
        # Same cycle as above, expressed as a list of edges.
        expected = [
            Edge(6, 4), Edge(4, 0), Edge(0, 1), Edge(1, 4), Edge(4, 5),
            Edge(5, 2), Edge(2, 3), Edge(3, 5), Edge(5, 6)]
        self.assertEqual(len(algo.eulerian_cycle), len(self.edges))
        self.assertEqual(algo.eulerian_cycle, expected)

    def test_eulerian(self):
        # One extra arc unbalances in/out degrees -> not Eulerian.
        self.G.add_edge(Edge(1, 2))
        self.assertRaises(ValueError, EulerianCycleDFS, self.G)
        self.assertRaises(ValueError, EulerianCycleDFSWithEdges, self.G)

    def tearDown(self):
        pass
if __name__ == "__main__":
    unittest.main()  # run all test cases in this module
# EOF
| 33.621212
| 76
| 0.57263
| 897
| 6,657
| 4.156076
| 0.071349
| 0.042918
| 0.094421
| 0.021459
| 0.90397
| 0.876341
| 0.873391
| 0.852736
| 0.852736
| 0.852736
| 0
| 0.053053
| 0.286465
| 6,657
| 197
| 77
| 33.791878
| 0.731789
| 0.097942
| 0
| 0.793893
| 0
| 0
| 0.001341
| 0
| 0
| 0
| 0
| 0
| 0.183206
| 1
| 0.152672
| false
| 0.030534
| 0.038168
| 0
| 0.221374
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22e59062122b1df056be0f98fd8bcaec0fef6e1c
| 70,618
|
py
|
Python
|
cogent/src/codes/kernels/tc_code_kernel_load_inputs_details.py
|
Lcrypto/CGO2019-AE
|
cba7598b42f10eab655a8907a6db71094c1f558d
|
[
"BSD-4-Clause"
] | 4
|
2019-12-03T16:08:14.000Z
|
2020-08-26T16:38:54.000Z
|
cogent/src/codes/kernels/tc_code_kernel_load_inputs_details.py
|
Lcrypto/CGO2019-AE
|
cba7598b42f10eab655a8907a6db71094c1f558d
|
[
"BSD-4-Clause"
] | null | null | null |
cogent/src/codes/kernels/tc_code_kernel_load_inputs_details.py
|
Lcrypto/CGO2019-AE
|
cba7598b42f10eab655a8907a6db71094c1f558d
|
[
"BSD-4-Clause"
] | 1
|
2020-03-03T20:31:37.000Z
|
2020-03-03T20:31:37.000Z
|
import copy
import src.generators.tc_helper as tc_helper
import src.codes.kernels.tc_code_kernel_load_inputs_abstract as tc_code_kernel_load_inputs_abstract
from inspect import currentframe, getframeinfo
#
# [To-Do] Need to Make a Complete Algorithm about How to Load Inputs
#
def tc_gen_code_Kernel_Load_Inputs_Left(f, tensor_contraction, l_internal_idx, opt_load_t2, size_tb_x, size_tb_y, size_sm_p7, size_tb_ext, str_str_t2, num_internal_indices, idx_kernel, str_stride_int, opt_pre_computed, l_t3_mapping_tb_2D, l_t3_mapping_reg, l_t3_slices):
#
# Modulo (related to For-Statement)
#
opt_modulo = 1 # 1: TRUE // modulo operation is possible
# -1: FALSE // modulo operation is impossible
#
# To-Do: What is "opt_special??" This is identical to "opt_load_t2"
#
opt_special = -1
if num_internal_indices == 1:
idx_count = 0
for each_idx in tensor_contraction[0][4]:
if each_idx == l_internal_idx[0]:
if idx_count == 0:
opt_special = 1
idx_count = idx_count + 1
#
# w/o pre-computed
# This is not yet generalized.
#
str_input_addr_left = ""
if opt_pre_computed == -1:
f.write("\t\t\t// without pre-computed arrays\n")
#
l_tb_idx = list()
for each_axis in l_t3_mapping_tb_2D:
for each_idx in each_axis:
l_tb_idx.append(each_idx)
#
l_input_idx_left = tensor_contraction[0][4]
rev_l_input_idx_left = list(reversed(tensor_contraction[0][4]))
#
l_ext_tb_smem = list()
for idx_ext_tb in tensor_contraction[0][4]:
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, idx_ext_tb) == -1:
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, idx_ext_tb) == -1:
l_ext_tb_smem.append(idx_ext_tb)
#
opt_print = 1
#
# [1-1] TB_X -> K && "TB_Y -> E_A"
#
if opt_load_t2 == -1:
#
if opt_print == 1:
print ("[Code Generator][Kernel][Load Inputs-Left] opt_load_t2: ", opt_load_t2, ": TB_X -> E_A && TB_Y -> K")
#
# there are three ways to load inputs: [1] Directly
# [2] Swapped
# [3] Manually
#
method_load_t2 = 1
list_swappable_pair = list() # original -> alternative
#
# For "TB_Y -> E_A"
# # of Indices mapped on TB_Y == # of Indices mapped on SMEM
#
if len(l_t3_mapping_tb_2D[1]) == len(l_ext_tb_smem):
#
# Check if [1] or not
#
for each_idx in range(0, len(l_ext_tb_smem)):
print (">>> l_ext_tb_smem[each_idx]: ", l_ext_tb_smem[each_idx], ", l_t3_mapping_tb_2D[0][each_idx]: ", l_t3_mapping_tb_2D[1][each_idx])
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) != tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[1][each_idx]):
method_load_t2 = 2
#
# Check if [2] or not
#
if method_load_t2 != 1:
tmp_l_ext_tb_smem = copy.deepcopy(l_ext_tb_smem)
for each_tb_idx in l_t3_mapping_tb_2D[1]:
#
#
#
idx_count = 0
for each_input_idx in tmp_l_ext_tb_smem:
if tc_helper.tc_gen_helper_find(l_t3_slices, each_tb_idx) == tc_helper.tc_gen_helper_find(l_t3_slices, each_input_idx):
list_swappable_pair.append([each_input_idx, each_tb_idx])
tmp_l_ext_tb_smem.pop(idx_count)
#
idx_count = idx_count + 1
#
if len(tmp_l_ext_tb_smem) != 0:
method_load_t2 = 3
#else:
# print ("[LEFT][2] list_swappable_pair: ", list_swappable_pair)
#
del tmp_l_ext_tb_smem
else:
#
# [1]
#
for each_idx in range(0, len(l_ext_tb_smem)):
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) == tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[1][each_idx]):
list_swappable_pair.append([l_ext_tb_smem[each_idx], l_t3_mapping_tb_2D[1][each_idx]])
print ("[LEFT][IF][1] list_swappable_pair: ", list_swappable_pair)
#
# (3) //
#
else:
#print ("len(l_t3_mapping_tb_2D[1]) != len(l_ext_tb_smem): Need Extra Variables")
method_load_t2 = 3
#
print ("[Code Generator][Kernel][Load Inputs-LEFT] When we load input, we will use [", method_load_t2, "] method")
#
# Both (1) and (2)
#
if method_load_t2 < 3:
print ("[Code Generator][Kernel][Load Inputs-Left] Both (1) and (2) Cases")
#
# (3)
#
else:
print ("[Code Generator][Kernel][Load Inputs-Left] (3) Case")
print ("[Code Generator][Kernel][Load Inputs-Left] |TB_Y| = ", size_tb_y, ", |SMEM| = ", size_tb_ext)
#
# 1: [1]
# 2: [2]
#
opt_how_to_manually = 1
#
# [SMEM_X] How to Load Input Tensors on SMEM_X
#
int_num_idx_smem_y = 0
int_num_idx_tb_y = len(l_t3_mapping_tb_2D[1])
for each_idx in rev_l_input_idx_left:
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) == -1:
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) == -1:
print ("This Index is mapped on SMEM_Y Directly: ", each_idx, ", |idx| = ", tc_helper.tc_gen_helper_find(l_t3_slices, each_idx))
int_num_idx_smem_y += 1
#
# [TB_Y]
#
for each_idx in l_t3_mapping_tb_2D[1]:
print ("[TB_Y] ", each_idx, ", |idx| = ", tc_helper.tc_gen_helper_find(l_t3_slices, each_idx))
#
#
#
print ("# of External Indices mapped on SMEM_Y: ", int_num_idx_smem_y)
print ("# of External Indices mapped on TB_Y: ", int_num_idx_tb_y)
#
# [Manually]
#
if int_num_idx_smem_y == 1 and int_num_idx_tb_y == 1:
print ("[1] # of External Indices mapped on SMEM_Y == # of External Indices mapped on TB_Y == 1")
else:
print ("[2] Otherwise, we need temporal indices to indicate indices mapped on SMEM_Y")
f.write("\t\t\t// tmp tmp tmp\n")
#
#
#
idx_count = 0
for each_idx in rev_l_input_idx_left:
print ("rev_l_input_idx_left: ", rev_l_input_idx_left, ", each_idx: ", each_idx)
#
# [Current Ver.] only one index can mapped on REG
#
str_specific_idx = ""
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) != -1:
#
# index (ll) -> for-statement
#
str_specific_idx = "ll"
else:
if method_load_t2 == 1:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
elif method_load_t2 == 2:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
else:
print ("Need to Fix IT!")
str_specific_idx = "idx_#"
#
# Internal Index
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) != -1:
if idx_count == 0:
str_input_addr_left = ""
else:
str_input_addr_left = "(" + str_input_addr_left + ") * size_" + each_idx
#
# External Index
#
else:
if idx_count == 0:
str_input_addr_left = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx
else:
str_input_addr_left = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx + " + (" + str_input_addr_left + ") * size_" + each_idx
#
# Need to Skip for the Internal Index on SVI
#
idx_count = idx_count + 1
#
del list_swappable_pair
#print ("result: ", str_input_addr_left)
#
# [1-2] "TB_X -> EA" && TB_Y -> K
#
else:
#
if opt_print == 1:
print ("[Code Generator][Kernel][Load Inputs-Left] opt_load_t2: ", opt_load_t2, ": TB_X -> K && TB_Y -> E_A")
#
# there are three ways to load inputs: [1] Directly
# [2] Swapped
# [3] Manually
#
method_load_t2 = 1
list_swappable_pair = list()
if len(l_t3_mapping_tb_2D[0]) == len(l_ext_tb_smem):
#
# Check If [1] or not
#
#print ("Need To Check If [1] or not")
for each_idx in range(0, len(l_ext_tb_smem)):
print (">>> l_ext_tb_smem[each_idx]: ", l_ext_tb_smem[each_idx], ", l_t3_mapping_tb_2D[0][each_idx]: ", l_t3_mapping_tb_2D[0][each_idx])
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) != tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][each_idx]):
method_load_t2 = 2
#
# Check If [2] or not
#
if method_load_t2 != 1:
#print ("Need to Check If [2] or not")
tmp_l_ext_tb_smem = copy.deepcopy(l_ext_tb_smem)
for each_tb_idx in l_t3_mapping_tb_2D[0]:
#
#
#
idx_count = 0
for each_input_idx in tmp_l_ext_tb_smem:
if tc_helper.tc_gen_helper_find(l_t3_slices, each_tb_idx) == tc_helper.tc_gen_helper_find(l_t3_slices, each_input_idx):
list_swappable_pair.append([each_input_idx, each_tb_idx])
tmp_l_ext_tb_smem.pop(idx_count)
#
idx_count = idx_count + 1
#
if len(tmp_l_ext_tb_smem) != 0:
method_load_t2 = 3
#else:
#print ("[LEFT][2] list_swappable_pair: ", list_swappable_pair)
#
del tmp_l_ext_tb_smem
else:
for each_idx in range(0, len(l_ext_tb_smem)):
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) == tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][each_idx]):
list_swappable_pair.append([l_ext_tb_smem[each_idx], l_t3_mapping_tb_2D[0][each_idx]])
print ("[LEFT][ELSE][1] list_swappable_pair: ", list_swappable_pair)
else:
#
#
#
#print ("len(l_t3_mapping_tb_2D[0]) != len(l_ext_tb_smem): Need Extra Variables")
method_load_t2 = 3
#
print ("[Code Generator][Kernel][Load Inputs-LEFT] When we load input, we will use [", method_load_t2, "] method")
#
#
#
idx_count = 0
for each_idx in rev_l_input_idx_left:
print ("rev_l_input_idx_left: ", rev_l_input_idx_left, ", each_idx: ", each_idx)
#
# [Current Ver.] only one index can be mapped on REG
#
str_specific_idx = ""
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) != -1:
#
# index (ll) -> for-statement
#
str_specific_idx = "ll"
else:
if method_load_t2 == 1:
str_specific_idx = "idx_" + each_idx
elif method_load_t2 == 2:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
else:
print ("Need to Fix IT!!!!")
str_specific_idx = "idx_#"
#
# Internal Index
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) != -1:
if idx_count == 0:
str_input_addr_left = ""
else:
str_input_addr_left = "(" + str_input_addr_left + ") * size_" + each_idx
#
# External Index
#
else:
if idx_count == 0:
str_input_addr_left = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx
else:
str_input_addr_left = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx + " + (" + str_input_addr_left + ") * size_" + each_idx
#
# Need to Skip for the internal index on SVI.
#
idx_count = idx_count + 1
#
del list_swappable_pair
#
#
#
del l_tb_idx
del l_ext_tb_smem
#
# Load Tensor Inputs to sm_a[][]
# [1] The FVI in an Input is an Internal Index
#
if opt_load_t2 == -1:
#
# [1-1] |TB_X| < |T_k|: Need Multiple-Load-Instructions
#
if size_tb_x < size_sm_p7:
for inner_step in range(0, int(size_sm_p7 / size_tb_x)):
f.write("\t\t\tsm_a[threadIdx.x + " + str(int(inner_step * size_tb_x)) + "][threadIdx.y + ll * SIZE_TB_" + str(idx_kernel) + "_Y] = ")
f.write("dev_" + tensor_contraction[0][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_left)
else:
f.write("dev_" + tensor_contraction[0][3] + "_addr[threadIdx.y + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_Y")
f.write(" + blockIdx.x * (" + str_str_t2 + ")]")
#
# |K| > 1
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[0][3] + "_offset[(threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l)]];\n")
#
# |K| <= 1
#
else:
if opt_special == 1:
f.write(" + (threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l)]; // 1\n")
else:
#f.write(" + (threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l) * " + tensor_contraction[0][1] + "];\n")
f.write(" + (threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l) * " + str_stride_int + "]; // 2\n")
#
# [1-2] |TB_X| >= |T_K|:
#
else:
f.write("\t\t\t// |TB_X| >= |T_K|\n")
f.write("\t\t\tsm_a[threadIdx.x][threadIdx.y + ll * SIZE_TB_" + str(idx_kernel) + "_Y] = ") # LHS
f.write("dev_" + tensor_contraction[0][3] + "[") # RHS
#
if opt_pre_computed == -1:
f.write(str_input_addr_left)
else:
f.write("dev_" + tensor_contraction[0][3] + "_addr[threadIdx.y + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_Y")
f.write(" + blockIdx.x * (" + str_str_t2 + ")]")
#
# |K| > 1
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[0][3] + "_offset[threadIdx.x + l]];\n")
#
# |K| <= 1
#
else:
if opt_special == 1:
f.write(" + (threadIdx.x + l)]; // 5\n")
else:
#f.write(" + (threadIdx.x + l) * " + tensor_contraction[0][1] + "];\n")
f.write(" + (threadIdx.x + l) * " + str_stride_int + "]; // 4 \n")
#
# [2] The FVI in an Input is an External Index.
#
else:
#
#
#
if size_tb_y < size_sm_p7:
for inner_step in range(0, int(size_sm_p7 / size_tb_y)):
f.write("\t\t\tsm_a[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ")
f.write("dev_" + tensor_contraction[0][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_left)
else:
f.write("dev_" + tensor_contraction[0][3] + "_addr[threadIdx.x + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_X")
f.write(" + blockIdx.x * (" + str_str_t2 + ")]")
#
#
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[0][3] + "_offset[threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l)]; // 12\n")
else:
f.write(" + (threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l) * " + str_stride_int + "]; // 11\n")
else:
f.write("\t\t\tsm_a[threadIdx.y][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ")
f.write("dev_" + tensor_contraction[0][3] + "[")
#
if opt_pre_computed == -1:
f.write(str_input_addr_left)
else:
f.write("dev_" + tensor_contraction[0][3] + "_addr[threadIdx.x + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_X")
f.write(" + blockIdx.x * (" + str_str_t2 + ")]")
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[0][3] + "_offset[threadIdx.y + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.y + l)]; // 9\n")
else:
f.write(" + (threadIdx.y + l) * " + str_stride_int + "]; // 8\n")
#
#
#
def tc_gen_code_Kernel_Load_Inputs_Right(f, tensor_contraction, l_internal_idx, opt_load_v2, size_tb_x, size_tb_y, size_sm_p7, size_tb_ext, str_str_v2, num_internal_indices, idx_kernel, str_stride_int, opt_pre_computed, l_t3_mapping_tb_2D, l_t3_mapping_reg, opt_full_partial_ext, opt_full_partial_int, l_t3_slices):
#
# [DEBUG]
#
#frameinfo = getframeinfo(currentframe())
#print (frameinfo.filename, frameinfo.lineno)
print ("===============================================================================================")
print ("[Code Generator][Kernel][Load Input-Right] Start")
#
# Modulo (related to For-Statement)
#
opt_modulo = 1 # 1: TRUE // modulo operation is possible
# -1: FALSE // modulo operation is impossible
#
#
#
opt_special = -1
if num_internal_indices == 1:
idx_count = 0
for each_idx in tensor_contraction[1][4]:
if each_idx == l_internal_idx[0]:
if idx_count == 0:
opt_special = 1
idx_count = idx_count + 1
#
# [Option] w/o pre-computed arrays
#
str_input_addr_right = ""
if opt_pre_computed == -1:
f.write("\t\t\t// without pre-computed arrays (Right)\n")
#
l_tb_idx = list()
for each_axis in l_t3_mapping_tb_2D:
for each_idx in each_axis:
l_tb_idx.append(each_idx)
#
l_input_idx_right = tensor_contraction[1][4]
rev_l_input_idx_right = list(reversed(tensor_contraction[1][4]))
#
l_ext_tb_smem = list()
for idx_ext_tb in l_input_idx_right:
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, idx_ext_tb) == -1:
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, idx_ext_tb) == -1:
l_ext_tb_smem.append(idx_ext_tb)
#print ("l_ext_tb_smem: ", l_ext_tb_smem)
#
#
opt_print = 1
#
# [1-1] TB_X -> K && "TB_Y" -> E_B
#
if opt_load_v2 == -1:
#
if opt_print == 1:
print ("[Code Generator][Kernel][Load Input-Right] opt_load_v2: ", opt_load_v2, ": TB_X -> E_B && TB_Y -> K")
#
# (1) Directly
# (2) Swapped
# (3) Manually
#
method_load_v2 = 1
list_swappable_pair = list()
#
# For "TB_Y -> E_B,"
# # of Indices mapped on TB_Y == # of Indices mapped on SMEM
#
print ("l_t3_mapping_tb_2D[1]: ", l_t3_mapping_tb_2D[1])
print ("l_ext_tb_smem: ", l_ext_tb_smem)
if len(l_t3_mapping_tb_2D[1]) == len(l_ext_tb_smem):
#
# Check if (1) is or not
#
for each_idx in range(0, len(l_ext_tb_smem)):
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) != tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[1][each_idx]):
method_load_v2 = 2
#
# Check if (2) is or not
#
if method_load_v2 != 1:
tmp_l_ext_tb_smem = copy.deepcopy(l_ext_tb_smem)
for each_tb_idx in l_t3_mapping_tb_2D[1]:
#
#
#
idx_count = 0
for each_input_idx in tmp_l_ext_tb_smem:
if tc_helper.tc_gen_helper_find(l_t3_slices, each_tb_idx) == tc_helper.tc_gen_helper_find(l_t3_slices, each_input_idx):
list_swappable_pair.append([each_input_idx, each_tb_idx])
tmp_l_ext_tb_smem.pop(idx_count)
#
idx_count = idx_count + 1
#
if len(tmp_l_ext_tb_smem) != 0:
method_load_v2 = 3
#else:
#print ("[RIGHT][2] list_swappable_pair: ", list_swappable_pair)
#
del tmp_l_ext_tb_smem
else:
#
# (1)
#
for each_idx in range(0, len(l_ext_tb_smem)):
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) == tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[1][each_idx]):
list_swappable_pair.append([l_ext_tb_smem[each_idx], l_t3_mapping_tb_2D[1][each_idx]])
#print ("[RIGHT][1] list_swappable_pair: ", list_swappable_pair)
else:
print ("len(l_t3_mapping_tb_2D[1]) != len(l_ext_tb_smem): Need Extra Variables", len(l_t3_mapping_tb_2D[1]), ", ", len(l_ext_tb_smem))
method_load_v2 = 3
#
print ("[Code Generator][Kernel][Load Inputs-RIGHT] When we load input, we will use [", method_load_v2, "] method (if)")
#
#
#
idx_count = 0
for each_idx in rev_l_input_idx_right:
#
#
#
str_specific_idx = ""
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) != -1:
#
#
#
str_specific_idx = "ll"
else:
if method_load_v2 == 1:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
elif method_load_v2 == 2:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
else:
print ("method_load_v2 == 3: not yet supported")
str_specific_idx = "idx_#"
#
# Internal Idex
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) != -1:
if idx_count == 0:
str_input_addr_right = ""
else:
str_input_addr_right = "(" + str_input_addr_right + ") * size_" + each_idx
#
# External Index
#
else:
if idx_count == 0:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx
else:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx + " + (" + str_input_addr_right + ") * size_" + each_idx
#
#
#
idx_count = idx_count + 1
#
del list_swappable_pair
#
# [1-2] "TB_X" -> E_B && TB_Y -> K
#
else:
#
if opt_print == 1:
print ("[Code Generator][Kernel][Load Input-Right] opt_load_v2: ", opt_load_v2, ": TB_X -> K && TB_Y -> E_B")
#
# (1) Directly
# (2) Swapped
# (3) Manually (Partially Swapped or Manually)
#
method_load_v2 = 1
list_swappable_pair = list()
'''
if opt_print == 1:
#
idx_count = 0
for each_idx in l_t3_mapping_tb_2D[0]:
print ("TB_X[", idx_count ,"]: ", each_idx)
idx_count += 1
#
idx_count = 0
for each_idx in l_ext_tb_smem:
print ("SMEM_X[", idx_count, "]: ", each_idx)
print ("len(l_t3_mapping_tb_2D[0]) == len(l_ext_tb_smem): ", len(l_t3_mapping_tb_2D[0]), ", ", len(l_ext_tb_smem))
'''
#
# len(l_t3_mapping_tb_2D[0]): # of Indices mapped on x-axis (for the FVI)
# len(l_ext_tb_smem): # of External Indices except for indices mapped on Register Tiles.
#
if len(l_t3_mapping_tb_2D[0]) == len(l_ext_tb_smem):
#
# Check if (1) is or not
#
for each_idx in range(0, len(l_ext_tb_smem)):
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) != tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][each_idx]):
#
# (1) is not possible.
#
method_load_v2 = 2
#
# Check if (2) is or not
#
if method_load_v2 != 1:
#
# Need to Check if there are swappable indices or not.
#
tmp_l_ext_tb_smem = copy.deepcopy(l_ext_tb_smem)
for each_tb_idx in l_t3_mapping_tb_2D[0]:
#
#
#
idx_count = 0
for each_input_idx in tmp_l_ext_tb_smem:
if tc_helper.tc_gen_helper_find(l_t3_slices, each_tb_idx) == tc_helper.tc_gen_helper_find(l_t3_slices, each_input_idx):
list_swappable_pair.append([each_input_idx, each_tb_idx])
tmp_l_ext_tb_smem.pop(idx_count)
#
idx_count = idx_count + 1
#
# (3) Manually
#
if len(tmp_l_ext_tb_smem) != 0:
method_load_v2 = 3
#
del tmp_l_ext_tb_smem
#
# To Handle (1)
#
else:
#
# (1) Directly
# >>> output: list_swappable_pair
#
for each_idx in range(0, len(l_ext_tb_smem)):
if tc_helper.tc_gen_helper_find(l_t3_slices, l_ext_tb_smem[each_idx]) == tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][each_idx]):
list_swappable_pair.append([l_ext_tb_smem[each_idx], l_t3_mapping_tb_2D[0][each_idx]])
#
#print ("[RIGHT][1] list_swappable_pair: ", list_swappable_pair)
#
# Both (1) and (2) are Impossible
#
else:
#
# (3) Manually
#
method_load_v2 = 3
#
# After figuring out how to handle boundary cases according to tile-sizes and mappings,
# it needs to find a way to calculate the input tensor's addresses.
#
if opt_print == 1:
print ("[Code Generator][Kernel][Load Inputs-RIGHT] When we load input, we will use [", method_load_v2, "] method (else)")
print (">>> l_t3_slices: ", l_t3_slices)
print (">>> TB_X: ", l_t3_mapping_tb_2D[0])
print (">>> TB_Y: ", l_t3_mapping_tb_2D[1])
print (">>> REG_X: ", l_t3_mapping_reg[0])
print (">>> REG_Y: ", l_t3_mapping_reg[1])
print (">>> size_tb_ext: ", size_tb_ext, ", size_tb_x: ", size_tb_x, ", size_tb_y: ", size_tb_y, "size_sm_p7: ", size_sm_p7)
print (">>> list_swappable_pair: ", list_swappable_pair)
#
# (1) and (2)
#
if method_load_v2 < 3:
idx_count = 0
for each_idx in rev_l_input_idx_right:
print ("rev_l_input_idx_right: ", rev_l_input_idx_right, ", each_idx: ", each_idx)
#
# an index mapped on REG
# the others should be mapped on TB
#
str_specific_idx = ""
print ("tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx): ", tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx))
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) == -1:
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) != -1:
#
#
#
str_specific_idx = "ll"
else:
#
# (1) Directly
#
if method_load_v2 == 1:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
#
# (2) Swappable
#
elif method_load_v2 == 2:
str_specific_idx = "idx_" + str(tc_helper.tc_gen_helper_find(list_swappable_pair, each_idx))
#
# (3) Manually
#
else:
print ("==================================================================")
print ("method_load_v2 == 3: not yet supported")
print ("|TB_X| = ", size_tb_x, ", |SMEM_X| = ", size_tb_ext)
print ("==================================================================")
str_specific_idx = "idx_#"
#
#
#
else:
#
str_specific_idx = ""
#
# Internal Idex
# [To-Do] Multiple-Internal Indices
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) != -1:
if idx_count == 0:
str_input_addr_right = ""
else:
str_input_addr_right = "(" + str_input_addr_right + ") * size_" + each_idx
#
#
#
else:
print (">>>> ", each_idx, ", str_specific_idx: ", str_specific_idx)
if idx_count == 0:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx
else:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx + " + (" + str_input_addr_right + ") * size_" + each_idx
#
#
#
idx_count = idx_count + 1
#
del list_swappable_pair
#
# (3)
#
else:
print ("[Else]==================================================================")
print (" method_load_v2 == 3: not yet supported")
print (" |TB_X| = ", size_tb_x, ", |SMEM_X| = ", size_tb_ext)
print (" rev_l_input_idx_right: ", rev_l_input_idx_right)
print ("========================================================================")
#
# 1: [1]
# 2: [2]
#
opt_how_to_manually = 1
#
# [SMEM_X] How to Load Input Tensors on SMEM_X
#
int_num_idx_smem_x = 0
int_num_idx_tb_x = len(l_t3_mapping_tb_2D[0])
for each_idx in rev_l_input_idx_right:
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) == -1:
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) == -1:
print ("This Index is mapped on SMEM_X Directly: ", each_idx, ", |idx| = ", tc_helper.tc_gen_helper_find(l_t3_slices, each_idx))
int_num_idx_smem_x += 1
#
# [TB_X] Indices mapped on TB_X
#
for each_idx in l_t3_mapping_tb_2D[0]:
print ("[TB_X] ", each_idx, ", |idx| = ", tc_helper.tc_gen_helper_find(l_t3_slices, each_idx))
print ("# of External Indices mapped on SMEM_X: ", int_num_idx_smem_x)
print ("# of External Indices mapped on TB_X: ", int_num_idx_tb_x)
#
# [Manually]
# [1] If # of External Indices mapped on SMEM_X == # of Indices mapped on TB_X == 1,
# [2] Otherwise,
# [2-1] If (# of External Indices mapped on SMEM_X == # of Indices mapped on TB_X) > 1,
# : But, non-swappable
# : New Temporal Indices
# [2-2] Otherwise, # of External Indices mapped on SMEM_X != # of Indices mapped on TB_X,
# [2-2-1] # of Indices mapped on TB_X
# : Can be Swappable with something special
# [2-2-2] Others
# : New Temporal Indices
#
#
# [Question][1] If # of External Indices mapped on SMEM_X == # of Indices mapped on TB_X == 1
#
if int_num_idx_smem_x == 1 and int_num_idx_tb_x == 1:
print ("[1] If # of External Indices mapped on SMEM_X == # of Indices mapped on TB_X == 1,")
#
# (1) |TB_X| = |SMEM_X| // Should be Swappable (2)
# (2) |TB_X| > |SMEM_X| // Should handle differently between full-tiles and partial-tiles
# (3) |TB_X| < |SMEM_X| // Should handle differently between full-tiles and partial-tiles
# // In For-Statement, there will be multiple-load instructions.
#
print (" >>> |TB_X| = ", size_tb_x, ", |SMEM_X| = ", size_tb_ext)
#
# (2) |TB_X| > |SMEM_X|
# : Should handle differently between full-tiles and partial-tiles
# : The number of iterations in For-Statement can be reduced if |TB_X| % |SMEM_X| == 0.
# (2-1) Option #1 [Full-Tile]
# (2-2) Option #2 [Partial-Tile]
#
if size_tb_x > size_tb_ext:
print ("|TB_X| > |SMEM_X|")
#
# |TB_X| % |SMEM_X| == 0
#
if size_tb_x % size_tb_ext == 0:
print ("|TB_X| % |SMEM_X| == 0")
#
# |TB_X| % |SMEM_X| != 0
#
else:
print ("|TB_X| % |SMEM_X| != 0")
print (" >>> Not Yet Supported!")
opt_modulo = -1
#
# (3) |TB_X| < |SMEM_X|
# : Should handle differently between full-tiles and partial-tiles
# : The number of Load-Instructions can be increasedif |SMEM_X| % |TB_X| == 0.
# (3-1)
#
else:
print ("|TB_X| < |SMEM_X|")
#
# |SMEM_X| % |TB_X| == 0
#
if size_tb_ext % size_tb_x == 0:
print ("|SMEM_X| % |TB_X| == 0")
#
# |SMEM_X| & |TB_X| != 0
#
else:
print ("|SMEM_X| % |TB_X| != 0")
print (" >>> Not Yet Supported!")
opt_modulo = -1
#
# [Question][2] Otherwise, we need temporal indices to indicate indices mapped on SMEM_X
#
else:
print ("[2] Otherwise, we need temporal indices to indicate indices mapped on SMEM_X")
print (" >>> Not Yet Supported!")
opt_modulo = -1
opt_how_to_manually = 2
#
# [Solution][1] If |# of External Indices mapped on SMEM_X| == |# of Indices mapped on TB_X| == 1
#
if opt_how_to_manually == 1:
print ("[Solution][1] If # Ext. Idx. mapped on SMEM_X == # of Ext. Idx. mapped on TB_X == 1")
#
#
#
idx_count = 0
for each_idx in rev_l_input_idx_right:
str_specific_idx = ""
#
# For External Indices
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) == -1:
#
# Indices mapped on REG
#
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) != -1:
#
# (Outer-Loop)
#
if opt_modulo == 1 and opt_full_partial_ext == -1:
str_specific_idx = "(ll * " + str(int(size_tb_x / size_tb_ext)) + ") + (idx_" + l_t3_mapping_tb_2D[0][0] + " / SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + ")"
else:
str_specific_idx = "ll"
#
# Indices mapped on TB
#
else:
#
#
#
if opt_modulo == 1 and opt_full_partial_ext == -1:
str_specific_idx = "(idx_" + l_t3_mapping_tb_2D[0][0] + " % SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + ")"
else:
str_specific_idx = "idx_" + l_t3_mapping_tb_2D[0][0]
#
# For Internal Indices,
#
else:
#
str_specific_idx = ""
#
# Internal Index
# [To-Do] Multiple-Internal Indices
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) != -1:
if idx_count == 0:
str_input_addr_right = ""
else:
str_input_addr_right = "(" + str_input_addr_right + ") * size_" + each_idx
#
# External Index
#
else:
print (">>>> ", each_idx, ", str_specific_idx: ", str_specific_idx)
if idx_count == 0:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx
else:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx + " + (" + str_input_addr_right + ") * size_" + each_idx
#
#
#
idx_count = idx_count + 1
#
# [Solution][2] Otherwise, we need temporal indices to indicate indices mapped on SMEM_X
#
else:
print ("[Solution][2] Temporal Indices to Indicate Indices mapped on SMEM_X (Not Support Yet!)")
#
# [2-1] Partially Swapped
#
if int_num_idx_tb_x >= int_num_idx_smem_x:
print ("[Code Generator] Tried to Check if Partially-Swapped is Possible or not.")
opt_partially_swapped = -1 # -1: false, 0: true (=), 1: true (>)
idx_count = 0
for each_idx in rev_l_input_idx_right:
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) == -1: # external indices
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) == -1: # mapped on smem_x
print ("[smem] each_idx: ", each_idx, ", ", tc_helper.tc_gen_helper_find(l_t3_slices, each_idx))
print ("[tb_x] ", l_t3_mapping_tb_2D[0][idx_count], ", ", tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][idx_count]))
#
# First Index
#
if idx_count == 0:
if tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][idx_count]) >= tc_helper.tc_gen_helper_find(l_t3_slices, each_idx):
#
# 0: true (=)
#
if tc_helper.tc_gen_helper_find(l_t3_slices, each_idx) == tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][idx_count]):
print (" (if) 0: true (=)")
opt_partially_swapped = 0
#
# 1: true (>)
#
else:
print (" (if) 1: true (>)")
opt_partially_swapped = 1
#
list_swappable_pair.append([each_idx, l_t3_mapping_tb_2D[0][idx_count]])
#
# The Others (opt_partially_swapped == 0)
#
else:
#
# previous opt_partially_swapped should be "0"
#
if opt_partially_swapped != 0:
opt_partially_swapped = -1
else:
if tc_helper.tc_gen_helper_find(l_t3_slices, each_idx) >= tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][idx_count]):
if tc_helper.tc_gen_helper_find(l_t3_slices, each_idx) == tc_helper.tc_gen_helper_find(l_t3_slices, l_t3_mapping_tb_2D[0][idx_count]):
opt_partially_swapped = 0
else:
opt_partially_swapped = 1
#
list_swappable_pair.append([each_idx, l_t3_mapping_tb_2D[0][idx_count]])
else:
opt_partially_swapped = -1
#
idx_count += 1
print (">>> opt_partially_swapped: ", opt_partially_swapped)
print (">>> list_swappable_pair: ", list_swappable_pair)
#
# To Calculate Address
#
idx_count = 0
for each_idx in rev_l_input_idx_right:
str_specific_idx = ""
#
# >>>> Index Part
#
# External Indices
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) == -1:
#
# Indices mapped on REG
#
if tc_helper.tc_gen_helper_find_1d(l_t3_mapping_reg, each_idx) != -1:
#
# (Outer-Loop)
#
if opt_modulo == 1 and opt_full_partial_ext == -1:
str_specific_idx = "(ll * " + str(int(size_tb_x / size_tb_ext)) + ") + (idx_" + l_t3_mapping_tb_2D[0][0] + " / SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + ")"
else:
str_specific_idx = "ll"
#
# Indices mapped on TB
#
else:
#
#
#
if opt_modulo == 1 and opt_full_partial_ext == -1:
str_specific_idx = "(idx_" + l_t3_mapping_tb_2D[0][0] + " % SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + ")"
else:
str_specific_idx = "idx_" + l_t3_mapping_tb_2D[0][0]
#
# Internal Indices
#
else:
#
str_specific_idx = ""
#
# >>>> Address-Part
#
# Internal Index
#
if tc_helper.tc_gen_helper_find_1d(l_internal_idx, each_idx) != -1:
print ("[int] ", each_idx)
if idx_count == 0:
str_input_addr_right = ""
else:
str_input_addr_right = "(" + str_input_addr_right + ") * size_" + each_idx
#
# External Index
#
else:
print ("[ext] ", each_idx)
if idx_count == 0:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx
else:
str_input_addr_right = "blk_idx_" + each_idx + " * SIZE_SLICE_" + str(idx_kernel) + "_" + each_idx.capitalize() + " + " + str_specific_idx + " + (" + str_input_addr_right + ") * size_" + each_idx
#
idx_count += 1
#
# [2-2] Manually
#
else:
print ("[Code Generator] Manually (Not Yet)")
#
#
#
del l_ext_tb_smem
del l_tb_idx
#
# [Result]
#
print ("[Result] str_input_addr_right: ", str_input_addr_right)
#
# >>>
#
#
#
print ("----------------------------------------------------------------------------------------------------------------------------------")
#
# Load Tensor Inputs to sm_b[][]
# [1] The FVI in the Input-Right is an Interal Index
if opt_load_v2 == -1:
print ("[Option][Load-Input] v2 == -1: TB_X -(loads)-> K (internal) && TB_Y -(loads)-> E (external)")
print ("[Option][Load-Input] |TB_X| = ", size_tb_x, ", |T_K| = ", size_sm_p7)
#
# |TB_X| < |T_K|
#
if size_tb_x < size_sm_p7:
for inner_step in range(0, int(size_sm_p7 / size_tb_x)):
f.write("\t\t\tsm_b[threadIdx.x + " + str(int(inner_step * size_tb_x)) + "][threadIdx.y + ll * SIZE_TB_" + str(idx_kernel) + "_Y] = ")
f.write("dev_" + tensor_contraction[1][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_right)
else:
f.write("dev_" + tensor_contraction[1][3] + "_addr[threadIdx.y + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_Y")
f.write(" + blockIdx.x * (" + str_str_v2 + ")]")
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l)]; // 1\n")
else:
f.write(" + (threadIdx.x + " + str(int(inner_step * size_tb_x)) + " + l) * " + str_stride_int + "]; // 2\n")
#
# |TB_X| >= |T_K|
#
else:
#
# |TB_Y| < |E_TB|,
#
if size_tb_y < size_tb_ext:
for inner_step in range(0, int(size_tb_ext / size_tb_y)):
f.write("\t\t\tsm_b[threadIdx.x][threadIdx.y + " + str((int(inner_step * size_tb_y))) + " + ll * SIZE_TB_" + str(idx_kernel) + "_Y * " + str(int(size_tb_ext / size_tb_y)) + "] = ")
f.write("dev_" + tensor_contraction[1][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_right)
else:
f.write("dev_" + tensor_contraction[1][3] + "_addr[threadIdx.y + " + str((int(inner_step * size_tb_y))) + " + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_Y * " + str(int(size_tb_ext / size_tb_y)))
f.write(" + blockIdx.x * (" + str_str_v2 + ")]")
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.x + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.x + l)]; // FVI = Int.\n")
else:
#f.write(" + (threadIdx.x + l) * " + tensor_contraction[1][1] + "];\n")
f.write(" + (threadIdx.x + l) * " + str_stride_int + "]; // FVI = Ext.\n")
else:
f.write("\t\t\tsm_b[threadIdx.x][threadIdx.y + ll * SIZE_TB_" + str(idx_kernel) + "_Y] = ")
f.write("dev_" + tensor_contraction[1][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_right)
else:
f.write("dev_" + tensor_contraction[1][3] + "_addr[threadIdx.y + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_Y")
f.write(" + blockIdx.x * (" + str_str_v2 + ")]")
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.x + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.x + l)]; // FVI = Int.\n")
else:
#f.write(" + (threadIdx.x + l) * " + tensor_contraction[1][1] + "];\n")
f.write(" + (threadIdx.x + l) * " + str_stride_int + "]; // FVI = Ext.\n")
#
# [2] The FVI in the Input-Right is an External Index
# [Write CUDA Kernel]
#
else:
print ("[Option][Load-Input] v2 == 1: TB_X -(loads)-> E (external) && TB_Y -(loads)-> K (internal)")
print ("[Option][Load-Input] |TB_Y| = ", size_tb_y, " ?? |K| = ", size_sm_p7)
#
# |TB_Y| < |K|
# : Need to Load |K| / |TB_Y| times in the loop.
#
if size_tb_y < size_sm_p7:
print ("[Option][Load-Input] |TB_Y| < |K| >> [Solution] Need to Load |K|/|TB_Y| times in the loop")
print ("[Option][Load-Input] opt_full_partial_ext: ", opt_full_partial_ext) # ???
print ("[Option][Load-Input] Modulo-Option: ", opt_modulo) # ??
#
# [Modulo]
# : Full-Tile for External Indices
# [1] "opt_modulo == 1":
#
if opt_modulo == 1:# and opt_full_partial_ext == -1:
print ("[Solution] Modulo is possible and Full-Tile")
#
#
#
if opt_full_partial_ext == 1 and method_load_v2 > 2:
f.write("\t\t\tif (threadIdx.x < " + str(size_tb_ext) + ")\n")
f.write("\t\t\t{\n")
#
# Inner-Steps for Internal Indices
#
for inner_step in range(0, int(size_sm_p7 / size_tb_y)):
#
# |TB_X| == |SMEM_X|
#
if size_tb_ext == size_tb_x:
f.write("\t\t\tsm_b[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ")
#
# |TB_X| != |SMEM_X|
#
else:
#
# [Ext.] Full-Tile
#
if opt_full_partial_ext == -1:
f.write("\t\t\tsm_b[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ")
#
# [Ext.] Partial-Tile
#
else:
f.write("\t\t\tsm_b[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * " + str(size_tb_ext) + "] = ")
f.write("dev_" + tensor_contraction[1][3] + "[")
#
# [External Index]
#
if opt_pre_computed == -1:
f.write(str_input_addr_right)
else:
f.write("dev_" + tensor_contraction[1][3] + "_addr[threadIdx.x + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_X")
f.write(" + blockIdx.x * (" + str_str_v2 + ")]")
#
# [Internal Index]
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l]]; // 5\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l)]; // 3\n")
else:
f.write(" + (threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l) * " + str_stride_int + "]; // 4\n")
#
#
#
if opt_full_partial_ext == 1 and method_load_v2 > 2:
f.write("\t\t\t}\n")
#
# [2] "opt_modulo == -1":
#
else:
print ("[Solution] Modulo is impossible or Partial-Tile")
print ("[Solution] method_load_v2: ", method_load_v2)
print ("[Solution] opt_full_partial_ext: ", opt_full_partial_ext)
#
# Partially Swappable
#
opt_num_tabs = 3
if opt_partially_swapped == 1:
print ("[Option][Load-Input] partially swapped == 1")
print ("[Option][Load-Input] opt_full_partial_ext: ", opt_full_partial_ext, ", method_load_v2: ", method_load_v2)
if opt_full_partial_ext == 1 and method_load_v2 > 2:
#
#
#
tc_helper.tc_gen_helper_code_a_line(f, opt_num_tabs, "if (threadIdx.x < " + str(size_tb_ext) + ")", 1)
tc_helper.tc_gen_helper_code_a_line(f, opt_num_tabs, "{", 1)
opt_num_tabs += 1
#
# Inner-Steps for Internal Indices: |K| / |TB_Y|
#
for inner_step in range(0, int(size_sm_p7 / size_tb_y)):
#
# |TB_X| == |SMEM_X|
#
if size_tb_ext == size_tb_x:
f.write("\t\t\t// |TB_X| == |SMEM_X|\n")
f.write("\t\t\tsm_b[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ")
else:
#
# [Ext.] Full-Tile
#
if opt_full_partial_ext == -1:
tc_helper.tc_gen_helper_code_a_line(f, opt_num_tabs, "sm_b[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ", -1)
#
# [Ext.] Partial-Tile
#
else:
tc_helper.tc_gen_helper_code_a_line(f, opt_num_tabs, "sm_b[threadIdx.y + " + str(int(inner_step * size_tb_y)) + "][threadIdx.x + ll * " + str(size_tb_ext) + "] = ", -1)
#
#
#
tc_helper.tc_gen_helper_code_a_line(f, 0, "dev_" + tensor_contraction[1][3] + "[", -1)
#
# [External Index]
# [Option] With Pre-Computations
#
if opt_pre_computed == -1:
tc_helper.tc_gen_helper_code_a_line(f, 0, str_input_addr_right, -1)
#
# [Option] Without Pre-Computations
#
else:
tc_helper.tc_gen_helper_code_a_line(f, 0, "dev_" + tensor_contraction[1][3] + "_addr[threadIdx.x + ", -1)
tc_helper.tc_gen_helper_code_a_line(f, 0, "ll * " + "SIZE_TB_" + str(idx_kernel) + "_X", -1)
tc_helper.tc_gen_helper_code_a_line(f, 0, " + blockIdx.x * (" + str_str_v2 + ")]", -1)
#
# [Internal Index]
# [Option] Multiple Internal Indices
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l]]; // 7\n")
#
# [Option] Single Internal Index
#
else:
#
# [Option] The Internal Index is the FVI.
#
if opt_special == 1:
tc_helper.tc_gen_helper_code_a_line(f, 0, " + (threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l)]; // 5", 1)
#
# [Option] The Internal Index is not the FVI, requiring its computed strides.
#
else:
tc_helper.tc_gen_helper_code_a_line(f, 0, " + (threadIdx.y + " + str(int(inner_step * size_tb_y)) + " + l) * " + str_stride_int + "]; // 6", 1)
#
if opt_full_partial_ext == 1 and method_load_v2 > 2:
f.write("\t\t\t}\n")
#
# Totally Manually
#
else:
print ("[opt] partially swapped != 1")
f.write("\t\t\t// temp\n")
#
# |TB_Y| >= |K|
#
else:
print ("[Option] |TB_Y| >= |K|")
#
# ???
#
if size_tb_x < size_tb_ext:
for inner_step in range(0, int(size_tb_ext / size_tb_x)):
f.write("\t\t\tsm_b[threadIdx.y][threadIdx.x + (ll * " + str(int(size_tb_ext / size_tb_x)) + " + " + str(inner_step) + ") * SIZE_TB_" + str(idx_kernel) + "_X] = ")
f.write("dev_" + tensor_contraction[1][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_right)
else:
f.write("dev_" + tensor_contraction[1][3] + "_addr[threadIdx.x + ")
f.write("(ll * " + str(int(size_tb_ext / size_tb_x)) + " + " + str(inner_step) + ") * " + "SIZE_TB_" + str(idx_kernel) + "_X")
f.write(" + blockIdx.x * (" + str_str_v2 + ")]")
#
#
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.y + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.y + l)];\n")
else:
f.write(" + (threadIdx.y + l) * " + tensor_contraction[1][1] + "];\n")
else:
f.write("\t\t\tsm_b[threadIdx.y][threadIdx.x + ll * SIZE_TB_" + str(idx_kernel) + "_X] = ")
f.write("dev_" + tensor_contraction[1][3] + "[")
#
#
#
if opt_pre_computed == -1:
f.write(str_input_addr_right)
else:
f.write("dev_" + tensor_contraction[1][3] + "_addr[threadIdx.x + ")
f.write("ll * " + "SIZE_TB_" + str(idx_kernel) + "_X")
f.write(" + blockIdx.x * (" + str_str_v2 + ")]")
#
#
#
if num_internal_indices > 1:
f.write(" + const_internal_" + tensor_contraction[1][3] + "_offset[threadIdx.y + l]];\n")
else:
if opt_special == 1:
f.write(" + (threadIdx.y + l)];//666\n")
else:
#f.write(" + (threadIdx.y + l) * " + tensor_contraction[1][1] + "];\n")
f.write(" + (threadIdx.y + l) * " + str_stride_int + "]; // 555\n")
#
print ("[Code Generator][Kernel][Load Input-Right] End")
print ("===============================================================================================")
#
# 1. The FVI is an internal or an external index?
# 1.1. Internal
# : TB_X should load elements along the internal index.
# : TB_Y should load elements along the external index.
# 1.2. External
# : TB_X should load elements along the external index.
# : TB_Y should load elements along the internal index.
#
# 2. For 1.1. case, TB_X -(loads)-> K && TB_Y -(loads)-> E, where E \ REG
# 2.1. |TB_X| = |K|
# :
# 2.2. |TB_X| > |K|
# :
# 2.3. |TB_X| < |K|
# :
# 2.4. |TB_Y| = |E|
# :
# 2.5. |TB_Y| > |E|
# :
# 2.6. |TB_Y| < |E|
# :
#
# 3. For 1.2. case, TB_X -(loads)-> E && TB_Y -(loads)-> K, where E \ REG
#
| 47.909091
| 315
| 0.415192
| 7,500
| 70,618
| 3.5132
| 0.034667
| 0.042772
| 0.0315
| 0.04095
| 0.880489
| 0.843789
| 0.822156
| 0.783331
| 0.767202
| 0.746708
| 0
| 0.023939
| 0.472344
| 70,618
| 1,473
| 316
| 47.941616
| 0.683189
| 0.117775
| 0
| 0.751793
| 0
| 0.007174
| 0.133172
| 0.027159
| 0.008608
| 0
| 0
| 0
| 0
| 1
| 0.002869
| false
| 0
| 0.005739
| 0
| 0.008608
| 0.150646
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22f40930cf1de1d5ab1c34a4615327da66ea0cc3
| 194
|
py
|
Python
|
tunepy2/learners/factories/__init__.py
|
efortner/tunepy
|
28ab7aa0b851d42cf2a81a5573fb24b261daba89
|
[
"MIT"
] | null | null | null |
tunepy2/learners/factories/__init__.py
|
efortner/tunepy
|
28ab7aa0b851d42cf2a81a5573fb24b261daba89
|
[
"MIT"
] | null | null | null |
tunepy2/learners/factories/__init__.py
|
efortner/tunepy
|
28ab7aa0b851d42cf2a81a5573fb24b261daba89
|
[
"MIT"
] | null | null | null |
from tunepy2.learners.factories.sklearn_mlp_classifier_factory import SklearnMlpClassifierFactory
from tunepy2.learners.factories.sklearn_mlp_regressor_factory import SklearnMlpRegressorFactory
| 64.666667
| 97
| 0.927835
| 20
| 194
| 8.7
| 0.6
| 0.126437
| 0.218391
| 0.321839
| 0.436782
| 0.436782
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.041237
| 194
| 2
| 98
| 97
| 0.924731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a3db2d98904e177f584d6bbb396ccb938f934a28
| 455,611
|
py
|
Python
|
code/python/ETFProfileandPrices/v2/fds/sdk/ETFProfileandPrices/api/factset_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/ETFProfileandPrices/v2/fds/sdk/ETFProfileandPrices/api/factset_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/ETFProfileandPrices/v2/fds/sdk/ETFProfileandPrices/api/factset_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
Prime Developer Trial
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from multiprocessing.pool import ApplyResult
import typing
from fds.sdk.ETFProfileandPrices.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.ETFProfileandPrices.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.ETFProfileandPrices.exceptions import ApiException
from fds.sdk.ETFProfileandPrices.model.inline_response200 import InlineResponse200
from fds.sdk.ETFProfileandPrices.model.inline_response2001 import InlineResponse2001
from fds.sdk.ETFProfileandPrices.model.inline_response20010 import InlineResponse20010
from fds.sdk.ETFProfileandPrices.model.inline_response20011 import InlineResponse20011
from fds.sdk.ETFProfileandPrices.model.inline_response20012 import InlineResponse20012
from fds.sdk.ETFProfileandPrices.model.inline_response20013 import InlineResponse20013
from fds.sdk.ETFProfileandPrices.model.inline_response20014 import InlineResponse20014
from fds.sdk.ETFProfileandPrices.model.inline_response20015 import InlineResponse20015
from fds.sdk.ETFProfileandPrices.model.inline_response20016 import InlineResponse20016
from fds.sdk.ETFProfileandPrices.model.inline_response20017 import InlineResponse20017
from fds.sdk.ETFProfileandPrices.model.inline_response20018 import InlineResponse20018
from fds.sdk.ETFProfileandPrices.model.inline_response20019 import InlineResponse20019
from fds.sdk.ETFProfileandPrices.model.inline_response2002 import InlineResponse2002
from fds.sdk.ETFProfileandPrices.model.inline_response20020 import InlineResponse20020
from fds.sdk.ETFProfileandPrices.model.inline_response20021 import InlineResponse20021
from fds.sdk.ETFProfileandPrices.model.inline_response20022 import InlineResponse20022
from fds.sdk.ETFProfileandPrices.model.inline_response20023 import InlineResponse20023
from fds.sdk.ETFProfileandPrices.model.inline_response20024 import InlineResponse20024
from fds.sdk.ETFProfileandPrices.model.inline_response20025 import InlineResponse20025
from fds.sdk.ETFProfileandPrices.model.inline_response20026 import InlineResponse20026
from fds.sdk.ETFProfileandPrices.model.inline_response20027 import InlineResponse20027
from fds.sdk.ETFProfileandPrices.model.inline_response20028 import InlineResponse20028
from fds.sdk.ETFProfileandPrices.model.inline_response20029 import InlineResponse20029
from fds.sdk.ETFProfileandPrices.model.inline_response2003 import InlineResponse2003
from fds.sdk.ETFProfileandPrices.model.inline_response20030 import InlineResponse20030
from fds.sdk.ETFProfileandPrices.model.inline_response20031 import InlineResponse20031
from fds.sdk.ETFProfileandPrices.model.inline_response20032 import InlineResponse20032
from fds.sdk.ETFProfileandPrices.model.inline_response20033 import InlineResponse20033
from fds.sdk.ETFProfileandPrices.model.inline_response2004 import InlineResponse2004
from fds.sdk.ETFProfileandPrices.model.inline_response2005 import InlineResponse2005
from fds.sdk.ETFProfileandPrices.model.inline_response2006 import InlineResponse2006
from fds.sdk.ETFProfileandPrices.model.inline_response2007 import InlineResponse2007
from fds.sdk.ETFProfileandPrices.model.inline_response2008 import InlineResponse2008
from fds.sdk.ETFProfileandPrices.model.inline_response2009 import InlineResponse2009
class FactsetApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.get_factset_etf_allocation_asset_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2001,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/asset/listBySymbol',
'operation_id': 'get_factset_etf_allocation_asset_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_country_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2002,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/country/listBySymbol',
'operation_id': 'get_factset_etf_allocation_country_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_currency_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2003,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/currency/listBySymbol',
'operation_id': 'get_factset_etf_allocation_currency_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_economic_development_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2004,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/economicDevelopment/listBySymbol',
'operation_id': 'get_factset_etf_allocation_economic_development_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_exchange_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2005,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/exchange/listBySymbol',
'operation_id': 'get_factset_etf_allocation_exchange_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_industry_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2006,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/industry/listBySymbol',
'operation_id': 'get_factset_etf_allocation_industry_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_market_capitalization_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2007,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/marketCapitalization/listBySymbol',
'operation_id': 'get_factset_etf_allocation_market_capitalization_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_region_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2008,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/region/listBySymbol',
'operation_id': 'get_factset_etf_allocation_region_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_allocation_sector_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse2009,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/allocation/sector/listBySymbol',
'operation_id': 'get_factset_etf_allocation_sector_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_analytics_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20010,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/analytics/getBySymbol',
'operation_id': 'get_factset_etf_analytics_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_analytics_holdings_statistics_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20011,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/analytics/holdings/statistics/getBySymbol',
'operation_id': 'get_factset_etf_analytics_holdings_statistics_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_analytics_score_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20012,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/analytics/score/getBySymbol',
'operation_id': 'get_factset_etf_analytics_score_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_analytics_trade_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20013,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/analytics/trade/getBySymbol',
'operation_id': 'get_factset_etf_analytics_trade_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_characteristics_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20014,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/characteristics/getBySymbol',
'operation_id': 'get_factset_etf_characteristics_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_class_category_broad_list_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20016,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/class/category/broad/list',
'operation_id': 'get_factset_etf_class_category_broad_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'attributes',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
'attributes',
]
},
root_map={
'validations': {
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'attributes':
([str],),
},
'attribute_map': {
'attributes': '_attributes',
},
'location_map': {
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_class_category_focus_list_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20017,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/class/category/focus/list',
'operation_id': 'get_factset_etf_class_category_focus_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_class_category_niche_list_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20018,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/class/category/niche/list',
'operation_id': 'get_factset_etf_class_category_niche_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_class_geography_list_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20019,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/class/geography/list',
'operation_id': 'get_factset_etf_class_geography_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'attributes',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
'attributes',
]
},
root_map={
'validations': {
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'attributes':
([str],),
},
'attribute_map': {
'attributes': '_attributes',
},
'location_map': {
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_class_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20015,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/class/getBySymbol',
'operation_id': 'get_factset_etf_class_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_competitors_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20020,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/competitors/listBySymbol',
'operation_id': 'get_factset_etf_competitors_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_distribution_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20021,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/distribution/getBySymbol',
'operation_id': 'get_factset_etf_distribution_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_fund_flows_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20022,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/fundFlows/getBySymbol',
'operation_id': 'get_factset_etf_fund_flows_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse200,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/getBySymbol',
'operation_id': 'get_factset_etf_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_growth_of_ten_k_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20023,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/growthOfTenK/listBySymbol',
'operation_id': 'get_factset_etf_growth_of_ten_k_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'time_period',
'calculation_type',
'alignment',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
'time_period',
'calculation_type',
'alignment',
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
('time_period',): {
"1M": "1M",
"3M": "3M",
"6M": "6M",
"YTD": "YTD",
"1Y": "1Y",
"3Y": "3Y",
"5Y": "5Y",
"10Y": "10Y",
"SI": "SI"
},
('calculation_type',): {
"I": "I",
"SI": "SI"
},
('alignment',): {
"DAY": "day",
"WEEK-END": "week-end",
"MONTH-END": "month-end"
},
},
'openapi_types': {
'symbol':
(str,),
'time_period':
(str,),
'calculation_type':
(str,),
'alignment':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'time_period': 'timePeriod',
'calculation_type': 'calculationType',
'alignment': 'alignment',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'time_period': 'query',
'calculation_type': 'query',
'alignment': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_holdings_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20024,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/holdings/listBySymbol',
'operation_id': 'get_factset_etf_holdings_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_market_aggregates_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20025,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/marketAggregates/getBySymbol',
'operation_id': 'get_factset_etf_market_aggregates_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_premium_discount_summary_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20026,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/premiumDiscount/summary/listBySymbol',
'operation_id': 'get_factset_etf_premium_discount_summary_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'time_period',
'alignment',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
'time_period',
'alignment',
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
('time_period',): {
"YTD": "YTD",
"2Y": "2Y",
"3Y": "3Y",
"5Y": "5Y"
},
('alignment',): {
"MONTH-END": "month-end",
"QUARTER-END": "quarter-end",
"YEAR-END": "year-end"
},
},
'openapi_types': {
'symbol':
(str,),
'time_period':
(str,),
'alignment':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'time_period': 'timePeriod',
'alignment': 'alignment',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'time_period': 'query',
'alignment': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_price_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20027,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/price/getBySymbol',
'operation_id': 'get_factset_etf_price_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_returns_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20028,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/returns/getBySymbol',
'operation_id': 'get_factset_etf_returns_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'return_type',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
'return_type',
],
'validation': [
'symbol',
'return_type',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('return_type',): {
'max_length': 5,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
('return_type',): {
"PRICE": "price",
"NAV": "nav"
},
},
'openapi_types': {
'symbol':
(str,),
'return_type':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'return_type': 'returnType',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'return_type': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_strategy_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20029,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/strategy/getBySymbol',
'operation_id': 'get_factset_etf_strategy_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_strategy_segment_list_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20030,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/strategy/segment/list',
'operation_id': 'get_factset_etf_strategy_segment_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 500,
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_structure_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20031,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/structure/getBySymbol',
'operation_id': 'get_factset_etf_structure_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_taxes_and_fees_us_get_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20032,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/taxesAndFees/us/getBySymbol',
'operation_id': 'get_factset_etf_taxes_and_fees_us_get_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'attributes',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
],
'validation': [
'symbol',
'attributes',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
},
'allowed_values': {
},
'openapi_types': {
'symbol':
(str,),
'attributes':
([str],),
},
'attribute_map': {
'symbol': 'symbol',
'attributes': '_attributes',
},
'location_map': {
'symbol': 'query',
'attributes': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_factset_etf_time_series_list_by_symbol_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (InlineResponse20033,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/factset/etf/timeSeries/listBySymbol',
'operation_id': 'get_factset_etf_time_series_list_by_symbol',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'symbol',
'time_period',
'alignment',
'attributes',
'pagination_offset',
'pagination_limit',
],
'required': [
'symbol',
],
'nullable': [
],
'enum': [
'time_period',
'alignment',
],
'validation': [
'symbol',
'attributes',
'pagination_offset',
'pagination_limit',
]
},
root_map={
'validations': {
('symbol',): {
'max_length': 10,
},
('attributes',): {
'max_items': 50,
},
('pagination_offset',): {
'inclusive_minimum': 0,
},
('pagination_limit',): {
'inclusive_maximum': 1000,
'inclusive_minimum': 0,
},
},
'allowed_values': {
('time_period',): {
"1M": "1M",
"3M": "3M",
"6M": "6M",
"YTD": "YTD",
"1Y": "1Y",
"2Y": "2Y",
"3Y": "3Y",
"5Y": "5Y",
"10Y": "10Y",
"SI": "SI"
},
('alignment',): {
"DAY": "day",
"WEEK-END": "week-end",
"MONTH-END": "month-end"
},
},
'openapi_types': {
'symbol':
(str,),
'time_period':
(str,),
'alignment':
(str,),
'attributes':
([str],),
'pagination_offset':
(float,),
'pagination_limit':
(float,),
},
'attribute_map': {
'symbol': 'symbol',
'time_period': 'timePeriod',
'alignment': 'alignment',
'attributes': '_attributes',
'pagination_offset': '_paginationOffset',
'pagination_limit': '_paginationLimit',
},
'location_map': {
'symbol': 'query',
'time_period': 'query',
'alignment': 'query',
'attributes': 'query',
'pagination_offset': 'query',
'pagination_limit': 'query',
},
'collection_format_map': {
'attributes': 'csv',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
@staticmethod
def apply_kwargs_defaults(kwargs, return_http_data_only, async_req):
kwargs["async_req"] = async_req
kwargs["_return_http_data_only"] = return_http_data_only
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
def get_factset_etf_allocation_asset_list_by_symbol(
self,
symbol,
**kwargs
) -> InlineResponse2001:
"""This endpoint returns selected ETP's asset allocations. # noqa: E501
This endpoint returns selected ETP's allocations grouped by asset class. The response will be sorted by weight in descending order. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse2001
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_allocation_asset_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_asset_list_by_symbol_with_http_info(
self,
symbol,
**kwargs
) -> typing.Tuple[InlineResponse2001, int, typing.MutableMapping]:
"""This endpoint returns selected ETP's asset allocations. # noqa: E501
This endpoint returns selected ETP's allocations grouped by asset class. The response will be sorted by weight in descending order. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse2001
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_allocation_asset_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_asset_list_by_symbol_async(
self,
symbol,
**kwargs
) -> "ApplyResult[InlineResponse2001]":
"""This endpoint returns selected ETP's asset allocations. # noqa: E501
This endpoint returns selected ETP's allocations grouped by asset class. The response will be sorted by weight in descending order. # noqa: E501
This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[InlineResponse2001]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_allocation_asset_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_asset_list_by_symbol_with_http_info_async(
self,
symbol,
**kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2001, int, typing.MutableMapping]]":
"""This endpoint returns selected ETP's asset allocations. # noqa: E501
This endpoint returns selected ETP's allocations grouped by asset class. The response will be sorted by weight in descending order. # noqa: E501
This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(InlineResponse2001, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_allocation_asset_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_country_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2002:
    """Return the selected ETP's country allocations.  # noqa: E501

    Allocations are grouped by country name and sorted by weight in
    descending order. Synchronous call; returns only the deserialized
    response data.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        InlineResponse2002: the response object.
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_country_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_country_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2002, int, typing.MutableMapping]:
    """Return the selected ETP's country allocations.  # noqa: E501

    Allocations are grouped by country name and sorted by weight in
    descending order. Synchronous call; returns the deserialized data,
    HTTP status code, and response headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        tuple: (InlineResponse2002 response object, int HTTP status code,
        dict of response headers).
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_country_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_country_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2002]":
    """Return the selected ETP's country allocations.  # noqa: E501

    Allocations are grouped by country name and sorted by weight in
    descending order. Asynchronous call; returns the deserialized response
    data wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        ApplyResult[InlineResponse2002]
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=True
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_country_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_country_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2002, int, typing.MutableMapping]]":
    """Return the selected ETP's country allocations.  # noqa: E501

    Allocations are grouped by country name and sorted by weight in
    descending order. Asynchronous call; returns the deserialized data,
    HTTP status code, and response headers, wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        ApplyResult[(InlineResponse2002, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=True
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_country_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_currency_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2003:
    """Return the selected ETP's currency allocations.  # noqa: E501

    Allocations are grouped by currency and sorted by weight in descending
    order. Synchronous call; returns only the deserialized response data.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        InlineResponse2003: the response object.
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_currency_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_currency_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2003, int, typing.MutableMapping]:
    """Return the selected ETP's currency allocations.  # noqa: E501

    Allocations are grouped by currency and sorted by weight in descending
    order. Synchronous call; returns the deserialized data, HTTP status
    code, and response headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        tuple: (InlineResponse2003 response object, int HTTP status code,
        dict of response headers).
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_currency_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_currency_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2003]":
    """Return the selected ETP's currency allocations.  # noqa: E501

    Allocations are grouped by currency and sorted by weight in descending
    order. Asynchronous call; returns the deserialized response data
    wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        ApplyResult[InlineResponse2003]
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=True
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_currency_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_currency_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2003, int, typing.MutableMapping]]":
    """Return the selected ETP's currency allocations.  # noqa: E501

    Allocations are grouped by currency and sorted by weight in descending
    order. Asynchronous call; returns the deserialized data, HTTP status
    code, and response headers, wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        ApplyResult[(InlineResponse2003, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=True
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_currency_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_economic_development_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2004:
    """List allocations classified by a holding's economic development status.  # noqa: E501

    Classifications include e.g. developed, frontier, and emerging markets.
    Synchronous call; returns only the deserialized response data.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        InlineResponse2004: the response object.
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_economic_development_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_economic_development_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2004, int, typing.MutableMapping]:
    """List allocations classified by a holding's economic development status.  # noqa: E501

    Classifications include e.g. developed, frontier, and emerging markets.
    Synchronous call; returns the deserialized data, HTTP status code, and
    response headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        tuple: (InlineResponse2004 response object, int HTTP status code,
        dict of response headers).
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_economic_development_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_economic_development_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2004]":
    """List allocations classified by a holding's economic development status.  # noqa: E501

    Classifications include e.g. developed, frontier, and emerging markets.
    Asynchronous call; returns the deserialized response data wrapped in an
    ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        ApplyResult[InlineResponse2004]
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=True
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_economic_development_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_economic_development_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2004, int, typing.MutableMapping]]":
    """List allocations classified by a holding's economic development status.  # noqa: E501

    Classifications include e.g. developed, frontier, and emerging markets.
    Asynchronous call; returns the deserialized data, HTTP status code, and
    response headers, wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        ApplyResult[(InlineResponse2004, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=True
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_economic_development_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_exchange_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2005:
    """Return the selected ETP's exchange allocations.  # noqa: E501

    Allocations are grouped by exchange and sorted by weight in descending
    order. Synchronous call; returns only the deserialized response data.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        InlineResponse2005: the response object.
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=True, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_exchange_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_exchange_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2005, int, typing.MutableMapping]:
    """Return the selected ETP's exchange allocations.  # noqa: E501

    Allocations are grouped by exchange and sorted by weight in descending
    order. Synchronous call; returns the deserialized data, HTTP status
    code, and response headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit response attributes to this set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw urllib3.HTTPResponse
            without reading/decoding. Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server. Default True.
        _check_return_type (bool): Type-check data received from the server.
            Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic snake_case.
        _content_type (str/None): Force body content-type; None (default)
            predicts it from allowed content-types and body.
        _host_index (int/None): Index of the server to use. Default is read
            from the configuration.

    Returns:
        tuple: (InlineResponse2005 response object, int HTTP status code,
        dict of response headers).
    """
    self.apply_kwargs_defaults(
        kwargs=kwargs, return_http_data_only=False, async_req=False
    )
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_allocation_exchange_list_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_exchange_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2005]":
    """This endpoint returns selected ETP's exchange allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by exchanges. The response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[InlineResponse2005]
    """
    # Async variant: return only the deserialized data, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_exchange_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_exchange_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2005, int, typing.MutableMapping]]":
    """This endpoint returns selected ETP's exchange allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by exchanges. The response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[(InlineResponse2005, int, typing.Dict)]
    """
    # Async variant: return (data, status, headers) tuple, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_exchange_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_industry_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2006:
    """This endpoint returns selected ETP's industry allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by industry. The response will be sorted by weight in descending order. # noqa: E501
    This method makes a synchronous HTTP request. Returns the http data only
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        InlineResponse2006
            Response Object
    """
    # Synchronous variant: block until the response arrives, return data only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_industry_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_industry_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2006, int, typing.MutableMapping]:
    """This endpoint returns selected ETP's industry allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by industry. The response will be sorted by weight in descending order. # noqa: E501
    This method makes a synchronous HTTP request. Returns http data, http status and headers
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        InlineResponse2006
            Response Object
        int
            Http Status Code
        dict
            Dictionary of the response headers
    """
    # Synchronous variant: return the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_industry_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_industry_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2006]":
    """This endpoint returns selected ETP's industry allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by industry. The response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[InlineResponse2006]
    """
    # Async variant: return only the deserialized data, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_industry_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_industry_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2006, int, typing.MutableMapping]]":
    """This endpoint returns selected ETP's industry allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by industry. The response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[(InlineResponse2006, int, typing.Dict)]
    """
    # Async variant: return (data, status, headers) tuple, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_industry_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_market_capitalization_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2007:
    """List of allocations classified by a holding's total market capitalization. # noqa: E501
    List of allocations classified by a holding's total market capitalization (e.g. Small Cap, Mid Cap, Large Cap). Response will be sorted by weight in descending order. # noqa: E501
    This method makes a synchronous HTTP request. Returns the http data only
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        InlineResponse2007
            Response Object
    """
    # Synchronous variant: block until the response arrives, return data only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_market_capitalization_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_market_capitalization_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2007, int, typing.MutableMapping]:
    """List of allocations classified by a holding's total market capitalization. # noqa: E501
    List of allocations classified by a holding's total market capitalization (e.g. Small Cap, Mid Cap, Large Cap). Response will be sorted by weight in descending order. # noqa: E501
    This method makes a synchronous HTTP request. Returns http data, http status and headers
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        InlineResponse2007
            Response Object
        int
            Http Status Code
        dict
            Dictionary of the response headers
    """
    # Synchronous variant: return the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_market_capitalization_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_market_capitalization_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2007]":
    """List of allocations classified by a holding's total market capitalization. # noqa: E501
    List of allocations classified by a holding's total market capitalization (e.g. Small Cap, Mid Cap, Large Cap). Response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[InlineResponse2007]
    """
    # Async variant: return only the deserialized data, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_market_capitalization_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_market_capitalization_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2007, int, typing.MutableMapping]]":
    """List of allocations classified by a holding's total market capitalization. # noqa: E501
    List of allocations classified by a holding's total market capitalization (e.g. Small Cap, Mid Cap, Large Cap). Response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[(InlineResponse2007, int, typing.Dict)]
    """
    # Async variant: return (data, status, headers) tuple, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_market_capitalization_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_region_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse2008:
    """This endpoint returns selected ETP's region allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by region names. The response will be sorted by weight in descending order. # noqa: E501
    This method makes a synchronous HTTP request. Returns the http data only
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        InlineResponse2008
            Response Object
    """
    # Synchronous variant: block until the response arrives, return data only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_region_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_region_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse2008, int, typing.MutableMapping]:
    """This endpoint returns selected ETP's region allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by region names. The response will be sorted by weight in descending order. # noqa: E501
    This method makes a synchronous HTTP request. Returns http data, http status and headers
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        InlineResponse2008
            Response Object
        int
            Http Status Code
        dict
            Dictionary of the response headers
    """
    # Synchronous variant: return the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_region_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_region_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse2008]":
    """This endpoint returns selected ETP's region allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by region names. The response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[InlineResponse2008]
    """
    # Async variant: return only the deserialized data, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_region_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_region_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse2008, int, typing.MutableMapping]]":
    """This endpoint returns selected ETP's region allocations. # noqa: E501
    This endpoint returns selected ETP's allocations grouped by region names. The response will be sorted by weight in descending order. # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
    Args:
        symbol (str): Market symbol of ETP defined by FactSet.
    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
    Returns:
        ApplyResult[(InlineResponse2008, int, typing.Dict)]
    """
    # Async variant: return (data, status, headers) tuple, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_allocation_region_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_sector_list_by_symbol(self, symbol, **kwargs) -> InlineResponse2009:
    """Return the selected ETP's sector allocations (synchronous, data only).

    Allocations are grouped by sector name and sorted by weight in
    descending order. Blocks until the HTTP request completes and returns
    only the deserialized payload.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse2009: The deserialized response data.
    """
    # Synchronous call: return only the response payload.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_allocation_sector_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_sector_list_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse2009, int, typing.MutableMapping]:
    """Return the selected ETP's sector allocations with HTTP metadata (synchronous).

    Allocations are grouped by sector name and sorted by weight in
    descending order. Blocks until the HTTP request completes and returns
    the payload together with the HTTP status code and response headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse2009 response object, int HTTP status code,
        dict of response headers).
    """
    # Synchronous call: return (data, status, headers).
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_allocation_sector_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_sector_list_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse2009]":
    """Return the selected ETP's sector allocations (asynchronous, data only).

    Allocations are grouped by sector name and sorted by weight in
    descending order. Issues the HTTP request asynchronously and returns
    an ApplyResult wrapping the deserialized payload.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse2009]: Async handle to the response data.
    """
    # Asynchronous call: data only, wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_allocation_sector_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_allocation_sector_list_by_symbol_with_http_info_async(self, symbol, **kwargs) -> "ApplyResult[typing.Tuple[InlineResponse2009, int, typing.MutableMapping]]":
    """Return the selected ETP's sector allocations with HTTP metadata (asynchronous).

    Allocations are grouped by sector name and sorted by weight in
    descending order. Issues the HTTP request asynchronously and returns
    an ApplyResult wrapping (data, HTTP status, headers).

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional] Server default: 0.0.
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] Server default: 20.0.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[(InlineResponse2009, int, typing.Dict)]: Async handle
        to (response object, HTTP status code, response headers).
    """
    # Asynchronous call: (data, status, headers) wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_allocation_sector_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_get_by_symbol(self, symbol, **kwargs) -> InlineResponse20010:
    """Return FactSet proprietary analytics datapoints for an ETP (synchronous, data only).

    FactSet's proprietary analytical datapoints include ETP attributes
    specific to lending, corporate actions, and benchmarks. Blocks until
    the HTTP request completes and returns only the deserialized payload.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20010: The deserialized response data.
    """
    # Synchronous call: return only the response payload.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_get_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse20010, int, typing.MutableMapping]:
    """Return FactSet proprietary analytics datapoints for an ETP with HTTP metadata (synchronous).

    FactSet's proprietary analytical datapoints include ETP attributes
    specific to lending, corporate actions, and benchmarks. Blocks until
    the HTTP request completes and returns the payload together with the
    HTTP status code and response headers.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20010 response object, int HTTP status code,
        dict of response headers).
    """
    # Synchronous call: return (data, status, headers).
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_get_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse20010]":
    """Return FactSet proprietary analytics datapoints for an ETP (asynchronous, data only).

    FactSet's proprietary analytical datapoints include ETP attributes
    specific to lending, corporate actions, and benchmarks. Issues the
    HTTP request asynchronously and returns an ApplyResult wrapping the
    deserialized payload.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20010]: Async handle to the response data.
    """
    # Asynchronous call: data only, wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_get_by_symbol_with_http_info_async(self, symbol, **kwargs) -> "ApplyResult[typing.Tuple[InlineResponse20010, int, typing.MutableMapping]]":
    """Return FactSet proprietary analytics datapoints for an ETP with HTTP metadata (asynchronous).

    FactSet's proprietary analytical datapoints include ETP attributes
    specific to lending, corporate actions, and benchmarks. Issues the
    HTTP request asynchronously and returns an ApplyResult wrapping
    (data, HTTP status, headers).

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20010, int, typing.Dict)]: Async handle
        to (response object, HTTP status code, response headers).
    """
    # Asynchronous call: (data, status, headers) wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_holdings_statistics_get_by_symbol(self, symbol, **kwargs) -> InlineResponse20011:
    """Return FactSet's portfolio statistics for an ETP (synchronous, data only).

    FactSet calculates several proprietary portfolio statistics for ETPs
    including average maturity, credit quality, price/book ratio,
    price/earnings ratio, and dividend yield. Blocks until the HTTP
    request completes and returns only the deserialized payload.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20011: The deserialized response data.
    """
    # Synchronous call: return only the response payload.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_holdings_statistics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_holdings_statistics_get_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse20011, int, typing.MutableMapping]:
    """Return FactSet's portfolio statistics for an ETP with HTTP metadata (synchronous).

    FactSet calculates several proprietary portfolio statistics for ETPs
    including average maturity, credit quality, price/book ratio,
    price/earnings ratio, and dividend yield. Blocks until the HTTP
    request completes and returns the payload together with the HTTP
    status code and response headers.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20011 response object, int HTTP status code,
        dict of response headers).
    """
    # Synchronous call: return (data, status, headers).
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_holdings_statistics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_holdings_statistics_get_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse20011]":
    """Return FactSet's portfolio statistics for an ETP (asynchronous, data only).

    FactSet calculates several proprietary portfolio statistics for ETPs
    including average maturity, credit quality, price/book ratio,
    price/earnings ratio, and dividend yield. Issues the HTTP request
    asynchronously and returns an ApplyResult wrapping the deserialized
    payload.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20011]: Async handle to the response data.
    """
    # Asynchronous call: data only, wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_holdings_statistics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_holdings_statistics_get_by_symbol_with_http_info_async(self, symbol, **kwargs) -> "ApplyResult[typing.Tuple[InlineResponse20011, int, typing.MutableMapping]]":
    """Return FactSet's portfolio statistics for an ETP with HTTP metadata (asynchronous).

    FactSet calculates several proprietary portfolio statistics for ETPs
    including average maturity, credit quality, price/book ratio,
    price/earnings ratio, and dividend yield. Issues the HTTP request
    asynchronously and returns an ApplyResult wrapping (data, HTTP
    status, headers).

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20011, int, typing.Dict)]: Async handle
        to (response object, HTTP status code, response headers).
    """
    # Asynchronous call: (data, status, headers) wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_holdings_statistics_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_score_get_by_symbol(self, symbol, **kwargs) -> InlineResponse20012:
    """Return FactSet proprietary ETP rankings (synchronous, data only).

    FactSet calculates various proprietary fund rankings including unique
    scores, fund grades, segment averages, and recommendations. Blocks
    until the HTTP request completes and returns only the deserialized
    payload.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20012: The deserialized response data.
    """
    # Synchronous call: return only the response payload.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_score_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_score_get_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse20012, int, typing.MutableMapping]:
    """Return FactSet proprietary ETP rankings with HTTP metadata (synchronous).

    FactSet calculates various proprietary fund rankings including unique
    scores, fund grades, segment averages, and recommendations. Blocks
    until the HTTP request completes and returns the payload together
    with the HTTP status code and response headers.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20012 response object, int HTTP status code,
        dict of response headers).
    """
    # Synchronous call: return (data, status, headers).
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_score_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_score_get_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse20012]":
    """Return FactSet proprietary ETP rankings (asynchronous, data only).

    FactSet calculates various proprietary fund rankings including unique
    scores, fund grades, segment averages, and recommendations. Issues
    the HTTP request asynchronously and returns an ApplyResult wrapping
    the deserialized payload.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default None.
        _check_input_type (bool): Type-check data sent to the server.
            Default True.
        _check_return_type (bool): Type-check data received from the
            server. Default True.
        _spec_property_naming (bool): True if input variable names are the
            serialized (OpenAPI) names; False (default) for pythonic
            snake-case names.
        _content_type (str/None): Force the request body content-type;
            None (default) predicts it from allowed types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20012]: Async handle to the response data.
    """
    # Asynchronous call: data only, wrapped in an ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs["symbol"] = symbol
    return self.get_factset_etf_analytics_score_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_score_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20012, int, typing.MutableMapping]]":
    """FactSet proprietary ETP rankings. # noqa: E501

    FactSet calculates various proprietary fund rankings including unique scores, fund grades, segment averages, and recommendations. # noqa: E501

    Sends the request asynchronously and returns the response data, the
    HTTP status code, and the response headers, wrapped in an ApplyResult.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20012, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_analytics_score_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_trade_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20013:
    """Trade statistics for specific ETP. # noqa: E501

    Various metrics of an ETP's liquidity including creation metrics, premium/discount, spread, and tracking error statistics. # noqa: E501

    Sends the request synchronously and returns only the response data.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20013
            The response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_analytics_trade_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_trade_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20013, int, typing.MutableMapping]:
    """Trade statistics for specific ETP. # noqa: E501

    Various metrics of an ETP's liquidity including creation metrics, premium/discount, spread, and tracking error statistics. # noqa: E501

    Sends the request synchronously and returns the response data, the
    HTTP status code, and the response headers.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20013
            The response object.
        int
            The HTTP status code.
        dict
            Dictionary of the response headers.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_analytics_trade_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_trade_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20013]":
    """Trade statistics for specific ETP. # noqa: E501

    Various metrics of an ETP's liquidity including creation metrics, premium/discount, spread, and tracking error statistics. # noqa: E501

    Sends the request asynchronously and returns the response data wrapped
    in an ApplyResult.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20013]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_analytics_trade_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_analytics_trade_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20013, int, typing.MutableMapping]]":
    """Trade statistics for specific ETP. # noqa: E501

    Various metrics of an ETP's liquidity including creation metrics, premium/discount, spread, and tracking error statistics. # noqa: E501

    Sends the request asynchronously and returns the response data, the
    HTTP status code, and the response headers, wrapped in an ApplyResult.

    Args:
        symbol (str): Ticker-region of a security as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20013, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_analytics_trade_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_characteristics_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20014:
    """Retrieve basic characteristic information for a specified ETP. # noqa: E501

    An ETP has many unique characteristics specific to its composition that differentiate it from other products. This includes details on leverage, hedging, derivatives, and service providers. # noqa: E501

    Sends the request synchronously and returns only the response data.

    Args:
        symbol (str): Market symbol of ETF defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20014
            The response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_characteristics_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_characteristics_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20014, int, typing.MutableMapping]:
    """Retrieve basic characteristic information for a specified ETP. # noqa: E501

    An ETP has many unique characteristics specific to its composition that differentiate it from other products. This includes details on leverage, hedging, derivatives, and service providers. # noqa: E501

    Sends the request synchronously and returns the response data, the
    HTTP status code, and the response headers.

    Args:
        symbol (str): Market symbol of ETF defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20014
            The response object.
        int
            The HTTP status code.
        dict
            Dictionary of the response headers.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_characteristics_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_characteristics_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20014]":
    """Retrieve basic characteristic information for a specified ETP. # noqa: E501

    An ETP has many unique characteristics specific to its composition that differentiate it from other products. This includes details on leverage, hedging, derivatives, and service providers. # noqa: E501

    Sends the request asynchronously and returns the response data wrapped
    in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETF defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20014]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_characteristics_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_characteristics_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20014, int, typing.MutableMapping]]":
    """Retrieve basic characteristic information for a specified ETP. # noqa: E501

    An ETP has many unique characteristics specific to its composition that differentiate it from other products. This includes details on leverage, hedging, derivatives, and service providers. # noqa: E501

    Sends the request asynchronously and returns the response data, the
    HTTP status code, and the response headers, wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETF defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20014, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    endpoint = self.get_factset_etf_characteristics_get_by_symbol_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_broad_list(
    self,
    **kwargs
) -> InlineResponse20016:
    """List of ETP class broad categories. # noqa: E501

    List of ETP class broad categories. # noqa: E501

    Sends the request synchronously and returns only the response data.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20016
            The response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    endpoint = self.get_factset_etf_class_category_broad_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_broad_list_with_http_info(
    self,
    **kwargs
) -> typing.Tuple[InlineResponse20016, int, typing.MutableMapping]:
    """List of ETP class broad categories. # noqa: E501

    List of ETP class broad categories. # noqa: E501

    Sends the request synchronously and returns the response data, the
    HTTP status code, and the response headers.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20016
            The response object.
        int
            The HTTP status code.
        dict
            Dictionary of the response headers.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    endpoint = self.get_factset_etf_class_category_broad_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_broad_list_async(
    self,
    **kwargs
) -> "ApplyResult[InlineResponse20016]":
    """List of ETP class broad categories. # noqa: E501

    List of ETP class broad categories. # noqa: E501

    Sends the request asynchronously and returns the response data wrapped
    in an ApplyResult.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20016]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    endpoint = self.get_factset_etf_class_category_broad_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_broad_list_with_http_info_async(
    self,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20016, int, typing.MutableMapping]]":
    """List of ETP class broad categories. # noqa: E501

    List of ETP class broad categories. # noqa: E501

    Sends the request asynchronously and returns the response data, the
    HTTP status code, and the response headers, wrapped in an ApplyResult.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20016, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    endpoint = self.get_factset_etf_class_category_broad_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_focus_list(
    self,
    **kwargs
) -> InlineResponse20017:
    """List of ETP class focus categories. # noqa: E501

    List of ETP class focus categories. # noqa: E501

    Sends the request synchronously and returns only the response data.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip,
            or 0 (default). [optional] if omitted the server will use the
            default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] if omitted the server will use the default
            value of 20.0
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20017
            The response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    endpoint = self.get_factset_etf_class_category_focus_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_focus_list_with_http_info(
    self,
    **kwargs
) -> typing.Tuple[InlineResponse20017, int, typing.MutableMapping]:
    """List of ETP class focus categories. # noqa: E501

    List of ETP class focus categories. # noqa: E501

    Sends the request synchronously and returns the response data, the
    HTTP status code, and the response headers.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip,
            or 0 (default). [optional] if omitted the server will use the
            default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] if omitted the server will use the default
            value of 20.0
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        InlineResponse20017
            The response object.
        int
            The HTTP status code.
        dict
            Dictionary of the response headers.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    endpoint = self.get_factset_etf_class_category_focus_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_focus_list_async(
    self,
    **kwargs
) -> "ApplyResult[InlineResponse20017]":
    """List of ETP class focus categories. # noqa: E501

    List of ETP class focus categories. # noqa: E501

    Sends the request asynchronously and returns the response data wrapped
    in an ApplyResult.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip,
            or 0 (default). [optional] if omitted the server will use the
            default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional] if omitted the server will use the default
            value of 20.0
        _preload_content (bool): when False, the raw urllib3.HTTPResponse
            object is returned without reading/decoding the response data.
            Defaults to True.
        _request_timeout (int/float/tuple): timeout for this request; a
            single number is the total timeout, while a (connection, read)
            tuple sets each phase separately. Defaults to None.
        _check_input_type (bool): whether to type-check the data sent to
            the server. Defaults to True.
        _check_return_type (bool): whether to type-check the data received
            from the server. Defaults to True.
        _spec_property_naming (bool): True when variable names in the input
            data are the serialized names from the OpenAPI document; False
            when they are pythonic snake_case names (default).
        _content_type (str/None): force the body content-type. With the
            default of None the content-type is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use; by default it
            is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20017]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    endpoint = self.get_factset_etf_class_category_focus_list_endpoint
    return endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_focus_list_with_http_info_async(
self,
**kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20017, int, typing.MutableMapping]]":
"""List of ETP class focus categories. # noqa: E501
List of ETP class focus categories. # noqa: E501
This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(InlineResponse20017, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
return self.get_factset_etf_class_category_focus_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_niche_list(
self,
**kwargs
) -> InlineResponse20018:
"""List of ETP class niche categories. # noqa: E501
List of ETP class niche categories. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20018
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
return self.get_factset_etf_class_category_niche_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_niche_list_with_http_info(
self,
**kwargs
) -> typing.Tuple[InlineResponse20018, int, typing.MutableMapping]:
"""List of ETP class niche categories. # noqa: E501
List of ETP class niche categories. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20018
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
return self.get_factset_etf_class_category_niche_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_niche_list_async(
self,
**kwargs
) -> "ApplyResult[InlineResponse20018]":
"""List of ETP class niche categories. # noqa: E501
List of ETP class niche categories. # noqa: E501
This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[InlineResponse20018]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
return self.get_factset_etf_class_category_niche_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_category_niche_list_with_http_info_async(
self,
**kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20018, int, typing.MutableMapping]]":
"""List of ETP class niche categories. # noqa: E501
List of ETP class niche categories. # noqa: E501
This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
pagination_offset (float): Non-negative number of entries to skip, or 0 (default).. [optional] if omitted the server will use the default value of 0.0
pagination_limit (float): Non-negative maximum number of entries to return.. [optional] if omitted the server will use the default value of 20.0
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(InlineResponse20018, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
return self.get_factset_etf_class_category_niche_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_geography_list(
self,
**kwargs
) -> InlineResponse20019:
"""List of ETP class geographies. # noqa: E501
List of ETP class geographies. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20019
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
return self.get_factset_etf_class_geography_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_geography_list_with_http_info(
self,
**kwargs
) -> typing.Tuple[InlineResponse20019, int, typing.MutableMapping]:
"""List of ETP class geographies. # noqa: E501
List of ETP class geographies. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20019
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
return self.get_factset_etf_class_geography_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_geography_list_async(
self,
**kwargs
) -> "ApplyResult[InlineResponse20019]":
"""List of ETP class geographies. # noqa: E501
List of ETP class geographies. # noqa: E501
This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[InlineResponse20019]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
return self.get_factset_etf_class_geography_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_geography_list_with_http_info_async(
self,
**kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20019, int, typing.MutableMapping]]":
"""List of ETP class geographies. # noqa: E501
List of ETP class geographies. # noqa: E501
This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(InlineResponse20019, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
return self.get_factset_etf_class_geography_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_get_by_symbol(
self,
symbol,
**kwargs
) -> InlineResponse20015:
"""Retrieve an ETP's classification specific to asset class, geography, or investment strategy. # noqa: E501
ETP classification is divided into three categories: Asset Class, Geography, and Investment Strategy. Asset class is determined based on the various asset types held by the fund, A fund's geography can be classified by region (e.g. Asia-Pac), specific geography (e.g. China) or economic development (e.g. BRIC). An ETP's investment strategy is classified in broad categories (e.g. Large Cap) and more granular categorizations. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20015
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_class_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_get_by_symbol_with_http_info(
self,
symbol,
**kwargs
) -> typing.Tuple[InlineResponse20015, int, typing.MutableMapping]:
"""Retrieve an ETP's classification specific to asset class, geography, or investment strategy. # noqa: E501
ETP classification is divided into three categories: Asset Class, Geography, and Investment Strategy. Asset class is determined based on the various asset types held by the fund, A fund's geography can be classified by region (e.g. Asia-Pac), specific geography (e.g. China) or economic development (e.g. BRIC). An ETP's investment strategy is classified in broad categories (e.g. Large Cap) and more granular categorizations. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20015
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_class_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_get_by_symbol_async(
self,
symbol,
**kwargs
) -> "ApplyResult[InlineResponse20015]":
"""Retrieve an ETP's classification specific to asset class, geography, or investment strategy. # noqa: E501
ETP classification is divided into three categories: Asset Class, Geography, and Investment Strategy. Asset class is determined based on the various asset types held by the fund, A fund's geography can be classified by region (e.g. Asia-Pac), specific geography (e.g. China) or economic development (e.g. BRIC). An ETP's investment strategy is classified in broad categories (e.g. Large Cap) and more granular categorizations. # noqa: E501
This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[InlineResponse20015]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_class_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_class_get_by_symbol_with_http_info_async(
self,
symbol,
**kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20015, int, typing.MutableMapping]]":
"""Retrieve an ETP's classification specific to asset class, geography, or investment strategy. # noqa: E501
ETP classification is divided into three categories: Asset Class, Geography, and Investment Strategy. Asset class is determined based on the various asset types held by the fund, A fund's geography can be classified by region (e.g. Asia-Pac), specific geography (e.g. China) or economic development (e.g. BRIC). An ETP's investment strategy is classified in broad categories (e.g. Large Cap) and more granular categorizations. # noqa: E501
This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
symbol (str): Market symbol of ETP defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(InlineResponse20015, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_class_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_competitors_list_by_symbol(
self,
symbol,
**kwargs
) -> InlineResponse20020:
"""FactSet's proprietary list of competing companies. # noqa: E501
FactSet defines and maintains a proprietary list of competing companies based on a number of attributes specific to a fund. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
symbol (str): Ticker-region of a security as defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20020
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_competitors_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_competitors_list_by_symbol_with_http_info(
self,
symbol,
**kwargs
) -> typing.Tuple[InlineResponse20020, int, typing.MutableMapping]:
"""FactSet's proprietary list of competing companies. # noqa: E501
FactSet defines and maintains a proprietary list of competing companies based on a number of attributes specific to a fund. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
symbol (str): Ticker-region of a security as defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
InlineResponse20020
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_competitors_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_competitors_list_by_symbol_async(
self,
symbol,
**kwargs
) -> "ApplyResult[InlineResponse20020]":
"""FactSet's proprietary list of competing companies. # noqa: E501
FactSet defines and maintains a proprietary list of competing companies based on a number of attributes specific to a fund. # noqa: E501
This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
symbol (str): Ticker-region of a security as defined by FactSet.
Keyword Args:
attributes ([str]): Limit the attributes returned in the response to the specified set.. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[InlineResponse20020]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['symbol'] = \
symbol
return self.get_factset_etf_competitors_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_competitors_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20020, int, typing.MutableMapping]]":
        """FactSet's proprietary list of competing companies.

        FactSet defines and maintains a proprietary list of competing
        companies based on a number of attributes specific to a fund.
        Asynchronous call: returns (data, HTTP status, headers) wrapped
        in an ``ApplyResult``.

        Args:
            symbol (str): Ticker-region of a security as defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[(InlineResponse20020, int, typing.Dict)]
        """
        # Async variant that also surfaces status code and headers.
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_competitors_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_distribution_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20021:
        """Retrieve an ETP's current distribution details.

        Retrieve distribution-related details for a specific ETP
        including dividend and capital gain distribution details.
        Synchronous call: returns only the deserialized HTTP payload.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            InlineResponse20021: the response object.
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_distribution_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_distribution_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20021, int, typing.MutableMapping]:
        """Retrieve an ETP's current distribution details.

        Retrieve distribution-related details for a specific ETP
        including dividend and capital gain distribution details.
        Synchronous call: returns the data, HTTP status code, and
        response headers.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            tuple: (InlineResponse20021 response object, int HTTP status
            code, dict of response headers).
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_distribution_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_distribution_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20021]":
        """Retrieve an ETP's current distribution details.

        Retrieve distribution-related details for a specific ETP
        including dividend and capital gain distribution details.
        Asynchronous call: returns the deserialized HTTP payload wrapped
        in an ``ApplyResult``.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[InlineResponse20021]
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_distribution_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_distribution_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20021, int, typing.MutableMapping]]":
        """Retrieve an ETP's current distribution details.

        Retrieve distribution-related details for a specific ETP
        including dividend and capital gain distribution details.
        Asynchronous call: returns (data, HTTP status, headers) wrapped
        in an ``ApplyResult``.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[(InlineResponse20021, int, typing.Dict)]
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_distribution_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_fund_flows_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20022:
        """Retrieve an ETP's cash inflow/outflows for various time periods.

        Retrieve the amount invested or divested in a specific ETP over
        various time periods including one-day, one-week, one-month,
        one-year, and YTD. Synchronous call: returns only the
        deserialized HTTP payload.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            InlineResponse20022: the response object.
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_fund_flows_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_fund_flows_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20022, int, typing.MutableMapping]:
        """Retrieve an ETP's cash inflow/outflows for various time periods.

        Retrieve the amount invested or divested in a specific ETP over
        various time periods including one-day, one-week, one-month,
        one-year, and YTD. Synchronous call: returns the data, HTTP
        status code, and response headers.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            tuple: (InlineResponse20022 response object, int HTTP status
            code, dict of response headers).
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_fund_flows_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_fund_flows_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20022]":
        """Retrieve an ETP's cash inflow/outflows for various time periods.

        Retrieve the amount invested or divested in a specific ETP over
        various time periods including one-day, one-week, one-month,
        one-year, and YTD. Asynchronous call: returns the deserialized
        HTTP payload wrapped in an ``ApplyResult``.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[InlineResponse20022]
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_fund_flows_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_fund_flows_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20022, int, typing.MutableMapping]]":
        """Retrieve an ETP's cash inflow/outflows for various time periods.

        Retrieve the amount invested or divested in a specific ETP over
        various time periods including one-day, one-week, one-month,
        one-year, and YTD. Asynchronous call: returns (data, HTTP status,
        headers) wrapped in an ``ApplyResult``.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[(InlineResponse20022, int, typing.Dict)]
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_fund_flows_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse200:
        """Retrieve basic profile information for a specified ETP.

        An ETP can be profiled by defining several common attributes such
        as issuer, fund description, and benchmark. Synchronous call:
        returns only the deserialized HTTP payload.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            InlineResponse200: the response object.
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse200, int, typing.MutableMapping]:
        """Retrieve basic profile information for a specified ETP.

        An ETP can be profiled by defining several common attributes such
        as issuer, fund description, and benchmark. Synchronous call:
        returns the data, HTTP status code, and response headers.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            tuple: (InlineResponse200 response object, int HTTP status
            code, dict of response headers).
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse200]":
        """Retrieve basic profile information for a specified ETP.

        An ETP can be profiled by defining several common attributes such
        as issuer, fund description, and benchmark. Asynchronous call:
        returns the deserialized HTTP payload wrapped in an
        ``ApplyResult``.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[InlineResponse200]
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse200, int, typing.MutableMapping]]":
        """Retrieve basic profile information for a specified ETP.

        An ETP can be profiled by defining several common attributes such
        as issuer, fund description, and benchmark. Asynchronous call:
        returns (data, HTTP status, headers) wrapped in an
        ``ApplyResult``.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            ApplyResult[(InlineResponse200, int, typing.Dict)]
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=True,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_growth_of_ten_k_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20023:
        """Return the selected ETP's Growth of 10K calculated values.

        Growth of 10K (or growth of 10,000) is a commonly used chart that
        highlights the change in the value of an initial 10,000
        investment in the ETP during a given period of time. Often, this
        period of time is either since inception or the calculation
        between the pre-defined range. Synchronous call: returns only the
        deserialized HTTP payload.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            time_period (str): Historic NAV date value (wording from the
                API spec; verify exact semantics against the FactSet ETF
                API docs). [optional]
            calculation_type (str): Historic NAV date value (wording from
                the API spec; verify against the FactSet ETF API docs).
                [optional]
            alignment (str): Indicates the reference point for growth of
                10k data. [optional]
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            pagination_offset (float): Non-negative number of entries to
                skip, or 0 (default). [optional]; if omitted the server
                uses the default value of 0.0.
            pagination_limit (float): Non-negative maximum number of
                entries to return. [optional]; if omitted the server uses
                the default value of 20.0.
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            InlineResponse20023: the response object.
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=True,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_growth_of_ten_k_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_growth_of_ten_k_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20023, int, typing.MutableMapping]:
        """Return the selected ETP's Growth of 10K calculated values.

        Growth of 10K (or growth of 10,000) is a commonly used chart that
        highlights the change in the value of an initial 10,000
        investment in the ETP during a given period of time. Often, this
        period of time is either since inception or the calculation
        between the pre-defined range. Synchronous call: returns the
        data, HTTP status code, and response headers.

        Args:
            symbol (str): Market symbol of ETP defined by FactSet.

        Keyword Args:
            time_period (str): Historic NAV date value (wording from the
                API spec; verify exact semantics against the FactSet ETF
                API docs). [optional]
            calculation_type (str): Historic NAV date value (wording from
                the API spec; verify against the FactSet ETF API docs).
                [optional]
            alignment (str): Indicates the reference point for growth of
                10k data. [optional]
            attributes ([str]): Limit the attributes returned in the
                response to the specified set. [optional]
            pagination_offset (float): Non-negative number of entries to
                skip, or 0 (default). [optional]; if omitted the server
                uses the default value of 0.0.
            pagination_limit (float): Non-negative maximum number of
                entries to return. [optional]; if omitted the server uses
                the default value of 20.0.
            _preload_content (bool): if False, return the raw
                urllib3.HTTPResponse object without reading/decoding the
                response data. Default is True.
            _request_timeout (int/float/tuple): total request timeout, or
                a (connection, read) pair of timeouts. Default is None.
            _check_input_type (bool): type-check the data sent to the
                server. Default is True.
            _check_return_type (bool): type-check the data received from
                the server. Default is True.
            _spec_property_naming (bool): True if input variable names are
                the serialized (OpenAPI) names; False for pythonic
                snake_case names (default).
            _content_type (str/None): force the body content-type; when
                None it is predicted from the allowed content-types and
                the body. Default is None.
            _host_index (int/None): index of the server to use. Default is
                read from the configuration.

        Returns:
            tuple: (InlineResponse20023 response object, int HTTP status
            code, dict of response headers).
        """
        self.apply_kwargs_defaults(
            kwargs=kwargs,
            return_http_data_only=False,
            async_req=False,
        )
        kwargs["symbol"] = symbol
        return self.get_factset_etf_growth_of_ten_k_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_growth_of_ten_k_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20023]":
    """This endpoint returns selected ETP's Growth of 10K calculated values.  # noqa: E501

    Growth of 10K (or growth of 10,000) is a commonly used chart that highlights the change in the value of an initial 10,000 investment in the ETP during a given period of time. Often, this period of time is either since inception or the calculation between the pre-defined range.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Historic NAV date value. [optional]
        calculation_type (str): Historic NAV date value. [optional]
        alignment (str): Indicates the reference point for growth of 10k data. [optional]
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20023]
    """
    # Async call returning only the deserialized response body, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_growth_of_ten_k_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_growth_of_ten_k_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20023, int, typing.MutableMapping]]":
    """This endpoint returns selected ETP's Growth of 10K calculated values.  # noqa: E501

    Growth of 10K (or growth of 10,000) is a commonly used chart that highlights the change in the value of an initial 10,000 investment in the ETP during a given period of time. Often, this period of time is either since inception or the calculation between the pre-defined range.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Historic NAV date value. [optional]
        calculation_type (str): Historic NAV date value. [optional]
        alignment (str): Indicates the reference point for growth of 10k data. [optional]
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20023, int, typing.Dict)]
    """
    # Async call returning (data, status, headers), wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_growth_of_ten_k_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_holdings_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20024:
    """Holdings details for an individual ETP.  # noqa: E501

    Retrieve an ETP's holdings information including security, shares held, and weight.  # noqa: E501
    This method makes a synchronous HTTP request. Returns the http data only.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        InlineResponse20024
            Response Object
    """
    # Synchronous call returning only the deserialized response body.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_holdings_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_holdings_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20024, int, typing.MutableMapping]:
    """Holdings details for an individual ETP.  # noqa: E501

    Retrieve an ETP's holdings information including security, shares held, and weight.  # noqa: E501
    This method makes a synchronous HTTP request. Returns http data, http status and headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        InlineResponse20024
            Response Object
        int
            Http Status Code
        dict
            Dictionary of the response headers
    """
    # Synchronous call returning the full (data, status, headers) triple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_holdings_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_holdings_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20024]":
    """Holdings details for an individual ETP.  # noqa: E501

    Retrieve an ETP's holdings information including security, shares held, and weight.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20024]
    """
    # Async call returning only the deserialized response body, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_holdings_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_holdings_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20024, int, typing.MutableMapping]]":
    """Holdings details for an individual ETP.  # noqa: E501

    Retrieve an ETP's holdings information including security, shares held, and weight.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20024, int, typing.Dict)]
    """
    # Async call returning (data, status, headers), wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_holdings_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_market_aggregates_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20025:
    """Market aggregate data for ETPs.  # noqa: E501

    Market Aggregates combines FactSet Estimates, FactSet Fundamentals, and FactSet Prices data to derive ratios and per share values on an aggregate level. The resulting index values can be used to identify market trends and compare a combination of portfolios, benchmarks, and individual securities.  # noqa: E501
    This method makes a synchronous HTTP request. Returns the http data only.

    Args:
        symbol (str): Ticker-region of an ETP as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        InlineResponse20025
            Response Object
    """
    # Synchronous call returning only the deserialized response body.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_market_aggregates_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_market_aggregates_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20025, int, typing.MutableMapping]:
    """Market aggregate data for ETPs.  # noqa: E501

    Market Aggregates combines FactSet Estimates, FactSet Fundamentals, and FactSet Prices data to derive ratios and per share values on an aggregate level. The resulting index values can be used to identify market trends and compare a combination of portfolios, benchmarks, and individual securities.  # noqa: E501
    This method makes a synchronous HTTP request. Returns http data, http status and headers.

    Args:
        symbol (str): Ticker-region of an ETP as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        InlineResponse20025
            Response Object
        int
            Http Status Code
        dict
            Dictionary of the response headers
    """
    # Synchronous call returning the full (data, status, headers) triple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_market_aggregates_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_market_aggregates_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20025]":
    """Market aggregate data for ETPs.  # noqa: E501

    Market Aggregates combines FactSet Estimates, FactSet Fundamentals, and FactSet Prices data to derive ratios and per share values on an aggregate level. The resulting index values can be used to identify market trends and compare a combination of portfolios, benchmarks, and individual securities.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult.

    Args:
        symbol (str): Ticker-region of an ETP as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20025]
    """
    # Async call returning only the deserialized response body, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_market_aggregates_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_market_aggregates_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20025, int, typing.MutableMapping]]":
    """Market aggregate data for ETPs.  # noqa: E501

    Market Aggregates combines FactSet Estimates, FactSet Fundamentals, and FactSet Prices data to derive ratios and per share values on an aggregate level. The resulting index values can be used to identify market trends and compare a combination of portfolios, benchmarks, and individual securities.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult.

    Args:
        symbol (str): Ticker-region of an ETP as defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20025, int, typing.Dict)]
    """
    # Async call returning (data, status, headers), wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_market_aggregates_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_premium_discount_summary_list_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20026:
    """Summary of ETP premium discount data.  # noqa: E501

    Summary of ETP premium discount data.  # noqa: E501
    This method makes a synchronous HTTP request. Returns the http data only.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional] if omitted the server will use the default value of "YTD"
        alignment (str): Indicates the reference point for the historical NAV and price values. [optional] if omitted the server will use the default value of "quarter-end"
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        InlineResponse20026
            Response Object
    """
    # Synchronous call returning only the deserialized response body.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_premium_discount_summary_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_premium_discount_summary_list_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20026, int, typing.MutableMapping]:
    """Summary of ETP premium discount data.  # noqa: E501

    Summary of ETP premium discount data.  # noqa: E501
    This method makes a synchronous HTTP request. Returns http data, http status and headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional] if omitted the server will use the default value of "YTD"
        alignment (str): Indicates the reference point for the historical NAV and price values. [optional] if omitted the server will use the default value of "quarter-end"
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        InlineResponse20026
            Response Object
        int
            Http Status Code
        dict
            Dictionary of the response headers
    """
    # Synchronous call returning the full (data, status, headers) triple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_premium_discount_summary_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_premium_discount_summary_list_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20026]":
    """Summary of ETP premium discount data.  # noqa: E501

    Summary of ETP premium discount data.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional] if omitted the server will use the default value of "YTD"
        alignment (str): Indicates the reference point for the historical NAV and price values. [optional] if omitted the server will use the default value of "quarter-end"
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[InlineResponse20026]
    """
    # Async call returning only the deserialized response body, wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_premium_discount_summary_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_premium_discount_summary_list_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20026, int, typing.MutableMapping]]":
    """Summary of ETP premium discount data.  # noqa: E501

    Summary of ETP premium discount data.  # noqa: E501
    This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional] if omitted the server will use the default value of "YTD"
        alignment (str): Indicates the reference point for the historical NAV and price values. [optional] if omitted the server will use the default value of "quarter-end"
        attributes ([str]): Limit the attributes returned in the response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip, or 0 (default). [optional] if omitted the server will use the default value of 0.0
        pagination_limit (float): Non-negative maximum number of entries to return. [optional] if omitted the server will use the default value of 20.0
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20026, int, typing.Dict)]
    """
    # Async call returning (data, status, headers), wrapped in ApplyResult.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_premium_discount_summary_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_price_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20027:
    """Retrieve historical ETP NAV values.  # noqa: E501

    Retrieve an ETP's historical NAV and shares outstanding for a
    specified time range. Synchronous HTTP request; returns the HTTP
    data only.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20027: Response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_price_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_price_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20027, int, typing.MutableMapping]:
    """Retrieve historical ETP NAV values.  # noqa: E501

    Retrieve an ETP's historical NAV and shares outstanding for a
    specified time range. Synchronous HTTP request; returns HTTP data,
    HTTP status and headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20027 response object, int HTTP status
            code, dict of response headers).
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_price_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_price_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20027]":
    """Retrieve historical ETP NAV values.  # noqa: E501

    Retrieve an ETP's historical NAV and shares outstanding for a
    specified time range. Asynchronous HTTP request; returns the HTTP
    data wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20027]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_price_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_price_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20027, int, typing.MutableMapping]]":
    """Retrieve historical ETP NAV values.  # noqa: E501

    Retrieve an ETP's historical NAV and shares outstanding for a
    specified time range. Asynchronous HTTP request; returns HTTP data,
    HTTP status and headers wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20027, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_price_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_returns_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20028:
    """Retrieve total return data for a specified ETP.  # noqa: E501

    An ETP's total return data can be returned for various time frames
    including 1-month, 3-month, YTD, 1-year, 3-year, and 5-year. Total
    return calculations include price performance plus reinvested and
    compounded distributions. Market price is used to calculate market
    returns; portfolio NAV is used to calculate NAV returns.
    Synchronous HTTP request; returns the HTTP data only.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        return_type (str): Return type. [optional] If omitted, the
            server uses the default value "price".
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20028: Response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_returns_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_returns_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20028, int, typing.MutableMapping]:
    """Retrieve total return data for a specified ETP.  # noqa: E501

    An ETP's total return data can be returned for various time frames
    including 1-month, 3-month, YTD, 1-year, 3-year, and 5-year. Total
    return calculations include price performance plus reinvested and
    compounded distributions. Market price is used to calculate market
    returns; portfolio NAV is used to calculate NAV returns.
    Synchronous HTTP request; returns HTTP data, HTTP status and
    headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        return_type (str): Return type. [optional] If omitted, the
            server uses the default value "price".
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20028 response object, int HTTP status
            code, dict of response headers).
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_returns_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_returns_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20028]":
    """Retrieve total return data for a specified ETP.  # noqa: E501

    An ETP's total return data can be returned for various time frames
    including 1-month, 3-month, YTD, 1-year, 3-year, and 5-year. Total
    return calculations include price performance plus reinvested and
    compounded distributions. Market price is used to calculate market
    returns; portfolio NAV is used to calculate NAV returns.
    Asynchronous HTTP request; returns the HTTP data wrapped in an
    ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        return_type (str): Return type. [optional] If omitted, the
            server uses the default value "price".
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20028]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_returns_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_returns_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20028, int, typing.MutableMapping]]":
    """Retrieve total return data for a specified ETP.  # noqa: E501

    An ETP's total return data can be returned for various time frames
    including 1-month, 3-month, YTD, 1-year, 3-year, and 5-year. Total
    return calculations include price performance plus reinvested and
    compounded distributions. Market price is used to calculate market
    returns; portfolio NAV is used to calculate NAV returns.
    Asynchronous HTTP request; returns HTTP data, HTTP status and
    headers wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        return_type (str): Return type. [optional] If omitted, the
            server uses the default value "price".
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20028, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_returns_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_get_by_symbol(
    self,
    symbol,
    **kwargs
) -> InlineResponse20029:
    """Retrieve various classification details for a specified ETP.  # noqa: E501

    ETP's can be classified in many different ways including investment
    strategy, security weightings, and fund composition. Synchronous
    HTTP request; returns the HTTP data only.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20029: Response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_strategy_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_get_by_symbol_with_http_info(
    self,
    symbol,
    **kwargs
) -> typing.Tuple[InlineResponse20029, int, typing.MutableMapping]:
    """Retrieve various classification details for a specified ETP.  # noqa: E501

    ETP's can be classified in many different ways including investment
    strategy, security weightings, and fund composition. Synchronous
    HTTP request; returns HTTP data, HTTP status and headers.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20029 response object, int HTTP status
            code, dict of response headers).
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_strategy_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_get_by_symbol_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[InlineResponse20029]":
    """Retrieve various classification details for a specified ETP.  # noqa: E501

    ETP's can be classified in many different ways including investment
    strategy, security weightings, and fund composition. Asynchronous
    HTTP request; returns the HTTP data wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20029]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_strategy_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_get_by_symbol_with_http_info_async(
    self,
    symbol,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20029, int, typing.MutableMapping]]":
    """Retrieve various classification details for a specified ETP.  # noqa: E501

    ETP's can be classified in many different ways including investment
    strategy, security weightings, and fund composition. Asynchronous
    HTTP request; returns HTTP data, HTTP status and headers wrapped in
    an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[(InlineResponse20029, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    # Positional argument always takes precedence over any 'symbol' kwarg.
    kwargs['symbol'] = symbol
    return self.get_factset_etf_strategy_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_segment_list(
    self,
    **kwargs
) -> InlineResponse20030:
    """Retrieve a list of ETP strategy segments.  # noqa: E501

    Retrieve the various segments assigned to a specific ETP. Segment
    data is used to group funds for comparison and relative performance
    analyses. Synchronous HTTP request; returns the HTTP data only.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to
            skip, or 0 (default). [optional] If omitted, the server
            uses the default value 0.0.
        pagination_limit (float): Non-negative maximum number of
            entries to return. [optional] If omitted, the server uses
            the default value 20.0.
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        InlineResponse20030: Response object.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    return self.get_factset_etf_strategy_segment_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_segment_list_with_http_info(
    self,
    **kwargs
) -> typing.Tuple[InlineResponse20030, int, typing.MutableMapping]:
    """Retrieve a list of ETP strategy segments.  # noqa: E501

    Retrieve the various segments assigned to a specific ETP. Segment
    data is used to group funds for comparison and relative performance
    analyses. Synchronous HTTP request; returns HTTP data, HTTP status
    and headers.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to
            skip, or 0 (default). [optional] If omitted, the server
            uses the default value 0.0.
        pagination_limit (float): Non-negative maximum number of
            entries to return. [optional] If omitted, the server uses
            the default value 20.0.
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        tuple: (InlineResponse20030 response object, int HTTP status
            code, dict of response headers).
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    return self.get_factset_etf_strategy_segment_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_segment_list_async(
    self,
    **kwargs
) -> "ApplyResult[InlineResponse20030]":
    """Retrieve a list of ETP strategy segments.  # noqa: E501

    Retrieve the various segments assigned to a specific ETP. Segment
    data is used to group funds for comparison and relative performance
    analyses. Asynchronous HTTP request; returns the HTTP data wrapped
    in an ApplyResult.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the
            response to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to
            skip, or 0 (default). [optional] If omitted, the server
            uses the default value 0.0.
        pagination_limit (float): Non-negative maximum number of
            entries to return. [optional] If omitted, the server uses
            the default value 20.0.
        _preload_content (bool): If False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): Total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _spec_property_naming (bool): True if input variable names are
            serialized names as specified in the OpenAPI document;
            False if they are pythonic (snake case) names (default).
        _content_type (str/None): Force the body content-type. When
            None (default), it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.

    Returns:
        ApplyResult[InlineResponse20030]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    return self.get_factset_etf_strategy_segment_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_strategy_segment_list_with_http_info_async(
    self,
    **kwargs
) -> "ApplyResult[typing.Tuple[InlineResponse20030, int, typing.MutableMapping]]":
    """Retrieve a list of ETP strategy segments.

    Fetches the segments assigned to a specific ETP; segment data groups
    funds for comparison and relative-performance analyses.  The HTTP call
    runs asynchronously and the (data, HTTP status, headers) triple is
    wrapped in an ApplyResult.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional, server default 0.0]
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional, server default 20.0]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[(InlineResponse20030, int, typing.Dict)]
    """
    # Request the full (data, status, headers) tuple and dispatch async.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    return self.get_factset_etf_strategy_segment_list_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_structure_get_by_symbol(self, symbol, **kwargs) -> InlineResponse20031:
    """Retrieve the basic structure information for a specified ETP.

    Returns details on a fund's structure including its type, investment
    style (active/passive), and legal structure.  The HTTP call is made
    synchronously and only the deserialized response body is returned.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        InlineResponse20031: the response object.
    """
    # Synchronous call, response body only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_structure_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_structure_get_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse20031, int, typing.MutableMapping]:
    """Retrieve the basic structure information for a specified ETP.

    Returns details on a fund's structure including its type, investment
    style (active/passive), and legal structure.  The HTTP call is made
    synchronously and the response body, HTTP status code and response
    headers are all returned.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        tuple: (InlineResponse20031 response object, int HTTP status code,
        dict of response headers)
    """
    # Synchronous call returning the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_structure_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_structure_get_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse20031]":
    """Retrieve the basic structure information for a specified ETP.

    Returns details on a fund's structure including its type, investment
    style (active/passive), and legal structure.  The HTTP call runs
    asynchronously; the response body is wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[InlineResponse20031]
    """
    # Asynchronous call, response body only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_structure_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_structure_get_by_symbol_with_http_info_async(self, symbol, **kwargs) -> "ApplyResult[typing.Tuple[InlineResponse20031, int, typing.MutableMapping]]":
    """Retrieve the basic structure information for a specified ETP.

    Returns details on a fund's structure including its type, investment
    style (active/passive), and legal structure.  The HTTP call runs
    asynchronously; the (data, HTTP status, headers) triple is wrapped in
    an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[(InlineResponse20031, int, typing.Dict)]
    """
    # Asynchronous call returning the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_structure_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_taxes_and_fees_us_get_by_symbol(self, symbol, **kwargs) -> InlineResponse20032:
    """Retrieve the tax and fee related information for a specified ETP.

    Returns fee and tax details for a specified ETP, including the expense
    ratio and the tax treatment of dividends and capital gains.  The HTTP
    call is made synchronously and only the deserialized response body is
    returned.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        InlineResponse20032: the response object.
    """
    # Synchronous call, response body only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_taxes_and_fees_us_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_taxes_and_fees_us_get_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse20032, int, typing.MutableMapping]:
    """Retrieve the tax and fee related information for a specified ETP.

    Returns fee and tax details for a specified ETP, including the expense
    ratio and the tax treatment of dividends and capital gains.  The HTTP
    call is made synchronously and the response body, HTTP status code and
    response headers are all returned.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        tuple: (InlineResponse20032 response object, int HTTP status code,
        dict of response headers)
    """
    # Synchronous call returning the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_taxes_and_fees_us_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_taxes_and_fees_us_get_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse20032]":
    """Retrieve the tax and fee related information for a specified ETP.

    Returns fee and tax details for a specified ETP, including the expense
    ratio and the tax treatment of dividends and capital gains.  The HTTP
    call runs asynchronously; the response body is wrapped in an
    ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[InlineResponse20032]
    """
    # Asynchronous call, response body only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_taxes_and_fees_us_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_taxes_and_fees_us_get_by_symbol_with_http_info_async(self, symbol, **kwargs) -> "ApplyResult[typing.Tuple[InlineResponse20032, int, typing.MutableMapping]]":
    """Retrieve the tax and fee related information for a specified ETP.

    Returns fee and tax details for a specified ETP, including the expense
    ratio and the tax treatment of dividends and capital gains.  The HTTP
    call runs asynchronously; the (data, HTTP status, headers) triple is
    wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[(InlineResponse20032, int, typing.Dict)]
    """
    # Asynchronous call returning the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_taxes_and_fees_us_get_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_time_series_list_by_symbol(self, symbol, **kwargs) -> InlineResponse20033:
    """Retrieve historical NAV data for a specified ETP.

    Returns historical NAV data with the corresponding fund flows and
    shares outstanding for a specified fund and time period.  See
    currency.fund in /factset/etf/getBySymbol for the currency value.  The
    HTTP call is made synchronously and only the deserialized response
    body is returned.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional, server
            default "1Y"]
        alignment (str): Reference point for the time series data.
            [optional]
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional, server default 0.0]
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional, server default 20.0]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        InlineResponse20033: the response object.
    """
    # Synchronous call, response body only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_time_series_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_time_series_list_by_symbol_with_http_info(self, symbol, **kwargs) -> typing.Tuple[InlineResponse20033, int, typing.MutableMapping]:
    """Retrieve historical NAV data for a specified ETP.

    Returns historical NAV data with the corresponding fund flows and
    shares outstanding for a specified fund and time period.  See
    currency.fund in /factset/etf/getBySymbol for the currency value.  The
    HTTP call is made synchronously and the response body, HTTP status
    code and response headers are all returned.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional, server
            default "1Y"]
        alignment (str): Reference point for the time series data.
            [optional]
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional, server default 0.0]
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional, server default 20.0]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        tuple: (InlineResponse20033 response object, int HTTP status code,
        dict of response headers)
    """
    # Synchronous call returning the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_time_series_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_time_series_list_by_symbol_async(self, symbol, **kwargs) -> "ApplyResult[InlineResponse20033]":
    """Retrieve historical NAV data for a specified ETP.

    Returns historical NAV data with the corresponding fund flows and
    shares outstanding for a specified fund and time period.  See
    currency.fund in /factset/etf/getBySymbol for the currency value.  The
    HTTP call runs asynchronously; the response body is wrapped in an
    ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional, server
            default "1Y"]
        alignment (str): Reference point for the time series data.
            [optional]
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional, server default 0.0]
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional, server default 20.0]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[InlineResponse20033]
    """
    # Asynchronous call, response body only.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_time_series_list_by_symbol_endpoint.call_with_http_info(**kwargs)
def get_factset_etf_time_series_list_by_symbol_with_http_info_async(self, symbol, **kwargs) -> "ApplyResult[typing.Tuple[InlineResponse20033, int, typing.MutableMapping]]":
    """Retrieve historical NAV data for a specified ETP.

    Returns historical NAV data with the corresponding fund flows and
    shares outstanding for a specified fund and time period.  See
    currency.fund in /factset/etf/getBySymbol for the currency value.  The
    HTTP call runs asynchronously; the (data, HTTP status, headers) triple
    is wrapped in an ApplyResult.

    Args:
        symbol (str): Market symbol of ETP defined by FactSet.

    Keyword Args:
        time_period (str): Time frame of the data. [optional, server
            default "1Y"]
        alignment (str): Reference point for the time series data.
            [optional]
        attributes ([str]): Limit the attributes returned in the response
            to the specified set. [optional]
        pagination_offset (float): Non-negative number of entries to skip.
            [optional, server default 0.0]
        pagination_limit (float): Non-negative maximum number of entries to
            return. [optional, server default 20.0]

    The standard transport options are also accepted: _preload_content,
    _request_timeout, _check_input_type, _check_return_type,
    _spec_property_naming, _content_type and _host_index.

    Returns:
        ApplyResult[(InlineResponse20033, int, typing.Dict)]
    """
    # Asynchronous call returning the full (data, status, headers) tuple.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['symbol'] = symbol
    return self.get_factset_etf_time_series_list_by_symbol_endpoint.call_with_http_info(**kwargs)
| 48.557071
| 448
| 0.579804
| 50,280
| 455,611
| 5.105131
| 0.011257
| 0.028611
| 0.020663
| 0.020134
| 0.986049
| 0.984834
| 0.975671
| 0.972722
| 0.963609
| 0.953663
| 0
| 0.012182
| 0.356789
| 455,611
| 9,382
| 449
| 48.562247
| 0.863718
| 0.588125
| 0
| 0.745163
| 1
| 0
| 0.176101
| 0.046879
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035132
| false
| 0
| 0.010438
| 0
| 0.080448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a3eb8bb2314cd5308a6d468cbe0d8171b7dc9c18
| 87
|
py
|
Python
|
echoices/enums/__init__.py
|
mbourqui/django-echoices
|
2912c01d8477329ad2fda4636dfe788adc796cc7
|
[
"MIT"
] | 7
|
2017-04-22T11:16:19.000Z
|
2018-02-08T07:17:12.000Z
|
echoices/enums/__init__.py
|
mbourqui/django-echoices
|
2912c01d8477329ad2fda4636dfe788adc796cc7
|
[
"MIT"
] | 50
|
2017-04-27T14:27:58.000Z
|
2021-08-18T17:17:03.000Z
|
echoices/enums/__init__.py
|
mbourqui/django-echoices
|
2912c01d8477329ad2fda4636dfe788adc796cc7
|
[
"MIT"
] | 1
|
2021-04-01T09:32:38.000Z
|
2021-04-01T09:32:38.000Z
|
from .enums import EChoiceMeta
from .enums import EChoice, EOrderedChoice, EAutoChoice
| 29
| 55
| 0.83908
| 10
| 87
| 7.3
| 0.7
| 0.246575
| 0.410959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114943
| 87
| 2
| 56
| 43.5
| 0.948052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43091087b878dab2d4c9a0df61712f84f0d289c6
| 4,363
|
py
|
Python
|
pyaz/lab/vm/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/lab/vm/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/lab/vm/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage VMs in an Azure DevTest Lab.
'''
from ... pyaz_utils import _call_az
def show(lab_name, name, resource_group, expand=None):
    '''
    Get the details of a virtual machine in an Azure DevTest Lab (`az lab vm show`).

    Required Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - expand -- Specify the $expand query. Example: 'properties($expand=artifacts,computeVm,networkInterface,applicableSchedule)'
    '''
    # locals() forwards every parameter by name, so parameter names must
    # match the CLI option names expected by _call_az — do not rename them.
    return _call_az("az lab vm show", locals())
def delete(lab_name, name, resource_group):
    '''
    Delete a virtual machine from an Azure DevTest Lab (`az lab vm delete`).

    Required Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() forwards every parameter by name; do not rename parameters.
    return _call_az("az lab vm delete", locals())
def start(lab_name, name, resource_group):
    '''
    Start a virtual machine in an Azure DevTest Lab (`az lab vm start`).

    Required Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() forwards every parameter by name; do not rename parameters.
    return _call_az("az lab vm start", locals())
def stop(lab_name, name, resource_group):
    '''
    Stop a virtual machine in an Azure DevTest Lab (`az lab vm stop`).

    Required Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() forwards every parameter by name; do not rename parameters.
    return _call_az("az lab vm stop", locals())
def apply_artifacts(lab_name, name, resource_group, artifacts=None):
    '''
    Apply artifacts to a virtual machine in Azure DevTest Lab (`az lab vm apply-artifacts`).

    Required Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - artifacts -- The list of artifacts to apply.
    '''
    # locals() forwards every parameter by name; do not rename parameters.
    return _call_az("az lab vm apply-artifacts", locals())
def list(lab_name, resource_group, all=None, claimable=None, environment=None, expand=None, filters=None, object_id=None, order_by=None, top=None):
    '''
    List the VMs in an Azure DevTest Lab.

    NOTE: the names `list` and `all` mirror the CLI and intentionally shadow
    the Python builtins; they are part of this module's public interface.

    Required Parameters:
    - lab_name -- The name of the lab.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - all -- List all virtual machines in the lab (presumably; mirrors `az lab vm list --all`)
    - claimable -- List only claimable virtual machines (presumably)
    - environment -- Environment to filter by (presumably)
    - expand -- Specify the $expand query (presumably)
    - filters -- OData filter expression (presumably)
    - object_id -- Object id of the owner to filter by (presumably)
    - order_by -- Ordering expression for the results (presumably)
    - top -- Maximum number of results to return (presumably)
    '''
    arguments = {
        "lab_name": lab_name,
        "resource_group": resource_group,
        "all": all,
        "claimable": claimable,
        "environment": environment,
        "expand": expand,
        "filters": filters,
        "object_id": object_id,
        "order_by": order_by,
        "top": top,
    }
    return _call_az("az lab vm list", arguments)
def claim(lab_name=None, name=None, resource_group=None):
    '''
    Claim a virtual machine from the Lab.

    Optional Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    arguments = {
        "lab_name": lab_name,
        "name": name,
        "resource_group": resource_group,
    }
    return _call_az("az lab vm claim", arguments)
def create(lab_name, name, resource_group, admin_password=None, admin_username=None, allow_claim=None, artifacts=None, authentication_type=None, disk_type=None, expiration_date=None, formula=None, generate_ssh_keys=None, image=None, image_type=None, ip_configuration=None, notes=None, saved_secret=None, size=None, ssh_key=None, subnet=None, tags=None, vnet_name=None):
    '''
    Create a VM in a lab.

    Required Parameters:
    - lab_name -- The name of the lab.
    - name -- The name of the virtual machine.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters (all forwarded verbatim to `az lab vm create`;
    semantics are defined by the Azure CLI, not this wrapper):
    - admin_password, admin_username, authentication_type, ssh_key,
      generate_ssh_keys, saved_secret -- credential configuration
    - image, image_type, formula -- source image selection
    - size, disk_type, ip_configuration, subnet, vnet_name -- hardware/network
    - allow_claim, artifacts, expiration_date, notes, tags -- lab metadata
    '''
    # Explicit mapping, parameter-for-parameter (same content as locals() at entry).
    arguments = {
        "lab_name": lab_name,
        "name": name,
        "resource_group": resource_group,
        "admin_password": admin_password,
        "admin_username": admin_username,
        "allow_claim": allow_claim,
        "artifacts": artifacts,
        "authentication_type": authentication_type,
        "disk_type": disk_type,
        "expiration_date": expiration_date,
        "formula": formula,
        "generate_ssh_keys": generate_ssh_keys,
        "image": image,
        "image_type": image_type,
        "ip_configuration": ip_configuration,
        "notes": notes,
        "saved_secret": saved_secret,
        "size": size,
        "ssh_key": ssh_key,
        "subnet": subnet,
        "tags": tags,
        "vnet_name": vnet_name,
    }
    return _call_az("az lab vm create", arguments)
| 31.846715
| 369
| 0.665826
| 580
| 4,363
| 4.867241
| 0.148276
| 0.110521
| 0.035423
| 0.049593
| 0.832802
| 0.807297
| 0.779667
| 0.736805
| 0.736805
| 0.736805
| 0
| 0
| 0.225533
| 4,363
| 136
| 370
| 32.080882
| 0.835454
| 0.606922
| 0
| 0
| 0
| 0
| 0.097579
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.470588
| false
| 0.058824
| 0.058824
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4aba06a49e2de5e693ef574735a3524795221c12
| 224
|
py
|
Python
|
Python/8 - kyu/8 kyu - Is he gonna survive.py
|
danielbom/codewars
|
d45b5a813c6f1d952a50d22f0b2fcea4ef3d0e27
|
[
"MIT"
] | null | null | null |
Python/8 - kyu/8 kyu - Is he gonna survive.py
|
danielbom/codewars
|
d45b5a813c6f1d952a50d22f0b2fcea4ef3d0e27
|
[
"MIT"
] | null | null | null |
Python/8 - kyu/8 kyu - Is he gonna survive.py
|
danielbom/codewars
|
d45b5a813c6f1d952a50d22f0b2fcea4ef3d0e27
|
[
"MIT"
] | null | null | null |
# https://www.codewars.com/kata/is-he-gonna-survive/train/python
# My solution
def hero(bullets, dragons):
    """Return True when *bullets* are enough for all *dragons* (2 bullets each)."""
    remaining = bullets - 2 * dragons
    return remaining >= 0
# ...
def hero(bullets, dragons):
    """A hero survives if he carries at least two bullets per dragon."""
    required = dragons + dragons
    return required <= bullets
| 24.888889
| 65
| 0.647321
| 30
| 224
| 4.833333
| 0.666667
| 0.386207
| 0.193103
| 0.289655
| 0.57931
| 0.57931
| 0.57931
| 0.57931
| 0
| 0
| 0
| 0.016667
| 0.196429
| 224
| 8
| 66
| 28
| 0.788889
| 0.348214
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
437d996b065a64cb6ff8d34050b16309ff80db40
| 483,006
|
py
|
Python
|
pysnmp/DATAFABRIC-MANAGER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/DATAFABRIC-MANAGER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/DATAFABRIC-MANAGER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module DATAFABRIC-MANAGER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DATAFABRIC-MANAGER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:21:35 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Resolve the base ASN.1 / SNMPv2 symbol factories through the MIB builder.
# NOTE(review): `mibBuilder` is injected by the pysnmp/pysmi loader when this
# generated module is executed — it is not defined in this file.
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
# Constraint combinators used to refine integer/string subtypes below.
SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
# Enterprise root OID for NetApp (parent of this MIB's subtree).
netapp, = mibBuilder.importSymbols("NETWORK-APPLIANCE-MIB", "netapp")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
# Core SMI object classes (scalars, tables, columns, notification types, ...).
Bits, ObjectIdentity, IpAddress, Gauge32, iso, Unsigned32, ModuleIdentity, NotificationType, MibIdentifier, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Counter64, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "ObjectIdentity", "IpAddress", "Gauge32", "iso", "Unsigned32", "ModuleIdentity", "NotificationType", "MibIdentifier", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Counter64", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
class DisplayString(OctetString):
    """MIB-local DisplayString textual convention.

    NOTE(review): this intentionally shadows the ``DisplayString`` imported
    from SNMPv2-TC on the line above — pysmi emits the MIB's own textual
    convention as a plain ``OctetString`` subclass, and all columns below
    bind to this local class.
    """
    pass
# Root of the DataFabric Manager subtree under the NetApp enterprise arc
# (1.3.6.1.4.1.789; the `netapp` symbol is imported above but the OIDs are
# spelled out literally by the generator).
netappDataFabricManager = MibIdentifier((1, 3, 6, 1, 4, 1, 789, 3))
# Serial number of the DFM station itself (read-only scalar at 789.3.1).
dfmSerialNumber = MibScalar((1, 3, 6, 1, 4, 1, 789, 3, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmSerialNumber.setStatus('mandatory')
# --- dfmEventTable (789.3.11): one row per DFM event, indexed by dfmEventId.
# All columns are read-only; statuses are only set when MIB texts are loaded.
dfmEventTable = MibTable((1, 3, 6, 1, 4, 1, 789, 3, 11), )
if mibBuilder.loadTexts: dfmEventTable.setStatus('mandatory')
dfmEventEntry = MibTableRow((1, 3, 6, 1, 4, 1, 789, 3, 11, 1), ).setIndexNames((0, "DATAFABRIC-MANAGER-MIB", "dfmEventId"))
if mibBuilder.loadTexts: dfmEventEntry.setStatus('mandatory')
# Column 1: event identifier (row index).
dfmEventId = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventId.setStatus('mandatory')
# Column 2: id of the object that generated the event (see dfmEventSourceTable
# in column 8 for which table that id refers to).
dfmEventSourceId = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventSourceId.setStatus('mandatory')
# Column 3: severity enum — normal(1) .. emergency(6).
dfmEventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("normal", 1), ("information", 2), ("warning", 3), ("error", 4), ("critical", 5), ("emergency", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventSeverity.setStatus('mandatory')
# Column 4: event timestamp (plain Integer32; epoch format not stated here —
# consult the original MIB text to confirm).
dfmEventTimestamp = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventTimestamp.setStatus('mandatory')
# Columns 5-7: human-readable event name, message, and message details.
dfmEventName = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventName.setStatus('mandatory')
dfmEventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventMessage.setStatus('mandatory')
dfmEventMessageDetails = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventMessageDetails.setStatus('mandatory')
# Column 8: which table dfmEventSourceId indexes — objects(1) or quotas(2).
dfmEventSourceTable = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("objects", 1), ("quotas", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventSourceTable.setStatus('mandatory')
# Columns 9-10: serial number and product id of the event's source system.
dfmEventSourceSerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventSourceSerialNumber.setStatus('mandatory')
dfmEventSourceProductId = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 11, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmEventSourceProductId.setStatus('mandatory')
# --- dfmObjectTable (789.3.12): one row per managed object, indexed by
# dfmObjectId. All columns read-only.
dfmObjectTable = MibTable((1, 3, 6, 1, 4, 1, 789, 3, 12), )
if mibBuilder.loadTexts: dfmObjectTable.setStatus('mandatory')
dfmObjectEntry = MibTableRow((1, 3, 6, 1, 4, 1, 789, 3, 12, 1), ).setIndexNames((0, "DATAFABRIC-MANAGER-MIB", "dfmObjectId"))
if mibBuilder.loadTexts: dfmObjectEntry.setStatus('mandatory')
# Column 1: object identifier (row index).
dfmObjectId = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmObjectId.setStatus('mandatory')
# Column 2: fully qualified object name.
dfmObjectFullName = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmObjectFullName.setStatus('mandatory')
# Column 3: object-type enum, unknown(1) .. storageService(35).
dfmObjectType = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35))).clone(namedValues=NamedValues(("unknown", 1), ("mgmtStation", 2), ("host", 3), ("volume", 4), ("qtree", 5), ("config", 6), ("user", 7), ("network", 8), ("lun", 9), ("group", 10), ("fcSwitchPort", 11), ("directory", 12), ("hbaPort", 13), ("cmsJob", 14), ("aggregate", 15), ("interface", 16), ("script", 17), ("scriptJob", 18), ("dataset", 19), ("storageset", 20), ("resourcePool", 21), ("dpPolicy", 22), ("dpSchedule", 23), ("dpThrottle", 24), ("ossvDirectory", 25), ("schedule", 26), ("reportSchedule", 27), ("provPolicy", 28), ("vfilerTemplate", 29), ("disk", 30), ("scriptSchedule", 31), ("port", 32), ("ifgrp", 33), ("lif", 34), ("storageService", 35)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmObjectType.setStatus('mandatory')
# Column 4: object status enum, unknown(1) .. emergency(7).
dfmObjectStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("unknown", 1), ("unmanaged", 2), ("normal", 3), ("warning", 4), ("error", 5), ("critical", 6), ("emergency", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmObjectStatus.setStatus('mandatory')
# Columns 5-8: hosting relationships — host name, free-form comments, and the
# id/name of the physical host backing this object.
dfmHostFullName = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmHostFullName.setStatus('mandatory')
dfmCommentFields = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmCommentFields.setStatus('mandatory')
dfmPhysicalHostId = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmPhysicalHostId.setStatus('mandatory')
dfmPhysicalHostFullName = MibTableColumn((1, 3, 6, 1, 4, 1, 789, 3, 12, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dfmPhysicalHostFullName.setStatus('mandatory')
# Every DFM trap carries the same 19-varbind payload: station serial number,
# event identification/severity/message fields, and the source object's
# identity, type, and status. Hoist it once instead of repeating it per trap.
_dfmNotificationObjects = (
    ("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventId"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventName"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"),
    ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"),
    ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"),
    ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"),
    ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"),
    ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"),
    ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"),
    ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"),
    ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"),
    ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"),
)

def _dfmNotification(trap_number):
    # Build a NotificationType under the DFM subtree (…789.3.0.<trap_number>)
    # with the shared varbind payload, exactly as the expanded per-trap
    # statements did.
    oid = (1, 3, 6, 1, 4, 1, 789, 3) + (0, trap_number)
    return NotificationType(oid).setObjects(*_dfmNotificationObjects)

dfmEvtSnapshotSpaceOk = _dfmNotification(10001)
dfmEvtSnapshotFull = _dfmNotification(10002)
dfmEvtVolumeSpaceOk = _dfmNotification(10011)
dfmEvtVolumeAlmostFull = _dfmNotification(10012)
dfmEvtVolumeFull = _dfmNotification(10013)
dfmEvtInodesUtilOk = _dfmNotification(10021)
dfmEvtInodesAlmostFull = _dfmNotification(10022)
dfmEvtInodesFull = _dfmNotification(10023)
dfmEvtMgmtStNodeLimitOk = _dfmNotification(10031)
dfmEvtMgmtStNodeLimitNearlyReached = _dfmNotification(10032)
dfmEvtMgmtStNodeLimitReached = _dfmNotification(10033)
dfmEvtMgmtStPerfAdvisorFreeSpaceOk = _dfmNotification(10034)
dfmEvtMgmtStPerfAdvisorNotEnoughFreeSpace = _dfmNotification(10035)
dfmEvtVolumeOnline = _dfmNotification(10036)
dfmEvtVolumeOfflineOrDestroyed = _dfmNotification(10037)
dfmEvtVolumeOffline = _dfmNotification(10038)
dfmEvtVolumeDestroyed = _dfmNotification(10039)
dfmEvtVolumeRestricted = _dfmNotification(10040)
dfmEvtMgmtStLicenseNotExpired = _dfmNotification(10041)
dfmEvtMgmtStLicenseNearlyExpired = _dfmNotification(10042)
dfmEvtMgmtStLicenseExpired = _dfmNotification(10043)
dfmEvtMgmtStSchedulerUp = _dfmNotification(10049)
dfmEvtMgmtStSchedulerDown = _dfmNotification(10050)
dfmEvtMgmtStDatabaseUp = _dfmNotification(10051)
dfmEvtMgmtStDatabaseDown = _dfmNotification(10052)
dfmEvtMgmtStMonitorUp = _dfmNotification(10053)
dfmEvtMgmtStMonitorDown = _dfmNotification(10054)
dfmEvtMgmtStEventdUp = _dfmNotification(10055)
dfmEvtMgmtStEventdDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10056)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStWatchdogUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10057)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStWatchdogDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10058)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStServerUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10059)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStServerDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10060)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDisksSparesAvailable = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10061)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDisksNoSpares = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10062)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDisksNoneFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10071)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDisksSomeFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10072)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDisksReconstructNone = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10081)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDisksReconstructSome = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10082)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10091)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10092)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostReachable = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10093)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"))
dfmEvtHostUnreachable = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10094)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"))
dfmEvtQtreeSpaceOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10101)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeAlmostFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10102)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10103)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeFilesOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10104)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeFilesAlmostFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10105)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeFilesFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10106)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtCpuUtilOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10111)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtCpuTooBusy = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10112)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFansNormal = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10121)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFansOneFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10122)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFansManyFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10123)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtPowerSupplyOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10131)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtPowerSupplyOneFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10132)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
# ---------------------------------------------------------------------------
# DFM event trap definitions (sub-ids 10133-10203 under 1.3.6.1.4.1.789.3.0).
# Every dfmEvt* notification in this MIB carries the exact same 19-varbind
# payload, so the shared object list and OID construction are factored into
# one private helper instead of being repeated verbatim on every line.
# ---------------------------------------------------------------------------

# Varbind objects common to every DATAFABRIC-MANAGER-MIB event notification,
# in the order the original generated definitions listed them.
_DFM_EVT_OBJECTS = tuple(
    ("DATAFABRIC-MANAGER-MIB", _name) for _name in (
        "dfmSerialNumber",
        "dfmEventSourceSerialNumber",
        "dfmEventId",
        "dfmEventName",
        "dfmEventSeverity",
        "dfmEventTimestamp",
        "dfmEventMessage",
        "dfmEventMessageDetails",
        "dfmEventSourceId",
        "dfmEventSourceTable",
        "dfmObjectId",
        "dfmObjectType",
        "dfmObjectFullName",
        "dfmObjectStatus",
        "dfmEventSourceProductId",
        "dfmHostFullName",
        "dfmCommentFields",
        "dfmPhysicalHostId",
        "dfmPhysicalHostFullName",
    )
)


def _dfm_evt(trap_id):
    """Return a NotificationType for trap *trap_id* with the shared varbinds.

    The OID is 1.3.6.1.4.1.789.3.0.<trap_id>, matching the pattern of every
    generated dfmEvt* definition in this module.
    """
    return NotificationType(
        (1, 3, 6, 1, 4, 1, 789, 3) + (0, trap_id)
    ).setObjects(*_DFM_EVT_OBJECTS)


# Power supply / temperature events.
dfmEvtPowerSupplyManyFailed = _dfm_evt(10133)
dfmEvtTemperatureOk = _dfm_evt(10141)
dfmEvtTemperatureHot = _dfm_evt(10142)

# NVRAM battery state events.
dfmEvtNvramBatteryOk = _dfm_evt(10151)
dfmEvtNvramBatteryLow = _dfm_evt(10152)
dfmEvtNvramBatteryDischarged = _dfm_evt(10153)
dfmEvtNvramBatteryMissing = _dfm_evt(10154)
dfmEvtNvramBatteryOld = _dfm_evt(10155)
dfmEvtNvramBatteryReplace = _dfm_evt(10156)
dfmEvtNvramBatteryUnknown = _dfm_evt(10157)
dfmEvtNvramBatteryOverCharged = _dfm_evt(10158)
dfmEvtNvramBatteryFullyCharged = _dfm_evt(10159)

# Global (chassis) status events.
dfmEvtGlobalStatusOther = _dfm_evt(10161)
dfmEvtGlobalStatusUnknown = _dfm_evt(10162)
dfmEvtGlobalStatusOk = _dfm_evt(10163)
dfmEvtGlobalStatusNonCritical = _dfm_evt(10164)
dfmEvtGlobalStatusCritical = _dfm_evt(10165)
dfmEvtGlobalStatusNonRecoverable = _dfm_evt(10166)

# SnapMirror status events.
dfmEvtSnapMirrorOff = _dfm_evt(10171)
dfmEvtSnapMirrorWorking = _dfm_evt(10172)
dfmEvtSnapMirrorNotScheduled = _dfm_evt(10173)
dfmEvtSnapMirrorPossibleProblem = _dfm_evt(10174)
dfmEvtSnapMirrorNotWorking = _dfm_evt(10175)
dfmEvtSnapMirrorUnknown = _dfm_evt(10176)

# Cluster failover (CFO) configuration events.
dfmEvtCfoSettingsEnabled = _dfm_evt(10181)
dfmEvtCfoSettingsNotConfigured = _dfm_evt(10182)
dfmEvtCfoSettingsDisabled = _dfm_evt(10183)
dfmEvtCfoSettingsTakeoverDisabled = _dfm_evt(10184)
dfmEvtCfoSettingsThisNodeDead = _dfm_evt(10185)

# CFO local-node takeover state events.
dfmEvtCfoThisFilerCanTakeover = _dfm_evt(10191)
dfmEvtCfoThisFilerCannotTakeover = _dfm_evt(10192)
dfmEvtCfoThisFilerTakeover = _dfm_evt(10193)
dfmEvtCfoThisFilerDead = _dfm_evt(10194)

# CFO partner state events.
dfmEvtCfoPartnerOk = _dfm_evt(10201)
dfmEvtCfoPartnerMayBeDown = _dfm_evt(10202)
dfmEvtCfoPartnerDead = _dfm_evt(10203)
dfmEvtCfoInterconnectUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10211)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtCfoInterconnectNotPresent = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10212)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtCfoInterconnectDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10213)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtCfoInterconnectPartialFailure = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10214)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSvdirDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10220)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10221)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostSystemIdChanged = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10222)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostNameChanged = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10223)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtOssvHostDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10224)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostIdentityOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10225)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostIdentityConflict = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10226)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostLoginOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10227)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostLoginFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10228)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtPrimaryHostDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10229)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStFreeSpaceOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10231)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStNotEnoughFreeSpace = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10232)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStFileSystemFileSizeLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10233)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtConfigFileChanged = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10241)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtConfigGroupChanged = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10242)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorDateOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10261)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorNearlyOutOfDate = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10262)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorOutOfDate = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10263)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10264)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorUndeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10265)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10266)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapMirrorModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10267)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtNetworkOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10281)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtNetworkTooLarge = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10282)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtUserDiskSpaceQuotaOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10291)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"))
dfmEvtUserDiskSpaceQuotaAlmostFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10292)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"))
dfmEvtUserDiskSpaceQuotaFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10293)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"))
dfmEvtUserFilesQuotaOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10301)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"))
dfmEvtUserFilesQuotaAlmostFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10302)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"))
dfmEvtUserFilesQuotaFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10303)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"))
dfmEvtLunOnline = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10311)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLunOffline = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10312)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLunSnapshotOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10313)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLunSnapshotNotPossible = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10314)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLunHostClusterConfigOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10315)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLunHostClusterConfigError = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10316)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtUserEmailAddressRejected = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10321)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtUserEmailAddressOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10322)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFCSwitchPortFaulty = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10331)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFCSwitchPortOffline = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10332)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFCSwitchPortOnline = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10333)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipCreated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10334)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10335)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10336)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultReplicaDateOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10337)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultReplicaNearlyOutOfDate = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10338)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultReplicaOutOfDate = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10339)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultBackupCompleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10340)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultBackupAborted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10341)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultBackupFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10342)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRestoreCompleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10343)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRestoreAborted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10344)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRestoreFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10345)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10346)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipCreateFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10347)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipCreateAborted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10348)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipDeleteFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10349)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapvaultRelationshipDeleteAborted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10350)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtTestAlarm = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10351)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
def _dfm_quota_trap(trap_id):
    """Build a quota soft-limit notification (OID 1.3.6.1.4.1.789.3.0.<trap_id>).

    These traps carry only the event/source varbinds — no dfmObject* or
    host-identification objects.
    """
    return NotificationType(
        (1, 3, 6, 1, 4, 1, 789, 3) + (0, trap_id)
    ).setObjects(*(
        ("DATAFABRIC-MANAGER-MIB", symbol) for symbol in (
            "dfmSerialNumber",
            "dfmEventSourceSerialNumber",
            "dfmEventId",
            "dfmEventName",
            "dfmEventSeverity",
            "dfmEventTimestamp",
            "dfmEventMessage",
            "dfmEventMessageDetails",
            "dfmEventSourceId",
            "dfmEventSourceTable",
            "dfmEventSourceProductId",
        )
    ))

dfmEvtUserDiskSpaceSoftLimitNotExceeded = _dfm_quota_trap(10361)
dfmEvtUserDiskSpaceSoftLimitExceeded = _dfm_quota_trap(10362)
dfmEvtUserFilesSoftLimitNotExceeded = _dfm_quota_trap(10371)
dfmEvtUserFilesSoftLimitExceeded = _dfm_quota_trap(10372)
def _dfm_object_trap(trap_id):
    """Build an object-scoped notification (OID 1.3.6.1.4.1.789.3.0.<trap_id>).

    HBA-port, host-agent and SnapMirror traps all share the same full varbind
    list: event/source identification plus the dfmObject* and host fields.
    """
    return NotificationType(
        (1, 3, 6, 1, 4, 1, 789, 3) + (0, trap_id)
    ).setObjects(*(
        ("DATAFABRIC-MANAGER-MIB", symbol) for symbol in (
            "dfmSerialNumber",
            "dfmEventSourceSerialNumber",
            "dfmEventId",
            "dfmEventName",
            "dfmEventSeverity",
            "dfmEventTimestamp",
            "dfmEventMessage",
            "dfmEventMessageDetails",
            "dfmEventSourceId",
            "dfmEventSourceTable",
            "dfmObjectId",
            "dfmObjectType",
            "dfmObjectFullName",
            "dfmObjectStatus",
            "dfmEventSourceProductId",
            "dfmHostFullName",
            "dfmCommentFields",
            "dfmPhysicalHostId",
            "dfmPhysicalHostFullName",
        )
    ))

# HBA port and host-agent state traps.
dfmEvtHbaPortOnline = _dfm_object_trap(10373)
dfmEvtHbaPortOffline = _dfm_object_trap(10374)
dfmEvtHbaPortError = _dfm_object_trap(10375)
dfmEvtSanHostLunChanged = _dfm_object_trap(10376)
dfmEvtHostAgentUp = _dfm_object_trap(10377)
dfmEvtHostAgentDown = _dfm_object_trap(10378)
dfmEvtHbaPortTrafficHigh = _dfm_object_trap(10379)
dfmEvtHbaPortTrafficOk = _dfm_object_trap(10380)
# SnapMirror relationship lifecycle traps.
dfmEvtSnapMirrorInSync = _dfm_object_trap(10381)
dfmEvtSnapMirrorOutOfSync = _dfm_object_trap(10382)
dfmEvtSnapMirrorInitCompleted = _dfm_object_trap(10383)
dfmEvtSnapMirrorInitAborted = _dfm_object_trap(10384)
dfmEvtSnapMirrorInitFailed = _dfm_object_trap(10385)
dfmEvtSnapMirrorUpdateCompleted = _dfm_object_trap(10386)
dfmEvtSnapMirrorUpdateAborted = _dfm_object_trap(10387)
dfmEvtSnapMirrorUpdateFailed = _dfm_object_trap(10388)
dfmEvtSnapMirrorBreakCompleted = _dfm_object_trap(10389)
dfmEvtSnapMirrorBreakFailed = _dfm_object_trap(10390)
dfmEvtSnapMirrorResyncCompleted = _dfm_object_trap(10391)
dfmEvtSnapMirrorResyncAborted = _dfm_object_trap(10392)
dfmEvtSnapMirrorResyncFailed = _dfm_object_trap(10393)
dfmEvtSnapMirrorQuiesceCompleted = _dfm_object_trap(10394)
dfmEvtSnapMirrorQuiesceAborted = _dfm_object_trap(10395)
dfmEvtSnapMirrorQuiesceFailed = _dfm_object_trap(10396)
dfmEvtSnapMirrorResumeCompleted = _dfm_object_trap(10397)
dfmEvtSnapMirrorResumeFailed = _dfm_object_trap(10398)
dfmEvtSnapMirrorAbortCompleted = _dfm_object_trap(10399)
dfmEvtSnapMirrorAbortFailed = _dfm_object_trap(10400)
dfmEvtSnapMirrorDeleteCompleted = _dfm_object_trap(10401)
dfmEvtSnapMirrorDeleteFailed = _dfm_object_trap(10402)
# NOTE(review): unlike every sibling dfmEvtSnapMirror* trap in this file, this
# trap's object list omits "dfmEventSourceTable" and "dfmCommentFields" —
# confirm against the DATAFABRIC-MANAGER-MIB source whether that is intentional
# or a generation artifact before relying on those varbinds being absent.
dfmEvtSnapMirrorDeleteAborted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10403)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStLoadOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10411)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMgmtStLoadTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10412)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostSnmpUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10422)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostSnmpDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10423)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEnvEnclOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10432)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEnvEnclFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10433)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEnvEnclFound = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10434)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEnvEnclDissapeared = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10435)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEnvEnclInactive = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10436)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEnvEnclActive = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10437)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateSpaceOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10451)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateAlmostFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10452)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateFull = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10453)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateOvercommitOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10461)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateAlmostOvercommitted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10462)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateOvercommitted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10463)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVolumeCloneDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10471)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVolumeCloneDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10472)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10481)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10482)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtTrapListenerFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10491)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtTrapListenerOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10492)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostColdStart = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10501)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtEmergencyTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10511)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAlertTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10521)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtCriticalTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10531)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtErrorTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10541)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtWarningTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10551)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtNotificationTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10561)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtInformationTrap = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10571)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
# Every DFM event trap below carries the exact same varbind list. Build the
# 19-element object list once instead of repeating the literal in all 36
# NotificationType definitions (pure DRY refactor; each public name is still
# bound to an equivalent NotificationType object).
_dfmEventObjects = tuple(
    ("DATAFABRIC-MANAGER-MIB", _name)
    for _name in (
        "dfmSerialNumber",
        "dfmEventSourceSerialNumber",
        "dfmEventId",
        "dfmEventName",
        "dfmEventSeverity",
        "dfmEventTimestamp",
        "dfmEventMessage",
        "dfmEventMessageDetails",
        "dfmEventSourceId",
        "dfmEventSourceTable",
        "dfmObjectId",
        "dfmObjectType",
        "dfmObjectFullName",
        "dfmObjectStatus",
        "dfmEventSourceProductId",
        "dfmHostFullName",
        "dfmCommentFields",
        "dfmPhysicalHostId",
        "dfmPhysicalHostFullName",
    )
)


def _dfmEvt(trapNumber):
    """Return a NotificationType rooted at enterprises.netapp(789).3.0.<trapNumber>
    with the common DFM event varbinds attached.

    trapNumber -- the trailing sub-identifier of the notification OID (int).
    """
    return NotificationType(
        (1, 3, 6, 1, 4, 1, 789, 3) + (0, trapNumber)
    ).setObjects(*_dfmEventObjects)


# Interface admin-status transitions (.0.1058x)
dfmEvtIfAdminStatusUp = _dfmEvt(10581)
dfmEvtIfAdminStatusDown = _dfmEvt(10582)
dfmEvtIfAdminStatusTesting = _dfmEvt(10583)
dfmEvtIfAdminStatusUnknown = _dfmEvt(10584)
# Script events by severity (.0.1059x)
dfmEvtScriptEmergency = _dfmEvt(10591)
dfmEvtScriptCritical = _dfmEvt(10592)
dfmEvtScriptError = _dfmEvt(10593)
dfmEvtScriptWarning = _dfmEvt(10594)
dfmEvtScriptInformation = _dfmEvt(10595)
dfmEvtScriptNormal = _dfmEvt(10596)
dfmEvtScriptScheduleEnabled = _dfmEvt(10597)
dfmEvtScriptScheduleDisabled = _dfmEvt(10598)
# Aggregate snapshot-reserve fill levels (.0.1060x)
dfmEvtAggrSnapReserveOk = _dfmEvt(10601)
dfmEvtAggrSnapReserveNearlyFull = _dfmEvt(10602)
dfmEvtAggrSnapReserveFull = _dfmEvt(10603)
# Volume space-reserve fill levels (.0.1061x)
dfmEvtVolumeSpaceReserveOk = _dfmEvt(10611)
dfmEvtVolumeSpaceReserveNearlyFull = _dfmEvt(10612)
dfmEvtVolumeSpaceReserveFull = _dfmEvt(10613)
# Volume first-snapshot availability (.0.1062x)
dfmEvtVolumeFirstSnapOk = _dfmEvt(10621)
dfmEvtVolumeNearlyNoFirstSnap = _dfmEvt(10622)
dfmEvtVolumeNoFirstSnap = _dfmEvt(10623)
# Volume snapshot lifecycle (.0.1063x)
dfmEvtVolumeNewSnapshot = _dfmEvt(10631)
dfmEvtVolumeSnapshotDeleted = _dfmEvt(10632)
# Local filer configuration status (.0.1064x)
dfmEvtLocalFilerConfigStatusOk = _dfmEvt(10641)
dfmEvtLocalFilerConfigStatusChanged = _dfmEvt(10642)
# Filer login results (.0.1065x)
dfmEvtFilerLoginOk = _dfmEvt(10651)
dfmEvtFilerLoginFailed = _dfmEvt(10652)
# NDMP service status (.0.1066x)
dfmEvtNdmpStatusUp = _dfmEvt(10661)
dfmEvtNdmpStatusDown = _dfmEvt(10662)
# Filer configuration push results (.0.1067x)
dfmEvtFilerConfigPushStatusOk = _dfmEvt(10671)
dfmEvtFilerConfigPushStatusError = _dfmEvt(10672)
# vFiler lifecycle events (.0.1068x)
dfmEvtVfilerDiscovered = _dfmEvt(10681)
dfmEvtVfilerDeleted = _dfmEvt(10682)
dfmEvtVfilerRenamed = _dfmEvt(10683)
dfmEvtVfilerStorageUnitAdded = _dfmEvt(10684)
dfmEvtVfilerStorageUnitRemoved = _dfmEvt(10685)
dfmEvtVfilerIpAddressAdded = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10686)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerIpAddressRemoved = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10687)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerHostingFilerLoginOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10688)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerHostingFilerLoginFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10689)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVolumeGrowthRateAbnormal = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10691)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVolumeGrowthRateOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10692)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeGrowthRateAbnormal = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10693)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtQtreeGrowthRateOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10694)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapSchedModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10701)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapSchedSmConflict = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10711)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapSchedSmOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10712)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapSchedSvConflict = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10721)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapSchedSvOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10722)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtTooManySnapshots = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10731)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtNotTooManySnapshots = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10732)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapDisabled = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10741)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapEnabled = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10742)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatabaseBackupSucceeded = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10751)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatabaseBackupFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10752)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatabaseRestoreSucceeded = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10753)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatabaseRestoreFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10754)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMaxdirsizeReached = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10761)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtMaxdirsizeNearlyReached = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10762)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapTooOld = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10771)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtSnapNotTooOld = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10772)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtRpmOnline = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10781)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtRpmUnavailable = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10782)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateStateFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,10791)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
# Notification (trap) definitions for the DATAFABRIC-MANAGER-MIB dfm event
# subtree.  Every trap OID lives under 1.3.6.1.4.1.789.3.0 (enterprises.
# netapp.3.0) followed by a per-event code, and every trap carries the same
# core set of dfm varbinds; only the event code and two optional varbinds
# (dfmEventSourceTable, dfmCommentFields) vary between traps.

# Shared OID prefix for all dfm notifications.
_dfmTrapRoot = (1, 3, 6, 1, 4, 1, 789, 3, 0)

# Varbinds common to every dfm notification, split around the two optional
# insertion points, in the exact emission order of the generated MIB.
_dfmVarbindsHead = (
    "dfmSerialNumber", "dfmEventSourceSerialNumber", "dfmEventId",
    "dfmEventName", "dfmEventSeverity", "dfmEventTimestamp",
    "dfmEventMessage", "dfmEventMessageDetails", "dfmEventSourceId",
)
_dfmVarbindsTail = (
    "dfmObjectId", "dfmObjectType", "dfmObjectFullName", "dfmObjectStatus",
    "dfmEventSourceProductId", "dfmHostFullName",
)


def _dfmNotification(code, withSourceTable=False, withComments=False):
    """Build one dfm NotificationType for trap *code*.

    withSourceTable inserts dfmEventSourceTable after dfmEventSourceId;
    withComments inserts dfmCommentFields after dfmHostFullName.  All
    varbinds are referenced as ("DATAFABRIC-MANAGER-MIB", symbol) pairs.
    """
    symbols = list(_dfmVarbindsHead)
    if withSourceTable:
        symbols.append("dfmEventSourceTable")
    symbols.extend(_dfmVarbindsTail)
    if withComments:
        symbols.append("dfmCommentFields")
    symbols.extend(("dfmPhysicalHostId", "dfmPhysicalHostFullName"))
    objects = [("DATAFABRIC-MANAGER-MIB", sym) for sym in symbols]
    return NotificationType(_dfmTrapRoot + (code,)).setObjects(*objects)


# Aggregate state transitions (carry source-table and comment varbinds).
dfmEvtAggregateStateOnline = _dfmNotification(10792, withSourceTable=True, withComments=True)
dfmEvtAggregateStateOffline = _dfmNotification(10793, withSourceTable=True, withComments=True)
dfmEvtAggregateStateRestricted = _dfmNotification(10794, withSourceTable=True, withComments=True)

# Dataset lifecycle, snapshot, provisioning, protection and conformance events.
dfmEvtDatasetCreated = _dfmNotification(10801)
dfmEvtDatasetDeleted = _dfmNotification(10811)
dfmEvtDatasetModified = _dfmNotification(10821)
dfmEvtSnapshotCreated = _dfmNotification(10831)
dfmEvtSnapshotFailed = _dfmNotification(10833)
dfmEvtDatasetProvisioningOk = _dfmNotification(10841)
dfmEvtDatasetProvisioningFailed = _dfmNotification(10842)
dfmEvtDatasetProtectionProtected = _dfmNotification(10851)
dfmEvtDatasetProtectionFailure = _dfmNotification(10852)
dfmEvtDatasetProtectionSuspended = _dfmNotification(10853)
dfmEvtDatasetProtectionLagError = _dfmNotification(10854)
dfmEvtDatasetProtectionLagWarning = _dfmNotification(10855)
dfmEvtDatasetProtectionUninitialized = _dfmNotification(10856)
dfmEvtDatasetConformant = _dfmNotification(10861)
dfmEvtDatasetConforming = _dfmNotification(10862)
dfmEvtDatasetNonConformant = _dfmNotification(10863)
dfmEvtDatasetCloneSnapshotFound = _dfmNotification(10864)
dfmEvtDatasetNoCloneSnapshotFound = _dfmNotification(10865)
dfmEvtDatasetWriteCheckWarning = _dfmNotification(10866)
dfmEvtDatasetWriteCheckOk = _dfmNotification(10867)

# Resource-pool lifecycle and space events.
dfmEvtResourcePoolCreated = _dfmNotification(10871)
dfmEvtResourcePoolDeleted = _dfmNotification(10881)
dfmEvtResourcePoolModified = _dfmNotification(10891)
dfmEvtResourcePoolSpaceOk = _dfmNotification(10901)
dfmEvtResourcePoolSpaceNearlyFull = _dfmNotification(10902)
dfmEvtResourcePoolSpaceFull = _dfmNotification(10903)

# Data-protection policy and schedule lifecycle events.
dfmEvtDataProtectionPolicyCreated = _dfmNotification(10911)
dfmEvtDataProtectionPolicyDeleted = _dfmNotification(10921)
dfmEvtDataProtectionPolicyModified = _dfmNotification(10931)
dfmEvtDataProtectionScheduleCreated = _dfmNotification(10941)
dfmEvtDataProtectionScheduleDeleted = _dfmNotification(10951)
dfmEvtDataProtectionScheduleModified = _dfmNotification(10961)

# NDMP credential and protection-job events (carry source-table varbind).
dfmEvtNdmpCredentialsGood = _dfmNotification(10971, withSourceTable=True)
dfmEvtNdmpCredentialsBad = _dfmNotification(10972, withSourceTable=True)
dfmEvtDataProtectionJobStarted = _dfmNotification(10981, withSourceTable=True)

# Host lifecycle events.
dfmEvtHostDeleted = _dfmNotification(10991)
dfmEvtHostModified = _dfmNotification(11001)
dfmEvtResourceGroupCreated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11011)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtResourceGroupDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11021)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtResourceGroupModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11031)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAlarmCreated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11041)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAlarmDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11051)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAlarmModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11061)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtNdmpCommunicationUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11071)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtNdmpCommunicationDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11081)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetBackupCompleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11090)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetBackupAborted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11091)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetBackupFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11092)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFilerCommunicationOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11100)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtFilerCommunicationFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11101)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLocalVfilerConfigStatusOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11111)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtLocalVfilerConfigStatusChanged = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11112)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerConfigPushStatusOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11113)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerConfigPushStatusError = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11114)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtScheduledReportOk = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11121)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtScheduledReportFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11122)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtReportScheduleEnabled = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11123)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtReportScheduleDisabled = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11124)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostRoleDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11131)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostRoleDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11132)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostRoleModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11133)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostUsergroupDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11134)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostUsergroupDeleted = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11135)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostUsergroupModified = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11136)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostUserDiscovered = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11137)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
# Auto-generated trap definitions for the NetApp DATAFABRIC-MANAGER-MIB.
# Every notification lives under the enterprise trap root 1.3.6.1.4.1.789.3
# and carries one of two fixed varbind lists, which differ only in whether
# dfmEventSourceTable and dfmCommentFields are present.

_DFM_MIB = "DATAFABRIC-MANAGER-MIB"
_DFM_TRAP_ROOT = (1, 3, 6, 1, 4, 1, 789, 3)

# Full varbind list, in MIB declaration order.
_DFM_VARBINDS_FULL = (
    "dfmSerialNumber",
    "dfmEventSourceSerialNumber",
    "dfmEventId",
    "dfmEventName",
    "dfmEventSeverity",
    "dfmEventTimestamp",
    "dfmEventMessage",
    "dfmEventMessageDetails",
    "dfmEventSourceId",
    "dfmEventSourceTable",
    "dfmObjectId",
    "dfmObjectType",
    "dfmObjectFullName",
    "dfmObjectStatus",
    "dfmEventSourceProductId",
    "dfmHostFullName",
    "dfmCommentFields",
    "dfmPhysicalHostId",
    "dfmPhysicalHostFullName",
)

# Reduced variant: same list minus dfmEventSourceTable and dfmCommentFields.
_DFM_VARBINDS_SHORT = tuple(
    name for name in _DFM_VARBINDS_FULL
    if name not in ("dfmEventSourceTable", "dfmCommentFields")
)


def _dfm_trap(sub_id, varbinds):
    """Build a NotificationType at trap root + (0, sub_id) with the given varbinds."""
    objects = [(_DFM_MIB, name) for name in varbinds]
    return NotificationType(_DFM_TRAP_ROOT + (0, sub_id)).setObjects(*objects)


dfmEvtHostUserDeleted = _dfm_trap(11138, _DFM_VARBINDS_FULL)
dfmEvtHostUserModified = _dfm_trap(11139, _DFM_VARBINDS_FULL)
dfmEvtHostDomainUserModified = _dfm_trap(11140, _DFM_VARBINDS_FULL)
dfmEvtVolumeQuotaOvercommitOk = _dfm_trap(11151, _DFM_VARBINDS_FULL)
dfmEvtVolumeQuotaAlmostOvercommitted = _dfm_trap(11152, _DFM_VARBINDS_FULL)
dfmEvtVolumeQuotaOvercommitted = _dfm_trap(11153, _DFM_VARBINDS_FULL)
dfmEvtDatasetMemberResized = _dfm_trap(11171, _DFM_VARBINDS_SHORT)
dfmEvtDatasetMemberResizeFailed = _dfm_trap(11172, _DFM_VARBINDS_SHORT)
dfmEvtProvisioningPolicyCreated = _dfm_trap(11181, _DFM_VARBINDS_SHORT)
dfmEvtProvisioningPolicyDeleted = _dfm_trap(11182, _DFM_VARBINDS_SHORT)
dfmEvtProvisioningPolicyModified = _dfm_trap(11183, _DFM_VARBINDS_SHORT)
dfmEvtVFilerTemplateCreated = _dfm_trap(11191, _DFM_VARBINDS_SHORT)
dfmEvtVFilerTemplateDeleted = _dfm_trap(11192, _DFM_VARBINDS_SHORT)
dfmEvtVFilerTemplateModified = _dfm_trap(11193, _DFM_VARBINDS_SHORT)
dfmEvtDatasetSpaceOk = _dfm_trap(11201, _DFM_VARBINDS_SHORT)
dfmEvtDatasetSpaceWarning = _dfm_trap(11202, _DFM_VARBINDS_SHORT)
dfmEvtDatasetSpaceError = _dfm_trap(11203, _DFM_VARBINDS_SHORT)
dfmEvtVolumeAutosized = _dfm_trap(11211, _DFM_VARBINDS_SHORT)
dfmEvtVolumeSnapshotsAutoDeleted = _dfm_trap(11221, _DFM_VARBINDS_SHORT)
dfmEvtVolumeNextSnapshotNotPossible = _dfm_trap(11231, _DFM_VARBINDS_SHORT)
dfmEvtVolumeNextSnapshotPossible = _dfm_trap(11232, _DFM_VARBINDS_SHORT)
dfmEvtDatasetMemberDestroyed = _dfm_trap(11241, _DFM_VARBINDS_SHORT)
dfmEvtDatasetMemberDestroyFailed = _dfm_trap(11242, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStateReady = _dfm_trap(11400, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStateFailingOver = _dfm_trap(11401, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStateFailedOver = _dfm_trap(11402, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStateFailoverError = _dfm_trap(11403, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStatusNormal = _dfm_trap(11410, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStatusWarning = _dfm_trap(11411, _DFM_VARBINDS_SHORT)
dfmEvtDatasetDrStatusError = _dfm_trap(11412, _DFM_VARBINDS_SHORT)
dfmEvtDataExportOk = _dfm_trap(11161, _DFM_VARBINDS_FULL)
dfmEvtDataExportFailed = _dfm_trap(11162, _DFM_VARBINDS_FULL)
dfmEvtMgmtStProvMgrNodeLimitOk = _dfm_trap(11262, _DFM_VARBINDS_FULL)
dfmEvtMgmtStProvMgrNodeLimitNearlyReached = _dfm_trap(11263, _DFM_VARBINDS_FULL)
dfmEvtMgmtStProvMgrNodeLimitReached = _dfm_trap(11264, _DFM_VARBINDS_FULL)
dfmEvtMgmtStProtMgrNodeLimitOk = _dfm_trap(11265, _DFM_VARBINDS_FULL)
dfmEvtMgmtStProtMgrNodeLimitNearlyReached = _dfm_trap(11266, _DFM_VARBINDS_FULL)
dfmEvtMgmtStProtMgrNodeLimitReached = _dfm_trap(11267, _DFM_VARBINDS_FULL)
dfmEvtDatasetNotMigrating = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11270)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMigrating = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11271)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMigrateFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11272)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMigratedWithErrors = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11273)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMigrated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11274)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetRollbackWithErrors = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11275)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"))
dfmEvtDatasetRollback = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11279)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"))
dfmEvtVfilerNotMigrating = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11280)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerMigrating = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11281)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerMigrateFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11282)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerMigratedWithErrors = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11283)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerMigrated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11284)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVolumeOverDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11285)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVfilerRollbackWithErrors = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11286)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"))
dfmEvtVfilerRollback = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11291)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"))
dfmEvtVolumeNearlyOverDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11292)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtVolumeNotOverDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11293)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateOverDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11294)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateNearlyOverDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11295)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtAggregateNotOverDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11296)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMemberDeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11297)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMemberDeduplicationFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11298)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMemberUndeduplicated = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11299)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtDatasetMemberUndeduplicationFailed = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11300)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostNfsServiceStatusUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11310)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostNfsServiceStatusDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11371)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostCifsServiceStatusUp = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11372)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
dfmEvtHostCifsServiceStatusDown = NotificationType((1, 3, 6, 1, 4, 1, 789, 3) + (0,11373)).setObjects(("DATAFABRIC-MANAGER-MIB", "dfmSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceSerialNumber"), ("DATAFABRIC-MANAGER-MIB", "dfmEventId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventName"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSeverity"), ("DATAFABRIC-MANAGER-MIB", "dfmEventTimestamp"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessage"), ("DATAFABRIC-MANAGER-MIB", "dfmEventMessageDetails"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceId"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceTable"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectId"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectType"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmObjectStatus"), ("DATAFABRIC-MANAGER-MIB", "dfmEventSourceProductId"), ("DATAFABRIC-MANAGER-MIB", "dfmHostFullName"), ("DATAFABRIC-MANAGER-MIB", "dfmCommentFields"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostId"), ("DATAFABRIC-MANAGER-MIB", "dfmPhysicalHostFullName"))
# NOTE(review): this section is machine-generated MIB glue; the factoring below
# keeps every exported name and constructed object identical while removing the
# copy-pasted 19-entry varbind list from each definition.
_DFM_MIB = "DATAFABRIC-MANAGER-MIB"
# Trap root for all DataFabric Manager notifications: 1.3.6.1.4.1.789.3
_DFM_TRAP_ROOT = (1, 3, 6, 1, 4, 1, 789, 3)
# Varbinds carried by host-service and comment-field events: the common event
# payload plus the physical-host identifiers (dfmPhysicalHost*).
_DFM_EVT_OBJECTS_WITH_PHYS_HOST = tuple(
    (_DFM_MIB, objectName)
    for objectName in (
        "dfmSerialNumber",
        "dfmEventSourceSerialNumber",
        "dfmEventId",
        "dfmEventName",
        "dfmEventSeverity",
        "dfmEventTimestamp",
        "dfmEventMessage",
        "dfmEventMessageDetails",
        "dfmEventSourceId",
        "dfmEventSourceTable",
        "dfmObjectId",
        "dfmObjectType",
        "dfmObjectFullName",
        "dfmObjectStatus",
        "dfmEventSourceProductId",
        "dfmHostFullName",
        "dfmCommentFields",
        "dfmPhysicalHostId",
        "dfmPhysicalHostFullName",
    )
)


def _dfmPhysHostNotification(trapId):
    """Return the NotificationType for trap sub-id *trapId* (OID root + (0, trapId))
    carrying the common event varbinds plus the physical-host identifiers."""
    return NotificationType(_DFM_TRAP_ROOT + (0, trapId)).setObjects(*_DFM_EVT_OBJECTS_WITH_PHYS_HOST)


dfmEvtHostIscsiServiceStatusUp = _dfmPhysHostNotification(11374)
dfmEvtHostIscsiServiceStatusDown = _dfmPhysHostNotification(11375)
dfmEvtHostFcpServiceStatusUp = _dfmPhysHostNotification(11376)
dfmEvtHostFcpServiceStatusDown = _dfmPhysHostNotification(11377)
dfmEvtCommentFieldCreated = _dfmPhysHostNotification(11378)
dfmEvtCommentFieldModified = _dfmPhysHostNotification(11379)
dfmEvtCommentFieldDestroyed = _dfmPhysHostNotification(11380)
dfmEvtNfsPerClientStatsEnabled = _dfmPhysHostNotification(11390)
dfmEvtNfsPerClientStatsDisabled = _dfmPhysHostNotification(11391)
dfmEvtCifsPerClientStatsEnabled = _dfmPhysHostNotification(11392)
dfmEvtCifsPerClientStatsDisabled = _dfmPhysHostNotification(11393)
# NOTE(review): machine-generated MIB glue; the shared 17-entry varbind list is
# factored out so every exported name and constructed object stays identical
# while each notification definition becomes a single reviewable line.
_DFM_MIB = "DATAFABRIC-MANAGER-MIB"
# Trap root for all DataFabric Manager notifications: 1.3.6.1.4.1.789.3
_DFM_TRAP_ROOT = (1, 3, 6, 1, 4, 1, 789, 3)
# Common event varbinds (without the dfmPhysicalHost* identifiers) carried by
# vserver/cluster/port/lif/space-management/storage-service events.
_DFM_EVT_OBJECTS_BASE = tuple(
    (_DFM_MIB, objectName)
    for objectName in (
        "dfmSerialNumber",
        "dfmEventSourceSerialNumber",
        "dfmEventId",
        "dfmEventName",
        "dfmEventSeverity",
        "dfmEventTimestamp",
        "dfmEventMessage",
        "dfmEventMessageDetails",
        "dfmEventSourceId",
        "dfmEventSourceTable",
        "dfmObjectId",
        "dfmObjectType",
        "dfmObjectFullName",
        "dfmObjectStatus",
        "dfmEventSourceProductId",
        "dfmHostFullName",
        "dfmCommentFields",
    )
)


def _dfmBaseNotification(trapId):
    """Return the NotificationType for trap sub-id *trapId* (OID root + (0, trapId))
    carrying the common event varbind list."""
    return NotificationType(_DFM_TRAP_ROOT + (0, trapId)).setObjects(*_DFM_EVT_OBJECTS_BASE)


dfmEvtVserverDiscovered = _dfmBaseNotification(11421)
dfmEvtVserverDeleted = _dfmBaseNotification(11422)
dfmEvtVserverRenamed = _dfmBaseNotification(11423)
dfmEvtClusterDiscovered = _dfmBaseNotification(11431)
dfmEvtClusterRenamed = _dfmBaseNotification(11432)
dfmEvtClusterNodeAdded = _dfmBaseNotification(11433)
dfmEvtClusterNodeRemoved = _dfmBaseNotification(11434)
dfmEvtPortStatusUndef = _dfmBaseNotification(11435)
dfmEvtPortStatusUp = _dfmBaseNotification(11436)
dfmEvtPortStatusDown = _dfmBaseNotification(11437)
dfmEvtPortStatusUnknown = _dfmBaseNotification(11438)
dfmEvtPortRoleChange = _dfmBaseNotification(11439)
dfmEvtLifStatusUp = _dfmBaseNotification(11440)
dfmEvtLifStatusDown = _dfmBaseNotification(11441)
dfmEvtLifStatusUnknown = _dfmBaseNotification(11442)
dfmEvtLifMigrated = _dfmBaseNotification(11443)
dfmEvtSpaceManagementJobStarted = _dfmBaseNotification(11451)
dfmEvtSpaceManagementJobSucceeded = _dfmBaseNotification(11452)
dfmEvtSpaceManagementJobFailed = _dfmBaseNotification(11453)
dfmEvtStorageServiceCreated = _dfmBaseNotification(11461)
dfmEvtStorageServiceModified = _dfmBaseNotification(11462)
dfmEvtStorageServiceDeleted = _dfmBaseNotification(11463)
dfmEvtStorageServiceDatasetProvisioned = _dfmBaseNotification(11464)
dfmEvtStorageServiceDatasetDetached = _dfmBaseNotification(11465)
dfmEvtStorageServiceDatasetAttached = _dfmBaseNotification(11466)
# Generated export of DATAFABRIC-MANAGER-MIB symbols into the MIB builder's
# symbol table (first of two calls — exportSymbols takes keyword args, and the
# symbol count exceeds what one call can carry, presumably CPython's 255-argument
# limit on older versions — TODO confirm).  Do not edit by hand.
mibBuilder.exportSymbols("DATAFABRIC-MANAGER-MIB", dfmEvtSnapMirrorNotScheduled=dfmEvtSnapMirrorNotScheduled, dfmEvtClusterDiscovered=dfmEvtClusterDiscovered, dfmEvtNvramBatteryUnknown=dfmEvtNvramBatteryUnknown, dfmEvtVolumeOnline=dfmEvtVolumeOnline, dfmEvtHostUserDeleted=dfmEvtHostUserDeleted, dfmEvtPowerSupplyOk=dfmEvtPowerSupplyOk, dfmEvtDataExportOk=dfmEvtDataExportOk, dfmEvtClusterNodeRemoved=dfmEvtClusterNodeRemoved, dfmEvtNotTooManySnapshots=dfmEvtNotTooManySnapshots, dfmEvtReportScheduleDisabled=dfmEvtReportScheduleDisabled, dfmEvtCfoSettingsThisNodeDead=dfmEvtCfoSettingsThisNodeDead, dfmEvtDatasetProtectionProtected=dfmEvtDatasetProtectionProtected, dfmEvtUserDiskSpaceQuotaAlmostFull=dfmEvtUserDiskSpaceQuotaAlmostFull, dfmEvtDatasetSpaceError=dfmEvtDatasetSpaceError, dfmEvtMgmtStEventdUp=dfmEvtMgmtStEventdUp, dfmEvtDatasetProtectionUninitialized=dfmEvtDatasetProtectionUninitialized, dfmEvtDatasetNotMigrating=dfmEvtDatasetNotMigrating, dfmEvtSnapvaultReplicaDateOk=dfmEvtSnapvaultReplicaDateOk, dfmEvtSnapvaultRelationshipDeleted=dfmEvtSnapvaultRelationshipDeleted, dfmEvtSnapMirrorDeleteAborted=dfmEvtSnapMirrorDeleteAborted, dfmObjectFullName=dfmObjectFullName, dfmEvtAlarmDeleted=dfmEvtAlarmDeleted, dfmEvtMgmtStLicenseNotExpired=dfmEvtMgmtStLicenseNotExpired, dfmEvtScheduledReportOk=dfmEvtScheduledReportOk, dfmEvtCfoPartnerOk=dfmEvtCfoPartnerOk, dfmEvtGlobalStatusUnknown=dfmEvtGlobalStatusUnknown, dfmEvtGlobalStatusCritical=dfmEvtGlobalStatusCritical, dfmEvtSnapMirrorDiscovered=dfmEvtSnapMirrorDiscovered, dfmEvtHostLoginOk=dfmEvtHostLoginOk, dfmEvtDatasetConformant=dfmEvtDatasetConformant, dfmEvtDatasetNonConformant=dfmEvtDatasetNonConformant, dfmEvtOssvHostDiscovered=dfmEvtOssvHostDiscovered, dfmEvtDataProtectionPolicyModified=dfmEvtDataProtectionPolicyModified, dfmEvtHostIscsiServiceStatusDown=dfmEvtHostIscsiServiceStatusDown, dfmEvtUserFilesQuotaAlmostFull=dfmEvtUserFilesQuotaAlmostFull, dfmEvtVolumeGrowthRateOk=dfmEvtVolumeGrowthRateOk, 
dfmEvtLifMigrated=dfmEvtLifMigrated, dfmEvtDatasetMemberDeduplicated=dfmEvtDatasetMemberDeduplicated, dfmEvtAggregateOverDeduplicated=dfmEvtAggregateOverDeduplicated, dfmEvtNdmpCommunicationDown=dfmEvtNdmpCommunicationDown, dfmEvtLunSnapshotNotPossible=dfmEvtLunSnapshotNotPossible, dfmEvtDisksSparesAvailable=dfmEvtDisksSparesAvailable, dfmEvtLocalFilerConfigStatusOk=dfmEvtLocalFilerConfigStatusOk, dfmEvtNdmpStatusDown=dfmEvtNdmpStatusDown, dfmEvtInodesFull=dfmEvtInodesFull, dfmEvtVfilerMigrated=dfmEvtVfilerMigrated, dfmEvtGlobalStatusNonRecoverable=dfmEvtGlobalStatusNonRecoverable, dfmEvtUserFilesQuotaOk=dfmEvtUserFilesQuotaOk, dfmEvtUserDiskSpaceSoftLimitExceeded=dfmEvtUserDiskSpaceSoftLimitExceeded, dfmEvtEnvEnclFailed=dfmEvtEnvEnclFailed, dfmEvtQtreeFilesFull=dfmEvtQtreeFilesFull, dfmEvtErrorTrap=dfmEvtErrorTrap, dfmEvtCfoInterconnectPartialFailure=dfmEvtCfoInterconnectPartialFailure, dfmEvtVfilerIpAddressAdded=dfmEvtVfilerIpAddressAdded, dfmEvtMgmtStLoadTooHigh=dfmEvtMgmtStLoadTooHigh, dfmEvtCfoInterconnectDown=dfmEvtCfoInterconnectDown, dfmEvtSnapshotFull=dfmEvtSnapshotFull, dfmEvtFilerLoginFailed=dfmEvtFilerLoginFailed, dfmEvtVolumeDestroyed=dfmEvtVolumeDestroyed, dfmEvtQtreeAlmostFull=dfmEvtQtreeAlmostFull, dfmEventTimestamp=dfmEventTimestamp, dfmEvtFilerConfigPushStatusOk=dfmEvtFilerConfigPushStatusOk, dfmEvtClusterRenamed=dfmEvtClusterRenamed, dfmEvtDatasetDrStatusError=dfmEvtDatasetDrStatusError, dfmEventMessage=dfmEventMessage, dfmEvtDatasetMemberUndeduplicationFailed=dfmEvtDatasetMemberUndeduplicationFailed, dfmEvtMaxdirsizeNearlyReached=dfmEvtMaxdirsizeNearlyReached, dfmEvtCfoSettingsDisabled=dfmEvtCfoSettingsDisabled, dfmEvtAggregateOvercommitted=dfmEvtAggregateOvercommitted, dfmEvtNfsPerClientStatsDisabled=dfmEvtNfsPerClientStatsDisabled, dfmEvtClusterNodeAdded=dfmEvtClusterNodeAdded, dfmEvtTemperatureHot=dfmEvtTemperatureHot, dfmEvtGlobalStatusOther=dfmEvtGlobalStatusOther, dfmEvtScriptWarning=dfmEvtScriptWarning, 
dfmEvtInformationTrap=dfmEvtInformationTrap, dfmEvtAggregateStateOnline=dfmEvtAggregateStateOnline, dfmEvtEnvEnclInactive=dfmEvtEnvEnclInactive, dfmEvtSnapNotTooOld=dfmEvtSnapNotTooOld, dfmEvtHostUsergroupDeleted=dfmEvtHostUsergroupDeleted, dfmEvtSnapMirrorInitAborted=dfmEvtSnapMirrorInitAborted, dfmEvtHostColdStart=dfmEvtHostColdStart, dfmEvtNdmpCredentialsBad=dfmEvtNdmpCredentialsBad, dfmEvtSanHostLunChanged=dfmEvtSanHostLunChanged, dfmEvtDatasetProtectionSuspended=dfmEvtDatasetProtectionSuspended, dfmEvtCfoThisFilerCannotTakeover=dfmEvtCfoThisFilerCannotTakeover, dfmEvtQtreeFilesOk=dfmEvtQtreeFilesOk, dfmEvtGlobalStatusNonCritical=dfmEvtGlobalStatusNonCritical, dfmEvtAlarmCreated=dfmEvtAlarmCreated, dfmEvtAggregateStateRestricted=dfmEvtAggregateStateRestricted, dfmEvtCfoSettingsNotConfigured=dfmEvtCfoSettingsNotConfigured, dfmEvtDataProtectionJobStarted=dfmEvtDataProtectionJobStarted, dfmEvtMgmtStServerDown=dfmEvtMgmtStServerDown, dfmEvtScheduledReportFailed=dfmEvtScheduledReportFailed, dfmEvtAlarmModified=dfmEvtAlarmModified, dfmCommentFields=dfmCommentFields, dfmEvtHostDomainUserModified=dfmEvtHostDomainUserModified, dfmEvtDatasetDeleted=dfmEvtDatasetDeleted, dfmEvtIfAdminStatusUp=dfmEvtIfAdminStatusUp, dfmEvtAggregateDeleted=dfmEvtAggregateDeleted, dfmEvtVolumeQuotaAlmostOvercommitted=dfmEvtVolumeQuotaAlmostOvercommitted, dfmEvtMgmtStWatchdogDown=dfmEvtMgmtStWatchdogDown, dfmEvtDataProtectionPolicyDeleted=dfmEvtDataProtectionPolicyDeleted, dfmEvtCommentFieldModified=dfmEvtCommentFieldModified, dfmEvtMgmtStPerfAdvisorFreeSpaceOk=dfmEvtMgmtStPerfAdvisorFreeSpaceOk, dfmEvtSnapMirrorUpdateFailed=dfmEvtSnapMirrorUpdateFailed, dfmEvtSnapMirrorNearlyOutOfDate=dfmEvtSnapMirrorNearlyOutOfDate, dfmEvtVolumeCloneDiscovered=dfmEvtVolumeCloneDiscovered, dfmEvtQtreeSpaceOk=dfmEvtQtreeSpaceOk, DisplayString=DisplayString, dfmPhysicalHostId=dfmPhysicalHostId, dfmEvtInodesAlmostFull=dfmEvtInodesAlmostFull, 
dfmEvtSnapvaultRelationshipCreateFailed=dfmEvtSnapvaultRelationshipCreateFailed, dfmEvtCfoThisFilerTakeover=dfmEvtCfoThisFilerTakeover, dfmEvtSnapvaultRelationshipCreated=dfmEvtSnapvaultRelationshipCreated, dfmEvtDatasetMigrateFailed=dfmEvtDatasetMigrateFailed, dfmEvtVolumeSpaceReserveNearlyFull=dfmEvtVolumeSpaceReserveNearlyFull, dfmEvtLunHostClusterConfigOk=dfmEvtLunHostClusterConfigOk, dfmEvtSnapvaultReplicaOutOfDate=dfmEvtSnapvaultReplicaOutOfDate, dfmEvtSnapvaultRelationshipCreateAborted=dfmEvtSnapvaultRelationshipCreateAborted, dfmObjectEntry=dfmObjectEntry, dfmEvtEnvEnclDissapeared=dfmEvtEnvEnclDissapeared, dfmSerialNumber=dfmSerialNumber, dfmEvtResourcePoolSpaceOk=dfmEvtResourcePoolSpaceOk, dfmEvtMgmtStSchedulerDown=dfmEvtMgmtStSchedulerDown, dfmEvtHostRoleDiscovered=dfmEvtHostRoleDiscovered, dfmEvtHostFcpServiceStatusUp=dfmEvtHostFcpServiceStatusUp, dfmEvtHostModified=dfmEvtHostModified, dfmEventEntry=dfmEventEntry, dfmEvtDisksNoneFailed=dfmEvtDisksNoneFailed, dfmEvtMgmtStDatabaseDown=dfmEvtMgmtStDatabaseDown, dfmEvtMgmtStSchedulerUp=dfmEvtMgmtStSchedulerUp, dfmEvtNvramBatteryDischarged=dfmEvtNvramBatteryDischarged, dfmEvtDatasetRollback=dfmEvtDatasetRollback, dfmEvtMgmtStFileSystemFileSizeLimitReached=dfmEvtMgmtStFileSystemFileSizeLimitReached, dfmEvtHbaPortOffline=dfmEvtHbaPortOffline, dfmEvtFilerCommunicationFailed=dfmEvtFilerCommunicationFailed, dfmEvtDatabaseRestoreFailed=dfmEvtDatabaseRestoreFailed, dfmEvtSnapMirrorPossibleProblem=dfmEvtSnapMirrorPossibleProblem, dfmEvtVolumeNextSnapshotNotPossible=dfmEvtVolumeNextSnapshotNotPossible, dfmEvtTemperatureOk=dfmEvtTemperatureOk, dfmEvtSnapMirrorUpdateCompleted=dfmEvtSnapMirrorUpdateCompleted, dfmEvtSnapEnabled=dfmEvtSnapEnabled, dfmEvtNvramBatteryFullyCharged=dfmEvtNvramBatteryFullyCharged, dfmEvtNvramBatteryOverCharged=dfmEvtNvramBatteryOverCharged, dfmEvtNvramBatteryMissing=dfmEvtNvramBatteryMissing, dfmEvtHostUsergroupModified=dfmEvtHostUsergroupModified, 
dfmEvtSpaceManagementJobStarted=dfmEvtSpaceManagementJobStarted, dfmEvtDatasetMemberResized=dfmEvtDatasetMemberResized, dfmEvtHostNfsServiceStatusDown=dfmEvtHostNfsServiceStatusDown, dfmEvtDatasetRollbackWithErrors=dfmEvtDatasetRollbackWithErrors, dfmEvtDatasetNoCloneSnapshotFound=dfmEvtDatasetNoCloneSnapshotFound, dfmEvtDatasetWriteCheckOk=dfmEvtDatasetWriteCheckOk, dfmEvtFilerLoginOk=dfmEvtFilerLoginOk, dfmEvtHostIscsiServiceStatusUp=dfmEvtHostIscsiServiceStatusUp, dfmEvtMgmtStNodeLimitOk=dfmEvtMgmtStNodeLimitOk, dfmEvtDatasetProvisioningFailed=dfmEvtDatasetProvisioningFailed, dfmEvtHostRoleDeleted=dfmEvtHostRoleDeleted, dfmEvtResourceGroupCreated=dfmEvtResourceGroupCreated, dfmEvtVfilerRenamed=dfmEvtVfilerRenamed, dfmEvtSnapMirrorQuiesceCompleted=dfmEvtSnapMirrorQuiesceCompleted, dfmEvtEmergencyTrap=dfmEvtEmergencyTrap, dfmEvtVolumeGrowthRateAbnormal=dfmEvtVolumeGrowthRateAbnormal, dfmEvtAggregateOvercommitOk=dfmEvtAggregateOvercommitOk, dfmEvtEnvEnclOk=dfmEvtEnvEnclOk, dfmEvtHostUserModified=dfmEvtHostUserModified, dfmEvtVolumeOverDeduplicated=dfmEvtVolumeOverDeduplicated, dfmEvtVolumeNearlyNoFirstSnap=dfmEvtVolumeNearlyNoFirstSnap, dfmEvtSnapSchedSvConflict=dfmEvtSnapSchedSvConflict, dfmEvtVfilerDiscovered=dfmEvtVfilerDiscovered, dfmEvtResourcePoolSpaceNearlyFull=dfmEvtResourcePoolSpaceNearlyFull, dfmEvtSnapvaultRelationshipDiscovered=dfmEvtSnapvaultRelationshipDiscovered, dfmEvtMgmtStFreeSpaceOk=dfmEvtMgmtStFreeSpaceOk, dfmEvtSnapMirrorResyncFailed=dfmEvtSnapMirrorResyncFailed, dfmEvtHostDown=dfmEvtHostDown, dfmEvtMgmtStProvMgrNodeLimitNearlyReached=dfmEvtMgmtStProvMgrNodeLimitNearlyReached, dfmEvtLifStatusUp=dfmEvtLifStatusUp, dfmEvtDisksSomeFailed=dfmEvtDisksSomeFailed, dfmEvtMgmtStLoadOk=dfmEvtMgmtStLoadOk, dfmEvtVFilerTemplateModified=dfmEvtVFilerTemplateModified, dfmEvtDatasetCloneSnapshotFound=dfmEvtDatasetCloneSnapshotFound, dfmEventId=dfmEventId, dfmEvtSnapMirrorNotWorking=dfmEvtSnapMirrorNotWorking, 
dfmEvtDatasetDrStatusWarning=dfmEvtDatasetDrStatusWarning, dfmEvtHostDiscovered=dfmEvtHostDiscovered, dfmEvtDatasetMigrated=dfmEvtDatasetMigrated, dfmEvtVserverDeleted=dfmEvtVserverDeleted, dfmEvtHbaPortTrafficOk=dfmEvtHbaPortTrafficOk, dfmEvtAggregateAlmostOvercommitted=dfmEvtAggregateAlmostOvercommitted, dfmEvtGlobalStatusOk=dfmEvtGlobalStatusOk, dfmEvtVolumeSpaceReserveFull=dfmEvtVolumeSpaceReserveFull, dfmEvtSnapMirrorBreakCompleted=dfmEvtSnapMirrorBreakCompleted, dfmEvtDatasetWriteCheckWarning=dfmEvtDatasetWriteCheckWarning, dfmEvtVolumeRestricted=dfmEvtVolumeRestricted, dfmEvtDatasetMigratedWithErrors=dfmEvtDatasetMigratedWithErrors, dfmEvtVfilerMigrateFailed=dfmEvtVfilerMigrateFailed, dfmEvtNdmpStatusUp=dfmEvtNdmpStatusUp, dfmEvtCommentFieldDestroyed=dfmEvtCommentFieldDestroyed, dfmEvtSnapMirrorDeleted=dfmEvtSnapMirrorDeleted, dfmEvtSnapMirrorOutOfSync=dfmEvtSnapMirrorOutOfSync, dfmEvtMaxdirsizeReached=dfmEvtMaxdirsizeReached, dfmEvtAggregateNearlyOverDeduplicated=dfmEvtAggregateNearlyOverDeduplicated, dfmEvtAggregateStateFailed=dfmEvtAggregateStateFailed, dfmEvtNvramBatteryReplace=dfmEvtNvramBatteryReplace, dfmEvtResourcePoolSpaceFull=dfmEvtResourcePoolSpaceFull, dfmEvtSnapMirrorDeleteFailed=dfmEvtSnapMirrorDeleteFailed, dfmEvtScriptScheduleDisabled=dfmEvtScriptScheduleDisabled, dfmEvtSnapvaultBackupAborted=dfmEvtSnapvaultBackupAborted, dfmEvtMgmtStProtMgrNodeLimitReached=dfmEvtMgmtStProtMgrNodeLimitReached, dfmEvtSvdirDiscovered=dfmEvtSvdirDiscovered, dfmEvtCfoPartnerMayBeDown=dfmEvtCfoPartnerMayBeDown, dfmEvtVolumeCloneDeleted=dfmEvtVolumeCloneDeleted, dfmEvtConfigFileChanged=dfmEvtConfigFileChanged, dfmEvtSpaceManagementJobFailed=dfmEvtSpaceManagementJobFailed, dfmEvtSnapshotSpaceOk=dfmEvtSnapshotSpaceOk, dfmEvtFansNormal=dfmEvtFansNormal, dfmEvtProvisioningPolicyModified=dfmEvtProvisioningPolicyModified, dfmEvtSnapMirrorUpdateAborted=dfmEvtSnapMirrorUpdateAborted, dfmEvtStorageServiceCreated=dfmEvtStorageServiceCreated, 
dfmEvtIfAdminStatusDown=dfmEvtIfAdminStatusDown, dfmEvtSnapSchedSmOk=dfmEvtSnapSchedSmOk, dfmEvtUserDiskSpaceQuotaFull=dfmEvtUserDiskSpaceQuotaFull, dfmEvtDatasetMemberResizeFailed=dfmEvtDatasetMemberResizeFailed, dfmEvtHostUserDiscovered=dfmEvtHostUserDiscovered, dfmEvtHostIdentityConflict=dfmEvtHostIdentityConflict, dfmEvtDatasetMemberDestroyed=dfmEvtDatasetMemberDestroyed, dfmEvtSnapvaultRelationshipModified=dfmEvtSnapvaultRelationshipModified, dfmEvtFilerConfigPushStatusError=dfmEvtFilerConfigPushStatusError, dfmEvtSnapMirrorResyncCompleted=dfmEvtSnapMirrorResyncCompleted, dfmEvtSnapMirrorWorking=dfmEvtSnapMirrorWorking, dfmEvtSnapMirrorInitCompleted=dfmEvtSnapMirrorInitCompleted, dfmEvtAggrSnapReserveOk=dfmEvtAggrSnapReserveOk, dfmEvtCpuUtilOk=dfmEvtCpuUtilOk, dfmEvtMgmtStProtMgrNodeLimitOk=dfmEvtMgmtStProtMgrNodeLimitOk, dfmEvtVfilerHostingFilerLoginOk=dfmEvtVfilerHostingFilerLoginOk, dfmEvtAggregateAlmostFull=dfmEvtAggregateAlmostFull, dfmEvtTrapListenerOk=dfmEvtTrapListenerOk, dfmEvtSnapMirrorResumeCompleted=dfmEvtSnapMirrorResumeCompleted, dfmEvtSnapDisabled=dfmEvtSnapDisabled, dfmEvtDatasetBackupFailed=dfmEvtDatasetBackupFailed, dfmEvtCfoSettingsEnabled=dfmEvtCfoSettingsEnabled, dfmEvtMgmtStProvMgrNodeLimitOk=dfmEvtMgmtStProvMgrNodeLimitOk, dfmEventSourceTable=dfmEventSourceTable, dfmEventSourceProductId=dfmEventSourceProductId, dfmEvtDatasetBackupAborted=dfmEvtDatasetBackupAborted, dfmEvtUserEmailAddressRejected=dfmEvtUserEmailAddressRejected, dfmEvtCifsPerClientStatsDisabled=dfmEvtCifsPerClientStatsDisabled, dfmEvtCriticalTrap=dfmEvtCriticalTrap, dfmEvtLocalVfilerConfigStatusChanged=dfmEvtLocalVfilerConfigStatusChanged, dfmEvtVolumeNextSnapshotPossible=dfmEvtVolumeNextSnapshotPossible, dfmEvtQtreeFilesAlmostFull=dfmEvtQtreeFilesAlmostFull, dfmEvtQtreeGrowthRateOk=dfmEvtQtreeGrowthRateOk, dfmEvtSnapMirrorBreakFailed=dfmEvtSnapMirrorBreakFailed, dfmEvtSnapMirrorOutOfDate=dfmEvtSnapMirrorOutOfDate, dfmEventMessageDetails=dfmEventMessageDetails)
# Generated export of the remaining DATAFABRIC-MANAGER-MIB symbols (second of
# two exportSymbols calls).  Do not edit by hand.
mibBuilder.exportSymbols("DATAFABRIC-MANAGER-MIB", dfmEvtDisksReconstructNone=dfmEvtDisksReconstructNone, dfmEvtFansOneFailed=dfmEvtFansOneFailed, dfmEvtDataProtectionPolicyCreated=dfmEvtDataProtectionPolicyCreated, dfmEvtVfilerMigrating=dfmEvtVfilerMigrating, dfmEventTable=dfmEventTable, dfmEvtMgmtStLicenseExpired=dfmEvtMgmtStLicenseExpired, dfmEvtRpmUnavailable=dfmEvtRpmUnavailable, dfmEvtStorageServiceModified=dfmEvtStorageServiceModified, dfmEvtDatasetProtectionFailure=dfmEvtDatasetProtectionFailure, dfmEvtNdmpCredentialsGood=dfmEvtNdmpCredentialsGood, dfmEvtSnapMirrorModified=dfmEvtSnapMirrorModified, dfmEvtDatasetDrStatusNormal=dfmEvtDatasetDrStatusNormal, dfmEvtCfoInterconnectNotPresent=dfmEvtCfoInterconnectNotPresent, dfmEvtPortStatusUp=dfmEvtPortStatusUp, dfmEvtFCSwitchPortOffline=dfmEvtFCSwitchPortOffline, dfmEvtDataProtectionScheduleCreated=dfmEvtDataProtectionScheduleCreated, dfmEvtQtreeFull=dfmEvtQtreeFull, dfmEventSeverity=dfmEventSeverity, dfmEvtPortStatusUndef=dfmEvtPortStatusUndef, dfmEvtUserFilesSoftLimitExceeded=dfmEvtUserFilesSoftLimitExceeded, dfmEvtVfilerMigratedWithErrors=dfmEvtVfilerMigratedWithErrors, dfmEvtVolumeNoFirstSnap=dfmEvtVolumeNoFirstSnap, dfmEvtScriptScheduleEnabled=dfmEvtScriptScheduleEnabled, dfmEvtCfoThisFilerDead=dfmEvtCfoThisFilerDead, dfmEvtResourcePoolDeleted=dfmEvtResourcePoolDeleted, dfmEvtNvramBatteryLow=dfmEvtNvramBatteryLow, dfmEvtVolumeOfflineOrDestroyed=dfmEvtVolumeOfflineOrDestroyed, dfmEvtSnapvaultReplicaNearlyOutOfDate=dfmEvtSnapvaultReplicaNearlyOutOfDate, dfmEvtSnapshotCreated=dfmEvtSnapshotCreated, dfmPhysicalHostFullName=dfmPhysicalHostFullName, dfmEvtLunOffline=dfmEvtLunOffline, dfmEvtTrapListenerFailed=dfmEvtTrapListenerFailed, dfmEvtSnapvaultRestoreFailed=dfmEvtSnapvaultRestoreFailed, dfmEvtFansManyFailed=dfmEvtFansManyFailed, dfmEvtDatasetCreated=dfmEvtDatasetCreated, dfmEvtMgmtStLicenseNearlyExpired=dfmEvtMgmtStLicenseNearlyExpired, dfmEvtSnapMirrorAbortCompleted=dfmEvtSnapMirrorAbortCompleted, 
dfmEvtVfilerConfigPushStatusError=dfmEvtVfilerConfigPushStatusError, dfmEvtStorageServiceDatasetProvisioned=dfmEvtStorageServiceDatasetProvisioned, dfmEvtHostUp=dfmEvtHostUp, dfmEvtDatabaseBackupFailed=dfmEvtDatabaseBackupFailed, dfmEvtDatasetProtectionLagError=dfmEvtDatasetProtectionLagError, netappDataFabricManager=netappDataFabricManager, dfmEvtStorageServiceDeleted=dfmEvtStorageServiceDeleted, dfmEvtDatasetDrStateFailedOver=dfmEvtDatasetDrStateFailedOver, dfmEvtIfAdminStatusTesting=dfmEvtIfAdminStatusTesting, dfmEvtTestAlarm=dfmEvtTestAlarm, dfmEvtNetworkOk=dfmEvtNetworkOk, dfmEvtVolumeQuotaOvercommitOk=dfmEvtVolumeQuotaOvercommitOk, dfmEvtLunHostClusterConfigError=dfmEvtLunHostClusterConfigError, dfmEvtSnapMirrorOff=dfmEvtSnapMirrorOff, dfmEvtResourcePoolModified=dfmEvtResourcePoolModified, dfmEvtHostFcpServiceStatusDown=dfmEvtHostFcpServiceStatusDown, dfmEvtSnapMirrorQuiesceFailed=dfmEvtSnapMirrorQuiesceFailed, dfmEvtVolumeSnapshotsAutoDeleted=dfmEvtVolumeSnapshotsAutoDeleted, dfmEvtDatasetBackupCompleted=dfmEvtDatasetBackupCompleted, dfmEvtSnapvaultRelationshipDeleteFailed=dfmEvtSnapvaultRelationshipDeleteFailed, dfmEvtDatasetConforming=dfmEvtDatasetConforming, dfmEvtSnapMirrorDateOk=dfmEvtSnapMirrorDateOk, dfmEvtHostCifsServiceStatusUp=dfmEvtHostCifsServiceStatusUp, dfmEvtCpuTooBusy=dfmEvtCpuTooBusy, dfmEvtHbaPortTrafficHigh=dfmEvtHbaPortTrafficHigh, dfmEvtTooManySnapshots=dfmEvtTooManySnapshots, dfmEvtVolumeNearlyOverDeduplicated=dfmEvtVolumeNearlyOverDeduplicated, dfmEvtDatasetSpaceOk=dfmEvtDatasetSpaceOk, dfmEvtCfoSettingsTakeoverDisabled=dfmEvtCfoSettingsTakeoverDisabled, dfmEvtEnvEnclActive=dfmEvtEnvEnclActive, dfmEvtAggrSnapReserveNearlyFull=dfmEvtAggrSnapReserveNearlyFull, dfmEvtMgmtStWatchdogUp=dfmEvtMgmtStWatchdogUp, dfmEvtAlertTrap=dfmEvtAlertTrap, dfmEvtUserFilesQuotaFull=dfmEvtUserFilesQuotaFull, dfmEvtHostAgentDown=dfmEvtHostAgentDown, dfmEvtHostSystemIdChanged=dfmEvtHostSystemIdChanged, 
dfmEvtSnapvaultRestoreAborted=dfmEvtSnapvaultRestoreAborted, dfmEvtHostSnmpDown=dfmEvtHostSnmpDown, dfmEvtCfoInterconnectUp=dfmEvtCfoInterconnectUp, dfmEvtDatasetDrStateFailoverError=dfmEvtDatasetDrStateFailoverError, dfmEvtHostSnmpUp=dfmEvtHostSnmpUp, dfmEvtSnapMirrorAbortFailed=dfmEvtSnapMirrorAbortFailed, dfmEvtSnapMirrorQuiesceAborted=dfmEvtSnapMirrorQuiesceAborted, dfmEvtVFilerTemplateCreated=dfmEvtVFilerTemplateCreated, dfmEvtDatasetMemberUndeduplicated=dfmEvtDatasetMemberUndeduplicated, dfmEvtDatasetProtectionLagWarning=dfmEvtDatasetProtectionLagWarning, dfmEvtHostUsergroupDiscovered=dfmEvtHostUsergroupDiscovered, dfmEvtMgmtStEventdDown=dfmEvtMgmtStEventdDown, dfmEvtMgmtStMonitorDown=dfmEvtMgmtStMonitorDown, dfmEvtAggregateFull=dfmEvtAggregateFull, dfmEvtHostCifsServiceStatusDown=dfmEvtHostCifsServiceStatusDown, dfmEvtSpaceManagementJobSucceeded=dfmEvtSpaceManagementJobSucceeded, dfmEvtSnapvaultBackupCompleted=dfmEvtSnapvaultBackupCompleted, dfmEvtPowerSupplyManyFailed=dfmEvtPowerSupplyManyFailed, dfmEvtDatabaseRestoreSucceeded=dfmEvtDatabaseRestoreSucceeded, dfmEventName=dfmEventName, dfmObjectId=dfmObjectId, dfmEvtVolumeFirstSnapOk=dfmEvtVolumeFirstSnapOk, dfmEvtVfilerConfigPushStatusOk=dfmEvtVfilerConfigPushStatusOk, dfmEventSourceId=dfmEventSourceId, dfmEvtScriptEmergency=dfmEvtScriptEmergency, dfmEvtVolumeSpaceReserveOk=dfmEvtVolumeSpaceReserveOk, dfmEvtVfilerNotMigrating=dfmEvtVfilerNotMigrating, dfmEvtSnapMirrorResyncAborted=dfmEvtSnapMirrorResyncAborted, dfmEvtIfAdminStatusUnknown=dfmEvtIfAdminStatusUnknown, dfmEvtScriptInformation=dfmEvtScriptInformation, dfmEvtDatasetDrStateReady=dfmEvtDatasetDrStateReady, dfmEvtScriptError=dfmEvtScriptError, dfmEvtVolumeSnapshotDeleted=dfmEvtVolumeSnapshotDeleted, dfmEvtVolumeFull=dfmEvtVolumeFull, dfmEvtPortRoleChange=dfmEvtPortRoleChange, dfmEvtHbaPortOnline=dfmEvtHbaPortOnline, dfmEvtEnvEnclFound=dfmEvtEnvEnclFound, dfmEvtPortStatusUnknown=dfmEvtPortStatusUnknown, dfmEvtHostIdentityOk=dfmEvtHostIdentityOk, 
dfmEvtVolumeQuotaOvercommitted=dfmEvtVolumeQuotaOvercommitted, dfmEvtSnapMirrorInSync=dfmEvtSnapMirrorInSync, dfmEvtSnapMirrorUnknown=dfmEvtSnapMirrorUnknown, dfmEvtVolumeSpaceOk=dfmEvtVolumeSpaceOk, dfmEvtVolumeAlmostFull=dfmEvtVolumeAlmostFull, dfmEvtNetworkTooLarge=dfmEvtNetworkTooLarge, dfmEvtMgmtStNodeLimitReached=dfmEvtMgmtStNodeLimitReached, dfmEvtVolumeNewSnapshot=dfmEvtVolumeNewSnapshot, dfmObjectStatus=dfmObjectStatus, dfmEvtVFilerTemplateDeleted=dfmEvtVFilerTemplateDeleted, dfmEvtAggregateStateOffline=dfmEvtAggregateStateOffline, dfmEvtDatasetProvisioningOk=dfmEvtDatasetProvisioningOk, dfmEvtVfilerRollback=dfmEvtVfilerRollback, dfmEvtScriptNormal=dfmEvtScriptNormal, dfmEvtAggregateSpaceOk=dfmEvtAggregateSpaceOk, dfmEvtSnapTooOld=dfmEvtSnapTooOld, dfmEvtVolumeNotOverDeduplicated=dfmEvtVolumeNotOverDeduplicated, dfmEvtMgmtStDatabaseUp=dfmEvtMgmtStDatabaseUp, dfmEvtDatasetMigrating=dfmEvtDatasetMigrating, dfmEvtPrimaryHostDiscovered=dfmEvtPrimaryHostDiscovered, dfmEvtLifStatusUnknown=dfmEvtLifStatusUnknown, dfmEvtScriptCritical=dfmEvtScriptCritical, dfmEvtFilerCommunicationOk=dfmEvtFilerCommunicationOk, dfmEvtHostUnreachable=dfmEvtHostUnreachable, dfmEvtLunOnline=dfmEvtLunOnline, dfmEvtUserFilesSoftLimitNotExceeded=dfmEvtUserFilesSoftLimitNotExceeded, dfmEvtMgmtStNodeLimitNearlyReached=dfmEvtMgmtStNodeLimitNearlyReached, dfmEvtDatabaseBackupSucceeded=dfmEvtDatabaseBackupSucceeded, dfmEvtSnapMirrorUndeleted=dfmEvtSnapMirrorUndeleted, dfmEvtLocalFilerConfigStatusChanged=dfmEvtLocalFilerConfigStatusChanged, dfmEvtVserverRenamed=dfmEvtVserverRenamed, dfmEvtNvramBatteryOld=dfmEvtNvramBatteryOld, dfmEvtHbaPortError=dfmEvtHbaPortError, dfmEvtDatasetMemberDestroyFailed=dfmEvtDatasetMemberDestroyFailed, dfmEvtMgmtStPerfAdvisorNotEnoughFreeSpace=dfmEvtMgmtStPerfAdvisorNotEnoughFreeSpace, dfmEvtDisksReconstructSome=dfmEvtDisksReconstructSome, dfmEvtWarningTrap=dfmEvtWarningTrap, dfmEvtNdmpCommunicationUp=dfmEvtNdmpCommunicationUp, 
dfmEvtProvisioningPolicyDeleted=dfmEvtProvisioningPolicyDeleted, dfmEvtMgmtStProvMgrNodeLimitReached=dfmEvtMgmtStProvMgrNodeLimitReached, dfmEvtVserverDiscovered=dfmEvtVserverDiscovered, dfmEvtNfsPerClientStatsEnabled=dfmEvtNfsPerClientStatsEnabled, dfmEvtMgmtStNotEnoughFreeSpace=dfmEvtMgmtStNotEnoughFreeSpace, dfmEvtInodesUtilOk=dfmEvtInodesUtilOk, dfmEvtVfilerStorageUnitRemoved=dfmEvtVfilerStorageUnitRemoved, dfmEvtSnapMirrorInitFailed=dfmEvtSnapMirrorInitFailed, dfmEvtDataProtectionScheduleModified=dfmEvtDataProtectionScheduleModified, dfmEvtNotificationTrap=dfmEvtNotificationTrap, dfmEvtSnapMirrorResumeFailed=dfmEvtSnapMirrorResumeFailed, dfmEvtHostDeleted=dfmEvtHostDeleted, dfmEvtResourcePoolCreated=dfmEvtResourcePoolCreated, dfmEvtSnapshotFailed=dfmEvtSnapshotFailed, dfmEvtDatasetMemberDeduplicationFailed=dfmEvtDatasetMemberDeduplicationFailed, dfmEvtHostNfsServiceStatusUp=dfmEvtHostNfsServiceStatusUp, dfmEvtVolumeOffline=dfmEvtVolumeOffline, dfmEvtMgmtStProtMgrNodeLimitNearlyReached=dfmEvtMgmtStProtMgrNodeLimitNearlyReached, dfmEvtDisksNoSpares=dfmEvtDisksNoSpares, dfmEvtMgmtStServerUp=dfmEvtMgmtStServerUp, dfmEvtHostReachable=dfmEvtHostReachable, dfmEvtPowerSupplyOneFailed=dfmEvtPowerSupplyOneFailed, dfmEvtReportScheduleEnabled=dfmEvtReportScheduleEnabled, dfmEvtHostAgentUp=dfmEvtHostAgentUp, dfmEvtDataProtectionScheduleDeleted=dfmEvtDataProtectionScheduleDeleted, dfmObjectTable=dfmObjectTable, dfmEvtUserEmailAddressOk=dfmEvtUserEmailAddressOk, dfmEvtStorageServiceDatasetDetached=dfmEvtStorageServiceDatasetDetached, dfmEvtAggregateNotOverDeduplicated=dfmEvtAggregateNotOverDeduplicated, dfmEvtVfilerStorageUnitAdded=dfmEvtVfilerStorageUnitAdded, dfmEvtSnapSchedSvOk=dfmEvtSnapSchedSvOk, dfmEvtAggregateDiscovered=dfmEvtAggregateDiscovered, dfmObjectType=dfmObjectType, dfmEvtRpmOnline=dfmEvtRpmOnline, dfmEvtDatasetModified=dfmEvtDatasetModified, dfmEvtCifsPerClientStatsEnabled=dfmEvtCifsPerClientStatsEnabled, 
dfmEvtUserDiskSpaceQuotaOk=dfmEvtUserDiskSpaceQuotaOk, dfmEvtLocalVfilerConfigStatusOk=dfmEvtLocalVfilerConfigStatusOk, dfmEvtResourceGroupModified=dfmEvtResourceGroupModified, dfmEvtLifStatusDown=dfmEvtLifStatusDown, dfmEvtAggrSnapReserveFull=dfmEvtAggrSnapReserveFull, dfmEvtNvramBatteryOk=dfmEvtNvramBatteryOk, dfmEventSourceSerialNumber=dfmEventSourceSerialNumber, dfmEvtHostRoleModified=dfmEvtHostRoleModified, dfmEvtUserDiskSpaceSoftLimitNotExceeded=dfmEvtUserDiskSpaceSoftLimitNotExceeded, dfmEvtVfilerIpAddressRemoved=dfmEvtVfilerIpAddressRemoved, dfmEvtCommentFieldCreated=dfmEvtCommentFieldCreated, dfmEvtCfoThisFilerCanTakeover=dfmEvtCfoThisFilerCanTakeover, dfmEvtVolumeAutosized=dfmEvtVolumeAutosized, dfmEvtCfoPartnerDead=dfmEvtCfoPartnerDead, dfmEvtVfilerRollbackWithErrors=dfmEvtVfilerRollbackWithErrors, dfmEvtConfigGroupChanged=dfmEvtConfigGroupChanged, dfmEvtVfilerDeleted=dfmEvtVfilerDeleted, dfmEvtStorageServiceDatasetAttached=dfmEvtStorageServiceDatasetAttached, dfmEvtDatasetSpaceWarning=dfmEvtDatasetSpaceWarning, dfmEvtMgmtStMonitorUp=dfmEvtMgmtStMonitorUp, dfmEvtHostLoginFailed=dfmEvtHostLoginFailed, dfmEvtDataExportFailed=dfmEvtDataExportFailed, dfmEvtSnapvaultRestoreCompleted=dfmEvtSnapvaultRestoreCompleted, dfmEvtSnapSchedSmConflict=dfmEvtSnapSchedSmConflict, dfmEvtFCSwitchPortFaulty=dfmEvtFCSwitchPortFaulty, dfmEvtPortStatusDown=dfmEvtPortStatusDown, dfmEvtResourceGroupDeleted=dfmEvtResourceGroupDeleted, dfmEvtSnapvaultBackupFailed=dfmEvtSnapvaultBackupFailed, dfmEvtSnapSchedModified=dfmEvtSnapSchedModified, dfmHostFullName=dfmHostFullName, dfmEvtSnapMirrorDeleteCompleted=dfmEvtSnapMirrorDeleteCompleted, dfmEvtDatasetDrStateFailingOver=dfmEvtDatasetDrStateFailingOver, dfmEvtQtreeGrowthRateAbnormal=dfmEvtQtreeGrowthRateAbnormal, dfmEvtHostNameChanged=dfmEvtHostNameChanged, dfmEvtLunSnapshotOk=dfmEvtLunSnapshotOk, dfmEvtSnapvaultRelationshipDeleteAborted=dfmEvtSnapvaultRelationshipDeleteAborted, dfmEvtFCSwitchPortOnline=dfmEvtFCSwitchPortOnline, 
dfmEvtVfilerHostingFilerLoginFailed=dfmEvtVfilerHostingFilerLoginFailed, dfmEvtProvisioningPolicyCreated=dfmEvtProvisioningPolicyCreated)
| 927.074856
| 13,728
| 0.758417
| 41,079
| 483,006
| 8.917452
| 0.026802
| 0.387364
| 0.455722
| 0.005219
| 0.888778
| 0.887364
| 0.885884
| 0.885704
| 0.88477
| 0.884601
| 0
| 0.017013
| 0.047254
| 483,006
| 520
| 13,729
| 928.857692
| 0.77902
| 0.0007
| 0
| 0
| 0
| 0
| 0.671694
| 0.466972
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.001953
| 0.013672
| 0
| 0.015625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
43a3c2a3a5e1c51862fcb507ffc3bdcac3a6409e
| 95,015
|
py
|
Python
|
source_dir/accountingpy/accountingpy.py
|
ahmedelsahouly/accountingpy
|
6093cdab053b0eb23d58e83fab2a46d028eed060
|
[
"MIT"
] | null | null | null |
source_dir/accountingpy/accountingpy.py
|
ahmedelsahouly/accountingpy
|
6093cdab053b0eb23d58e83fab2a46d028eed060
|
[
"MIT"
] | null | null | null |
source_dir/accountingpy/accountingpy.py
|
ahmedelsahouly/accountingpy
|
6093cdab053b0eb23d58e83fab2a46d028eed060
|
[
"MIT"
] | null | null | null |
"""
Module : accountingpy.py
Author : Ahmed El-sahooly
Version : 0.0.1
about : AccountingPy Module Provides a set Of Interactive
Functions to Compute the Most Time Consuming Accounting Formulas
"""
def sl(*x):
"""Straight Line Depreciation Method"""
cost = float(input("Please Enter the Cost Of The Asset: "))
rv = float(input("Please Enter Estimated Residual Value Of The Asset: "))
life = float(input("Please Enter Estimated Useful Life Of The Asset (Years): "))
r = (float(cost) - float(rv)) / float(life)
dd = float(cost)-float(rv)
fg = float(r)/float(12)
print ">> Your Depreciable Cost is",dd
print ">> Your Annual Depreciation is ",r
print ">> Your Monthly Depreciation is",fg
for i in x:
lg = float(fg)*float(i)
print ">> Your Depreciation For",i,"Months is",lg
def uop(*val):
"""Straight Line Depreciation Method With Life In Units"""
cost = float(input("please Enter The Cost Of Asset: "))
rv = float(input("please Enter Estimated Residual Value Of Asset: "))
lifeinunits = float(input("please Enter Estimated Life in Units: "))
rrr = (float(cost) - float(rv)) / float(lifeinunits)
print ">> Your Depreciation per Unit is ",rrr
for i in val:
print ">> Depreciation for ",i,"Units is ",i * rrr
def ddb():
"""Double Declining Balance Depreciation Method"""
cost = float(input("Please Enter The Cost Of Asset: "))
accdepreciation = float(input("Please Enter The Value Of Accumulated Depreciation: "))
life = float(input("Please Enter Estimated Useful Life Of Asset(Years): "))
rv = float(input("Please Enter Estimated Residual Value Of Asset: "))
n = 0
a = (float(cost)-float(accdepreciation)) * (float(2)/float(life))
bn = float(a)/float(12)
print ">> Your Monthly Depreciation For First Year is",bn
while(n != (life-1)):
bk = float(cost)
a = ((float(cost)-float(accdepreciation)) * (float(2)/float(life)))
cost -= float(a)
bk -= float(a)
n += 1
vvv = float(bk)-float(rv)
print ">> Your Depreciation For Year No.",n,"is",a
print ">> Your Book Value After",n,"Years is",bk,"\n"
print ">> Your Depreciation For Year No.",int(life),"is",vvv
print ">> Your Book Value After",int(life),"Years is",rv
def dpr(*val):
"""Depreciation"""
d = input("For Using Straight Line Method Press 1\nFor Using Units Of Production Method Press 2\nFor Using Double Declining Balance Method Press 3: ")
if(d == 1):
from accountingpy import sl
sl()
elif(d == 2):
if(val == 0):
from accountingpy import uop
uop()
elif(val !=0):
cost = float(input("please Enter The Cost Of Asset: "))
rv = float(input("please Enter The Residual Value Of Asset: "))
lifeinunits = float(input("please Enter The Life in Units: "))
rrr = (float(cost) - float(rv)) / float(lifeinunits)
print ">> Your Depreciation per Unit is ",rrr
for i in val:
print ">> Depreciation for ",i,"Units is ",i * rrr
elif(d == 3):
from accountingpy import ddb
ddb()
def dpl(*va):
"""Deplation"""
cost = float(input("Please Enter The Cost Of Resource: "))
rv = float(input("Please Enter Estimated Residual Value: "))
totalunits = float(input("Please Enter The Value Of Estimated total units of natural resources: "))
r = (float(cost) - float(rv)) / float(totalunits)
print ">> Your Deplation Rate Per Unit is",r,"$"
for i in va:
print ">> Your Deplation For ",i,"Units is ",i * r,"$"
def ss():
"""Stock Splits Function"""
a = float(input("Please Enter Original Number Of Outstanding Shares (Stocks): "))
b = float(input("Please Enter Original Par Value Per Share: "))
c = float(input("Please Enter Original Market Price Per Share: "))
d = float(input("Please Enter The Number Of Stock Splits: "))
e = float(a)*float(d)
f = float(b)/float(d)
g = float(c)/float(d)
print ">> Your Number Of Outstanding Shares After Spliting is",e
print ">> Your Par Value Per Share After Spliting is",f
print ">> Your Market Price Per Share After Spliting is",g
def nrd():
"""Notes Receivable Discounting"""
md = float(input("Please Enter Your Receipt Value in Maturity Date: "))
proceed = float(input("Please Enter the Amount Of Proceed: "))
time = float(input("Please Enter Number of Months the bank will hold the Note (the discount period): "))
br = float(input("Please Enter The Bank Discount Rate %: "))
per = float(br / 100)
y = (float(md) * float(per)) * float(time / 12)
gg = float(md - y)
print ">> Bank Interest Revenue is ",y
print ">> Seller proceeds from discounting the note receivable is ",gg
if(gg > proceed):
io = float(gg) - float(proceed)
print ">> Your Interest Revenue is ",io
elif(gg < proceed):
ff = float(proceed) - float(gg)
print ">> Your Interest Expence is ",ff
def ie():
"""Interest Expense Function"""
ww = float(input("for Short-Term Loan Press 1:\nfor Long-Term Loan Press 2:\nfor Bonds-Payable Press 3: "))
if(ww == 1):
e = float(input("Please Enter The Principal Value: "))
ew = float(input("Please Enter Interest rate %: "))
ea = float(input("Please Enter Time in Months: "))
cc = ew/100
v = (e * cc) * (ea /12)
l = round(v)
jj = float(l) + float(e)
oo = l / (ea * 30)
print ">> Your Interest Expense for ",ea,"Months is ",l
print ">> Total Amount Paid in Maturity Date is ",jj
print ">> Your Interest Expense Per Day is",oo
elif(ww == 2):
spp = float(input(" for Single Payment Loan Press 1\n for Installment Payment Loan Press 2: "))
if(spp == 1):
pv = float(input("Please Enter Principal Value: "))
ir = float(input("Please Enter Interest rate %: "))
lp = float(input("Please Enter The Loan Period (Years): "))
mp = (float(pv) * (float(ir) / float(100))) * (float(1) / float(12))
yp = float(mp) * float(12)
semi = float(yp)/float(2)
ap = float(yp) * float(lp)
md = float(ap) + float(pv)
print ">> Your Monthly Interest Expense is ",mp
print ">> Your Semi-Annual Interest Expense is ",semi
print ">> Your Interest Expense Per Year is ",yp
print ">> Total Interest will be Paid is ",ap
print ">> Principal Value at Maturity Date is ",md
elif(spp == 2):
pip = list(input("Please Enter Each Installment Payment: "))
iir = float(input("Please Enter Interest rate %: "))
su = sum(pip)
le = len(pip)
n = 0
tie = 0
while le != 0:
iex = (float(su)*(float(iir)/float(100)))*(float(1)/float(12))
sm = float(iex)*float(6)
an = float(iex)*float(12)
ey = pip[0 + n]
dr = float(ey)+float(an)
n += 1
le -= 1
tie += float(an)
tot = float(su)+float(tie)
print "Information for Installment no.",n,"with Value Of ",ey
print ">> Your Monthly Interest Expense is",iex
print ">> Your Semi-Annual Interest Expense is",sm
print ">> Your Annual Interest Expense is",an
print ">> Total Amount Will Be Paid for The Installment is",dr,"\n"
print ">> Total Interest Expense for The Loan is ",tie
print ">> Your Total Payment for The Loan is",tot
elif(ww == 3):
from accountingpy import bp
bp()
def mp(x=0):
"""Mortgages Payment"""
mb = float(input("Please Enter The Mortgage Value: "))
interestrate = float(input("Please Enter Interest rate %: "))
payment = float(input("Please Enter The Monthly Payment Include Interest: "))
n = 0
ee = float(mb)
hh = 0
while mb >= payment:
ss = (float(mb)*(float(interestrate)/float(100)))/float(12)
cc = float(payment)-float(ss)
mb -= cc
hh += float(ss)
dd = float(ee)-float(mb)
n += 1
print ">> Your Interest Expense for Month no.",n,"is",ss
print ">> Your Principal Payment for Month no.",n,"is",cc
print ">> Mortgage Balance After",n,"Payments","is ",mb,"\n"
if(n == x):
break
print ">> Total Interest Paid After",n,"Months is",hh
print ">> Total Principal Paid After",n,"Months is",dd
def pv(r=3):
"""Present Value"""
aa = input("For Using Present Value Press 1\nFor Using Present Value Of Annuity Press 2: ")
if (aa == 1):
a = float(input("Please Enter The Future Value: "))
b = float(input("Please Enter Interest Rate %: "))
c = float(input("Please Enter The Period Of Investment (Year): "))
g = 1
n = 0
while c != 0:
d = float(g)/(float(1)+(float(b)/float(100)))
bb = round(d,r)
bbb = round(bb,r)
e = float(g)-float(bbb)
g -= float(e)
f = float(a)*float(bb)
lm = float(a)-float(f)
n += 1
c -= 1
print ">> Your Present Value For",n,"Years is",f
print ">> Your Earning From Investment is",lm
elif(aa == 2):
x = list(input("Please Enter The Future Values: "))
y = float(input("Please Enter Interest Rate %: "))
xy = float(input("Please Enter Estimated Residual Value: "))
a = float(x[-1])+float(xy)
x[-1] = a
z = len(x)
gg = 1
t = 0
k = 0
while z != 0:
aa = float(gg)/(float(1)+(float(y)/float(100)))
lm = round(aa,r)
bb = float(gg)-float(lm)
bbb = round(bb,r)
gg -= float(bbb)
ee = float(x[0 + t])
hh = float(ee)*float(lm)
k += float(hh)
t += 1
z -= 1
print ">> Your Present Value Of",ee,"For",t,"Years is",hh
print ">> Your Total Present Value Of Net Cash Inflows is",k,"\n"
dd = input("To Calculate Net Present Value Press 1\nTo Exit Press 0: ")
if(dd == 1):
ww = float(input("Please Enter The Cost Of Investment: "))
eer = float(k)-float(ww)
rre = float(k)/float(ww)
print ">> Your Net Present Value For",t,"Years is",eer
print ">> Your Profitability Index is",rre
elif(dd == 0):
print "Canceled"
def bp(x=0):
"""Bonds Payable"""
wow = float(input("for Bonds Without Neither Discount Nor Premium Press 1:\nfor Bonds With Either Discount Or Premium Press 2: "))
if(wow == 1):
bv = float(input("Please Enter Bonds-Payable Value: "))
ir = float(input("Please Enter Interest Rate %: "))
per = float(input("Please Enter The Period Of Bonds Life(Years): "))
mb = float(bv)*(float(ir)/float(100))*(float(1)/float(12))
sa = float(mb)*float(6)
aa = float(mb)*float(12)
tie = float(aa)*float(per)
ta = float(tie)+float(bv)
print ">> Your Monthly Interest Expense is ",mb
print ">> Your Semi-Annual Interest Expense is ",sa
print ">> Your Annual Interest Expense is ",aa,"\n"
print ">> Total Interest Paid is",tie
print ">> Total Amount Paid for Bonds-Payable is ",ta
elif(wow == 2):
xx = float(input(" for Bonds-Payable With Discount Press 1:\n for Bonds-Payable With Premium Press 2: "))
if(xx == 1):
dl = input(" For Using Straight Line Amortization Method Press 1\n For Using Effective Interest Amortization Method Press 2: ")
if(dl == 1):
aa = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
a = float(input("Please Enter Bonds-Payable Life(Year): "))
bb = float(input("Please Enter Stated Interest Rate %: "))
cc = float(input("Please Enter Discount Rate %: "))
mie = (float(aa)*(float(bb)/float(100)))*(float(1)/float(12))
cr = float(aa)*(float(cc)/float(100))
dv = float(aa)-float(cr)
md = float(dv)/(float(a)*float(12))
ti = float(mie)*(float(a)*float(12))
pmd = float(aa)+float(ti)
print ">> Your Monthly interest Expense is",mie
print ">> Your Monthly Discount Amortization is",md
print ">> Your Total Cash Receipt After Discount is",cr
print ">> Your Discount Value is",dv
print ">> Your Total Interest Expense is",ti
print ">> Your Total Payment In Maturity Date is",pmd
elif(dl == 2):
a = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
c = float(input("Please Enter Stated Interest Rate %: "))
d = float(input("Please Enter Market Interest Rate %: "))
e = float(input("Please Enter Discount Rate %: "))
f = float(a)*(float(e)/float(100)) #actual cash recript
g = float(a)-float(f) #discount value
n = 0
print ">> Your Actual Cash Received (Bond Carrying Amount) is",f
print ">> Your Discount Value is",g,"\n"
while g > 0 :
i = (float(a)*(float(c)/float(100)))*(float(6)/float(12))
j = (float(f)*(float(d)/float(100)))*(float(6)/float(12))
k = float(j)-float(i)
l = round(k)
f += float(l)
g -= float(l)
n += 1
print "Information Related To Payment No.",n
print ">> Your Interest Expense According To Stated Interest Rate is",i
print ">> Your Interest Expense According To Market Interest Rate is",j
print ">> Your Discount Amortization Value is",l
print ">> Your Discount Balance After",n,"Payments is",g
print ">> Your Bond Carrying Amount After",n,"Payments is",f,"\n"
if(n == x):
break
elif(xx == 2):
dl = input(" For Using Straight Line Amortization Method Press 1\n For Using Effective Interest Amortization Method Press 2: ")
if(dl == 1):
aa = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
a = float(input("Please Enter Bonds-Payable Life(Year): "))
bb = float(input("Please Enter Stated Interest Rate %: "))
cc = float(input("Please Enter Premium Percentage %: "))
mie = (float(aa)*(float(bb)/float(100)))*(float(1)/float(12))
cr = float(aa)*(float(cc)/float(100))
pv = float(cr)-float(aa)
mp = float(pv)/(float(a)*float(12))
ti = float(mie)*(float(a)*float(12))
pmd = float(aa)+float(ti)
print ">> Your Monthly interest Expense is",mie
print ">> Your Monthly Premium Amortization is",mp
print ">> Your Total Cash Receipt After Premium is",cr
print ">> Your Premium Value is",pv
print ">> Your Total Interest Expense is",ti
print ">> Your Total Payment In Maturity Date is",pmd
elif(dl == 2):
a = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
c = float(input("Please Enter Stated Interest Rate %: "))
d = float(input("Please Enter Market Interest Rate %: "))
e = float(input("Please Enter Premium Percentage %: "))
f = float(a)*(float(e)/float(100)) #actual cash recript
g = float(f)-float(a) #premium value
n = 0
print ">> Your Actual Cash Received (Bond Carrying Amount) is",f
print ">> Your Premium Value is",g,"\n"
while g > 0 :
i = (float(a)*(float(c)/float(100)))*(float(6)/float(12))
j = (float(f)*(float(d)/float(100)))*(float(6)/float(12))
k = float(i)-float(j)
l = round(k)
g -= float(l)
f -= float(l)
n += 1
print "Information Related To Payment No.",n
print ">> Your Interest Expense According To Stated Interest Rate is",i
print ">> Your Interest Expense According To Market Interest Rate is",j
print ">> Your Premium Amortization Value is",l
print ">> Your Premium Balance After",n,"Payments is",g
print ">> Your Bond Carrying Amount After",n,"Payments is",f,"\n"
if(n == x):
break
def pc():
"""Percentage Change"""
x = float(input("Please Enter The Previous Amount(base amount): "))
y = float(input("Please Enter The Current Amount: "))
b = float(y)-float(x)
d = (float(b)/float(x))*float(100)
f = (float(100)/float(100))*float(100)
c = float(f)+float(d)
if(y > x):
print ">> Amount With Value Of",x,"Increased by",b
print ">> Amount Increased by",round(d,1),"% Of Base Amount"
print ">> Total Percentage Change is",round(c,1),"%"
elif(y < x):
print ">> Amount With Value Of",x,"Decreased by",abs(b)
print ">> Amount Decreased by",round(abs(d),1),"% Of Base Amount"
print ">> Total Percentage Change is",round(c,1),"%"
def h_analysis():
"""Horizontal Analysis"""
x = input("Please Enter The Previous Amounts(base amounts): ")
y = input("Please Enter The Current Amounts: ")
for i,v in zip(x,y):
b = float(v)-float(i)
d = (float(b)/i)*float(100)
if(v > i):
print ">> Amount With Value Of",i,"Increased by",b
print ">> Amount Increased by",round(d,1),"% Of Base Amount\n"
elif(v < i):
print ">> Amount With Value Of",i,"Decreased by",abs(b)
print ">> Amount Decreased by",round(abs(d),1),"% Of Base Amount\n"
def v_analysis():
"""Vertical Analysis"""
x = float(input("Please Enter The Base Amount: "))
y = input("Please Enter Items To Calculate Vertical Analysis for: ")
for i in y:
d = (float(i)/float(x))*float(100)
print ">> Item With Value Of",i,"Represent",round(d,1),"% Of Base Amount"
def wc():
"""Working Capital"""
a = float(input("Please Enter Current Assets Value: "))
b = float(input("Please Enter Current Liabilities Value: "))
c = float(a)-float(b)
print ">> Your Working Capital is",c
def cr():
"""Current Ratio"""
x = float(input("Please Enter Current Assets Value: "))
y = float(input("Please Enter Current Liabilities Value: "))
s = float(x)/float(y)
print ">> Your Current Ratio is",round(s,2)
def qr():
"""Quick Ratio"""
x = float(input("Please Enter Total Current Assets Value: "))
y = float(input("Please Enter Total Inventory Value: "))
z = float(input("Please Enter Total Current Liability Value: "))
s = (float(x)-float(y))/float(z)
print "Your Quick Ratio(Acid-Test Ratio) is",round(s,2)
def ito():
"""Inventory TurnOver"""
x = float(input("Please Enter The Cost Of Goods Sold(COGS) Value: "))
y = float(input("Please Enter Beginning Inventory Value: "))
z = float(input("Please Enter Ending Inventory Value: "))
s = float(x)/((float(y)+float(z))/float(2))
w = float(365)/float(s)
print ">> Your Inventory TurnOver is",round(s,1)
print ">> Your Days in Inventory is",round(w),"days"
def gpp():
"""Gross Profit Percentage"""
x = float(input("Please Enter Net Sales Value: "))
y = float(input("Please Enter The Cost Of Goods Sold(COGS) Value: "))
z = float(x)-float(y)
a = round(z,3)
c = (float(a)/float(x))*float(100)
print ">> Your Gross Profit(Gross Margin) is",a
print ">> Your Gross Profit Percentage is",round(c,1),"%"
def dr():
"""Debt Ratio"""
x = float(input("Please Enter Total Liabilities Value: "))
y = float(input("Please Enter Total Assets Value: "))
s = (float(x)/float(y))*float(100)
print ">> Your Debt Ratio is",round(s,1),"%"
def dte():
"""Debt To Equity"""
x = float(input("Please Enter Total Liabilities Value: "))
y = float(input("Please Enter Total Equity Value: "))
s = float(x)/float(y)
print ">> Your Debt to Equity is",round(s,2)
def icr():
"""Interest Coverage Ratio"""
x = float(input("Please Enter Net Income Value: "))
y = float(input("Please Enter Income Tax Expense Value: "))
z = float(input("Please Enter Interest Expense Value: "))
eb = float(x)+float(y)+float(z)
s = (float(x)+float(y)+float(z))/float(z)
print ">> Your Earning Before Interest And Tax (EBIT) is",eb
print ">> Your Interest-Coverage Ratio is",round(s,2)
def ros():
"""Return On Sales"""
x = float(input("Please Enter Net Income Value: "))
y = float(input("Please Enter Net Sales Value: "))
s = (float(x)/float(y))*float(100)
print ">> Your Return On Sales is",round(s,1),"%"
def rta():
"""Return On Total Assets"""
x = float(input("Please Enter Net Income Value: "))
y = float(input("Please Enter Interest Expense Value: "))
z = float(input("Please Enter Beginning Total Assets Value: "))
w = float(input("Please Enter Ending Total Assets Value: "))
d = ((float(x)+float(y)) / ((float(z)+float(w)) / float(2))) * float(100)
print ">> Your Rate of Return on Total Assets is",round(d,1),"%"
def ato():
"""Asset TurnOver"""
x = float(input("Please Enter Net Sales Value: "))
y = float(input("Please Enter Beginning Total Assets Value: "))
z = float(input("Please Enter Ending Total Assets Value: "))
d = float(x) / ((float(y)+float(z)) / float(2))
print ">> Your Asset TurnOver Ratio is",round(d,2)
def arto():
"""Accounts Receivable TurnOver"""
x = float(input("Please Enter Net credit sales Value: "))
y = float(input("Please Enter Beginning Accounts Receivable Value: "))
z = float(input("Please Enter Ending Accounts Receivable Value: "))
s = float(x)/((float(y)+float(z))/float(2))
q = float(365)/float(s)
print ">> Your Accounts Receivable TurnOver is",round(s,1)
print ">> Your Days Sales in Receivables is",round(q),"days"
def roe():
"""Return On Equity"""
x = float(input("Please Enter Net Income Value: "))
s = float(input("Please Enter The Value of Preferred dividend Paid: "))
y = float(input("Please Enter Beginning common stockholders equity Value: "))
z = float(input("Please Enter Ending common stockholders equity Value: "))
d = ((float(x)-float(s)) / ((float(y)+float(z)) / float(2))) * float(100)
print ">> Your Rate of Return on Common Stockholders Equity is",round(d,1),"%"
def per():
"""Price Earnings Ratio"""
x = float(input("Please Enter Market price per share of common stock: "))
a = float(input("Please Enter Net Income Value: "))
b = float(input("Please Enter The Value of Preferred dividend Paid: "))
c = float(input("Please Enter Number of shares of common stock outstanding: "))
y = (float(a)-float(b))/float(c)
s = float(x)/float(y)
print ">> Your Price/Earnings Ratio is",round(s,2)
def dp():
"""Dividend Payout"""
x = float(input("Please Enter Annual dividends per share of common stock: "))
a = float(input("Please Enter Net Income Value: "))
b = float(input("Please Enter The Value of Preferred dividend Paid: "))
c = float(input("Please Enter Number of shares of common stock outstanding: "))
y = (float(a)-float(b))/float(c)
s = (float(x)/float(y))*float(100)
print ">> Your Dividend Payout is",s,"%"
def eps():
"""Earnings Per Share"""
x = float(input("Please Enter Net Income Value: "))
s = float(input("Please Enter The Value of Preferred dividend Paid: "))
y = float(input("Please Enter Number of shares of common stock outstanding: "))
e = (float(x)-float(s)) / float(y)
print ">> Your Earnings per Share of Common Stock(EPS) is",round(e,2),"\n"
b = input(" Press 1 To Calculate Dividend Payout on Common Stock\n Press 2 to Calculate Price/Earnings Ratio\n Press 3 to Calculate Dividend Yield on Common Stock\n Or Press 0 to Exit: ")
if(b == 1):
u = float(input("Please Enter Annual dividends per share of common stock: "))
ggg = (float(u) / float(e)) *float(100)
print ">> Your Dividend Payout on Common Stock is",round(ggg,2),"%"
elif(b == 2):
cc = float(input("Please Enter Market price per share of common stock: "))
eee = float(cc)/float(e)
print ">> Your Price/Earnings(P/E) Ratio is",round(eee,2)
elif(b == 3):
tt = float(input("Please Enter Annual dividends per share of common stock: "))
cc = float(input("Please Enter Market price per share of common stock: "))
xxx = (float(tt)/float(cc)) * float(100)
print ">> Your Dividend Yield on Common Stock is",round(xxx,2),"%"
elif(b == 0):
print "Canceled"
def dy():
"""Dividend Yield"""
x = float(input("Please Enter Annual dividends per share of common stock: "))
y = float(input("Please Enter Market price per share of common stock: "))
s = (float(x)/float(y))*float(100)
print ">> Your Dividend Yield is",round(s,1),"%"
def bvcs():
"""Book Value per Share of Common Stock"""
a = float(input("Please Enter Total Stockholders Equity Value: "))
b = float(input("Please Enter Preferred Equity Value: "))
c = float(input("Please Enter Number of Shares of Common Stock Outstanding: "))
d = (float(a)-float(b))/float(c)
print ">> Your Book Value Per Share of Common Stock is",round(d,2)
def r_analysis():
"""Ratio Analysis"""
a = float(input("Please Enter Net Income(Loss) Value: "))
b = float(input("Please Enter Net sales Value: "))
c = float(input("Please Enter Net Credit Sales Value: "))
d = float(input("Please Enter The Cost Of Goods Sold(COGS) Value: "))
e = float(input("Please Enter Interest Expense Value: "))
f = float(input("Please Enter Income Tax Expense Value: "))
g = float(input("Please Enter Total Current Assets Value: "))
h = float(input("Please Enter Beginning Accounts Receivable Value: "))
i = float(input("Please Enter Ending Accounts Receivable Value: "))
j = float(input("Please Enter Beginning Inventory Value: "))
k = float(input("Please Enter Ending Inventory Value: "))
l = float(input("Please Enter Beginning Total Assets Value: "))
m = float(input("Please Enter Ending Total Assets Value: "))
n = float(input("Please Enter Total Current Liabilities Value: "))
o = float(input("Please Enter Total Liabilities Value: "))
p = float(input("Please Enter Beginning Total Common Equity Value: "))
q = float(input("Please Enter Ending Total Common Equity Value: "))
r = float(input("Please Enter Preferred Dividend Value: "))
s = float(input("Please Enter Annual Dividends Per Share Of Common Stock Value: "))
t = float(input("Please Enter Number Of Common Shares Outstanding: "))
u = float(input("Please Enter Market Price Per Share of Common Stock: "))
uu = float(g)-float(n) #Working capital
aa = float(g)/float(n) #current Ratio
bb = (float(g)-float(k))/float(n) #Quick Ratio
cc = float(d)/((float(j)+float(k))/float(2)) #Inventory turnover
dd = float(365)/float(cc) #Days in inventory
ee = float(b)-float(d) #gross margin
ff = (float(ee)/float(b))*float(100) #Gross profit percent
gg = float(c)/((float(h)+float(i))/float(2)) #Accounts receivable turnover
hh = float(365)/float(gg) #days in receivable
ii = (float(o)/float(m))*float(100) #debt ratio
jj = float(o)/float(q) #debt to equity
kk = float(a)+float(f)+float(e) #ebit
ll = float(kk)/float(e) #interest coverage
mm = (float(a)/float(b))*float(100) #return on sales
nn = ((float(a)+float(e))/((float(l)+float(m))/float(2)))*float(100) #return on total assets
oo = float(b)/((float(l)+float(m))/float(2)) #assets turn over
pp = ((float(a)-float(r))/((float(p)+float(q))/float(2)))*float(100) #return on common equity
qq = (float(a)-float(r))/float(t) #EPS
rr = float(u)/float(qq) #Price/Earnings Ratio
ss = (float(s)/float(u))*float(100) #Dividend Yield
tt = (float(s)/float(qq))*float(100) #Dividend Payout
uull = (float(f)/(float(a)-float(f)))*float(100)
print ">> Your Working Capital is",uu,"\n"
print ">> Your Current Ratio is",round(aa,2),"\n"
print ">> Your Quick (Acid-Test) Ratio is",round(bb,2),"\n"
print ">> Your Inventory TurnOver is",round(cc,1),"\n"
print ">> Your Days in Inventory is",round(dd),"days \n"
print ">> Your Gross Profit(Gross Margin) is",ee,"\n"
print ">> Your Gross Profit Percentage is",round(ff,1),"%\n"
print ">> Your Accounts Receivable TurnOver is",round(gg,2),"\n"
print ">> Your Days Sales in Receivables is",round(hh),"\n"
print ">> Your Debt Ratio is",round(ii,1),"%\n"
print ">> Your Debt to Equity Ratio is",round(jj,2),"\n"
print ">> Your Earning Before Interest and Tax is",kk,"\n"
print ">> Your Interest Coverage Ratio is",round(ll,2),"\n"
print ">> Your Rate of Return on Net Sales is",round(mm,1),"%\n"
print ">> Your Rate of Return on Total Assets is",round(nn,1),"%\n"
print ">> Your Asset TurnOver Ratio is",round(oo,2),"\n"
print ">> Your Rate of Return on Common Stockholders Equity is",round(pp,1),"%\n"
print ">> Your Effective Tax Rate is",round(uull,1),"%\n"
print ">> Your Earnings per Share of Common Stock(EPS) is",round(qq,2),"\n"
print ">> Your Price/Earnings Ratio is",round(rr,2),"\n"
print ">> Your Dividend Yield is",round(ss,1),"%\n"
print ">> Your Dividend Payout is",tt,"%\n"
def uc(*val):
"""Unit Cost"""
s = input("To Calculate Cost Per Unit For Services Company Press 1\nTo Calculate Cost Per Unit For Merchandising Company Press 2\nTo Calculate Cost Per Unit For Manufacturing Company Press 3: ")
if(s == 1):
a = float(input("Please Enter Total Service Costs (Total Expense) Value: "))
b = float(input("Please Enter Total Number Of Services Provided: "))
c = float(a)/float(b)
print ">> Your Cost Per Service is",c
elif(s == 2):
d = float(input("Please Enter Beginning Inventory Value: "))
e = float(input("Please Enter Net Purchases Value: "))
f = float(input("Please Enter Freight In Value: "))
g = float(input("Please Enter Ending Inventory Value: "))
h = (float(d)+float(e)+float(f))-float(g)
print ">> Your Cost Of Goods Sold is",h
for i in val:
dd = float(h)/float(i)
print ">> Your Cost Per Unit For",i,"Units is",dd
elif(s == 3):
i = float(input("Please Enter Beginning Work in Process Inventory Value: "))
j = float(input("Please Enter Beginning Direct Materials Inventory Value: "))
k = float(input("Please Enter Purchases of Direct Materials (including freight in): "))
l = float(input("Please Enter Ending Direct Materials Inventory Value: "))
m = float(input("Please Enter Direct Labor Cost Value: "))
n = float(input("Please Enter Indirect Materials Value: "))
o = float(input("Please Enter Indirect Labor Value: "))
p = float(input("Please Enter Depreciation Value (plant and equipment): "))
q = float(input("Please Enter Plant Utilities and Insurance and Property Taxes Value: "))
r = float(input("Please Enter Ending Work in Process Inventory Value: "))
ss = (float(j)+float(k))-float(l)
t = float(n)+float(o)+float(p)+float(q)
u = (float(i)+float(t)+float(ss)+float(m))-float(r)
print ">> Your Total Direct Materials Used is",ss
print ">> Your Total Manufacturing Overhead Cost is",t
print ">> Your Cost Of Goods Sold is",u
for ii in val:
eee = float(u)/float(ii)
print ">> Your Cost Per Unit For",ii,"Units is",eee
def hlm():
"""High Low Method"""
a = float(input("Please Enter Highest Volume Value: "))
c = float(input("Please Enter Highest Cost Value: "))
b = float(input("Please Enter Lowest Volume Value: "))
d = float(input("Please Enter Lowest Cost Value: "))
e = float(input("Please Enter Number of Units (To Compute Cost For): "))
ab = float(a)-float(b)
cd = float(c)-float(d)
vr = float(cd)/float(ab)
fx = float(c)-(float(vr)*float(a))
mc = float(vr)*float(e)
mx = float(mc)+float(fx)
print ">> Your Variable Cost Per Unit is",vr
print ">> Your Estimated Fixed Cost is",fx
print ">> Your Estimated Variable Cost is",mc
print ">> Your Estimated Total Mixed Cost For",e,"Units is",mx
def tp():
"""Target Profit"""
a = float(input("Please Enter Sales Price Per Unit: "))
b = float(input("Please Enter Variable Cost Per Unit: "))
c = float(input("Please Enter Your Target Profit: "))
d = float(input("Please Enter Total Fixed Cost: "))
e = float(a)-float(b)
f = (float(d)+float(c))/float(e)
g = float(e)/float(a)
gg = float(g)*float(100)
h = (float(d)+float(c))/float(g)
print ">> Your Contribution Margin is",e
print ">> Your Contribution Margin Ratio is",gg,"%"
print ">> Your Target Profit in Units To Earn",c,"$ is",round(f)
print ">> Your Target Profit in Dollars To Earn",c,"$ is",h
def mos():
"""Margin of Safety"""
a = float(input("Please Enter Sales Price Per Unit: "))
b = float(input("Please Enter Variable Cost Per Unit: "))
bb = float(input("Please Enter Total Fixed Cost: "))
c = float(input("Please Enter Expected Sales: "))
d = float(a)-float(b)
e = float(bb)/float(d)
f = float(c)-float(e)
g = float(f)*float(a)
print ">> Your Margin of Safety in Units is",f
print ">> Your Margin of Safety in Dollars is",g
def cvp():
"""Cost Volume Profit Analysis"""
c = float(input("Please Enter Total Fixed Costs Value: "))
a = float(input("Please Enter Sale Price Per Unit: "))
b = float(input("Please Enter Variable Cost Per Unit: "))
ccm = float(a)-float(b)
cuu = float(c)/float(ccm)
ccmr = (float(ccm)/float(a))*float(100)
cda = float(c)/(float(ccmr)/float(100))
print ">> Your Contribution Margin is",ccm
print ">> Your Breakeven Sales in Units is",round(cuu)
print ">> Your Contribution Margin Ratio is",ccmr,"%"
print ">> Your Breakeven Sales in Dollars is",cda,"\n"
qq = input(" Press 1 To Compute Target Profit\n Press 2 To Compute Margin of Safety\n Press 3 To Perform Sensitivity Analysis\n Or Press 0 To Exit: ")
if(qq == 1):
dds = float(input("Please Enter Your Target Profit: "))
xxx = (float(c)+float(dds))/float(ccm)
xxxx = (float(c)+float(dds))/(float(ccmr)/float(100))
print ">> Your Target Profit in Units To Earn",dds,"$ is",round(xxx)
print ">> Your Target Profit in Dollars To Earn",dds,"$ is",xxxx
elif(qq == 0):
print "Canceled"
elif(qq == 2):
xc = float(input("Please Enter Expected Sales in Units: "))
zzz = float(xc)-float(cuu)
zzzz = float(zzz)*float(a)
print ">> Your Margin of Safety in Units is",round(zzz)
print ">> Your Margin of Safety in Dollars is",zzzz
elif(qq == 3):
i = input("Please Enter Total Fixed Costs Value: ")
o = input("Please Enter Sale Price Per Unit: ")
p = input("Please Enter Variable Cost Per Unit: ")
n = 0
for x,y,z in zip(i,o,p):
cm = float(y)-float(z)
uu = float(x)/float(cm)
cmr = (float(cm)/float(y))*float(100)
da = float(x)/(float(cmr)/float(100))
n += 1
print "Your Results in Case",int(n),"is :"
print ">> Your Contribution Margin is",cm
print ">> Your Breakeven Sales in Units is",round(uu)
print ">> Your Contribution Margin Ratio is",cmr,"%"
print ">> Your Breakeven Sales in Dollars is",da,"\n"
if(cm > ccm):
a = float(cm)-float(ccm)
print ">> Your Contribution Margin Increased by",a
elif(ccm > cm):
a = float(ccm)-float(cm)
print ">> Your Contribution Margin Decreased by",a
if(uu > cuu):
b = float(uu)-float(cuu)
print ">> Your Breakeven Sales in Units Increased by",round(b)
elif(cuu > uu):
b = float(cuu)-float(uu)
print ">> Your Breakeven Sales in Units Decreased by",round(b)
if(cmr > ccmr):
c = float(cmr)-float(ccmr)
print ">> Your Contribution Margin Ratio Increased by",c,"%"
elif(ccmr > cmr):
c = float(ccmr)-float(cmr)
print ">> Your Contribution Margin Ratio Decreased by",c,"%"
if(da > cda):
d = float(da)-float(cda)
print ">> Your Breakeven Sales in Dollars Increased by",d
elif(cda > da):
d = float(cda)-float(da)
print ">> Your Breakeven Sales in Dollars Decreased by",d,"\n"
def pp():
"""Payback Period"""
a = input("For Payback with Equal Annual Net Cash Inflows Press 1\nFor Payback with Unequal Net Cash Inflows Press 2: ")
if (a == 1):
b = float(input("Please Enter Total Amount Invested: "))
c = float(input("Please Enter Expected Annual Net Cash Inflow: "))
d = float(input("Please Enter Investment Useful Life (Years): "))
e = (float(c)*float(d))-float(b)
f = float(b)/float(c)
print ">> Your Payback Period is",f,"Years"
print ">> Total Amount Remaining After Payback Period is",e,"(Residual Value Not Included)"
elif(a == 2):
b = float(input("Please Enter Total Amount Invested: "))
c = list(input("Please Enter Expected Annual Net Cash Inflow: "))
n = 0
t = 0
ss = sum(c)
am = float(ss)-float(b)
if(ss < b):
print ">> Your Loss On Investment is",abs(am)
elif(ss > b):
while t <= b:
e = c[0 + n]
t += float(e)
n += 1
gg = c[n - 1]
ii = (float(t)-float(b))/float(gg)
ccc = float(n)-float(ii)
print ">> Your Payback Period is",ccc,"Years"
print ">> Total Amount Remaining After Payback Period is",am
def ror():
"""Rate Of Return"""
a = input("For Asset with Equal Annual Net Cash Inflows Press 1\nFor Asset with Unequal Net Cash Inflows Press 2: ")
if(a == 1):
b = float(input("Please Enter Annual Cash Inflows For The Asset: "))
c = float(input("Please Enter Asset Useful Life (Years): "))
d = float(input("Please Enter Total Depreciation During Operating Life Of Asset: "))
e = float(input("Please Enter Any Asset Salvage Value: "))
avo = ((float(b)*float(c))-(float(d)-float(e)))/float(c)
avi = (float(d)+float(e))/float(2)
ror = (float(avo)/float(avi))*float(100)
print ">> Your Average Annual Operating Income From an Asset is",avo
print ">> Your Average Amount Invested in an Asset is",avi
print ">> Your Rate Of Return (ROR) is",ror,"%"
elif(a == 2):
b = list(input("Please Enter Annual Cash Inflows For The Asset: "))
d = float(input("Please Enter Total Depreciation During Operating Life Of Asset: "))
e = float(input("Please Enter Any Asset Salvage Value: "))
f = sum(b)
c = len(b)
g = (float(f)-(float(d)-float(e)))/float(c)
h = (float(d)+float(e))/float(2)
ror = (float(g)/float(h))*float(100)
print ">> Your Average Annual Operating Income From an Asset is",g
print ">> Your Average Amount Invested in an Asset is",h
print ">> Your Rate Of Return (ROR) is",ror,"%"
def ci():
"""Compound Interest"""
ffg = input("If Interest Added Monthly Press 1\nIf Interest Added Annually Press 2: ")
if(ffg == 1):
a = float(input("Pleae Enter The Principal Value: "))
b = float(input("Pleae Enter Interest Rate %: "))
c = float(input("Please Enter The Loan Period (Months): "))
t = 0
n = 0
g = a
while c != 0:
ie = (float(a) * (float(b)/float(100))) * (float(1)/float(12))
a += float(ie)
t += float(ie)
n += 1
c -= 1
dd = float(g)+float(t)
print ">> Your Interest Expense For Month No.",n,"is",ie
print ">> Your Total Interest Expense For",n,"Months is",t
print ">> Your Total Payment in Maturity Date is",dd
elif(ffg == 2):
a = float(input("Pleae Enter The Principal Value: "))
b = float(input("Pleae Enter Interest Rate %: "))
c = float(input("Please Enter The Loan Period (Years): "))
t = 0
n = 0
g = a
while c != 0:
ie = float(a) * (float(b)/float(100))
a += float(ie)
t += float(ie)
n += 1
c -= 1
dd = float(g)+float(t)
print ">> Your Interest Expense For Year No.",n,"is",ie
print ">> Your Total Interest Expense For",n,"Years is",t
print ">> Your Total Payment in Maturity Date is",dd
def pvr():
"""Price Variance"""
a = float(input("Please Enter Your Actual Price: "))
b = float(input("Please Enter Your Standard Price: "))
c = float(input("Please Enter Actual Quantity: "))
d = float(a)-float(b)
e = float(d)*float(c)
print ">> Your Change In Price is",d
print ">> Your Price Variance is",e
def evr():
"""Efficiency Variance"""
a = float(input("Please Enter Your Actual Quantity: "))
b = float(input("Please Enter Your Standard Quantity: "))
c = float(input("Please Enter Your Standard Price: "))
d = float(a)-float(b)
e = float(d)*float(c)
print ">> Your Change In Quantity is",d
print ">> Your Efficiency Variance is",e
def pm():
"""Profit Margin"""
a = float(input("Please Enter Operating Income Value: "))
b = float(input("Please Enter Total Sales Value: "))
c = (float(a)/float(b))*float(100)
print ">> Your Profit Margin is",c,"%"
def roi():
"""Return On Investment"""
a = float(input("Please Enter Operating Income Value: "))
b = float(input("Please Enter Beginning Total Assets Value: "))
c = float(input("Please Enter Ending Total Assets Value: "))
d = (float(a)/((float(b)+float(c))/float(2)))*float(100)
print ">> Your Return on Investment (ROI) is",d,"% \n"
e = input("To Calculate Profit Margin And Asset TurnOver Press 1\n To Exit Press 0: ")
if(e == 1):
f = float(input("Please Enter Total Sales Value: "))
i = (float(b)+float(c))/float(2)
g = (float(a)/float(f))*float(100)
h = float(f)/float(i)
print ">> Your Profit Margin is",g,"%"
print ">> Your Asset TurnOver is",h
def ri():
"""Residual Income"""
a = float(input("Please Enter Operating Income Value: "))
b = float(input("Please Enter Your Target Rate Of Return %: "))
c = float(input("Please Enter Beginning Total Assets Value: "))
d = float(input("Please Enter Ending Total Assets Value: "))
e = (float(b)/float(100))*((float(c)+float(d))/float(2))
f = float(a)-float(e)
print ">> Your Minimum Acceptable Operating Income is",e
print ">> Your Residual Income (RI) is",f
def etr():
"""Effective Tax Rate"""
b = float(input("Please Enter Net Income Value: "))
a = float(input("Please Enter Income Tax Expense: "))
c = float(b)+float(a)
d = (float(a)/float(c))*float(100)
e = float(100)-float(d)
f = float(c)*(float(e)/float(100))
print ">> Your Pre Tax Income is",c
print ">> Your Effective Tax Rate is",round(d,1),"%"
print ">> Your After-Tax Operating Income is",f
def eva():
"""Economic Value Added"""
a = float(input("Please Enter After Tax Operating Income: "))
b = float(input("Please Enter Current Liabilities Value: "))
c = float(input("Please Enter Weighted Average Cost Of Capital (minimum rate of return required) %: "))
d = float(input("Please Enter Beginning Total Assets Value: "))
e = float(input("Please Enter Ending Total Assets Value: "))
f = float(a)-((((float(d)+float(e))/float(2))-float(b))*(float(c)/float(100)))
print ">> Your Economic Value Added (EVA) is",f
def straight_line(*x):
"""Straight Line Depreciation Method"""
cost = float(input("Please Enter the Cost Of The Asset: "))
rv = float(input("Please Enter Estimated Residual Value Of The Asset: "))
life = float(input("Please Enter Estimated Useful Life Of The Asset (Years): "))
r = (float(cost) - float(rv)) / float(life)
dd = float(cost)-float(rv)
fg = float(r)/float(12)
print ">> Your Depreciable Cost is",dd
print ">> Your Annual Depreciation is ",r
print ">> Your Monthly Depreciation is",fg
for i in x:
lg = float(fg)*float(i)
print ">> Your Depreciation For",i,"Months is",lg
def units_of_production(*val):
"""Straight Line Depreciation Method With Life In Units"""
cost = float(input("please Enter The Cost Of Asset: "))
rv = float(input("please Enter Estimated Residual Value Of Asset: "))
lifeinunits = float(input("please Enter Estimated Life in Units: "))
rrr = (float(cost) - float(rv)) / float(lifeinunits)
print ">> Your Depreciation per Unit is ",rrr
for i in val:
print ">> Depreciation for ",i,"Units is ",i * rrr
def double_declining_balance():
    """Double Declining Balance Depreciation Method"""
    # Yearly depreciation at double the straight-line rate (2/life),
    # applied to the declining book value; the final year is a plug that
    # forces the book value down exactly to the residual value.
    cost = float(input("Please Enter The Cost Of Asset: "))
    accdepreciation = float(input("Please Enter The Value Of Accumulated Depreciation: "))
    life = float(input("Please Enter Estimated Useful Life Of Asset(Years): "))
    rv = float(input("Please Enter Estimated Residual Value Of Asset: "))
    n = 0
    a = (float(cost)-float(accdepreciation)) * (float(2)/float(life))
    bn = float(a)/float(12)
    print ">> Your Monthly Depreciation For First Year is",bn
    # Iterate life-1 full years; `cost` carries the declining balance and
    # `bk` mirrors it as this year's closing book value.
    while(n != (life-1)):
        bk = float(cost)
        a = ((float(cost)-float(accdepreciation)) * (float(2)/float(life)))
        cost -= float(a)
        bk -= float(a)
        n += 1
        vvv = float(bk)-float(rv)  # candidate plug for the final year
        print ">> Your Depreciation For Year No.",n,"is",a
        print ">> Your Book Value After",n,"Years is",bk,"\n"
    # NOTE(review): if life == 1 the loop never runs and `vvv` is unbound.
    print ">> Your Depreciation For Year No.",int(life),"is",vvv
    print ">> Your Book Value After",int(life),"Years is",rv
def depreciation(*val):
"""Depreciation"""
d = input("For Using Straight Line Method Press 1\nFor Using Units Of Production Method Press 2\nFor Using Double Declining Balance Method Press 3: ")
if(d == 1):
from accountingpy import sl
sl()
elif(d == 2):
if(val == 0):
from accountingpy import uop
uop()
elif(val !=0):
cost = float(input("please Enter The Cost Of Asset: "))
rv = float(input("please Enter The Residual Value Of Asset: "))
lifeinunits = float(input("please Enter The Life in Units: "))
rrr = (float(cost) - float(rv)) / float(lifeinunits)
print ">> Your Depreciation per Unit is ",rrr
for i in val:
print ">> Depreciation for ",i,"Units is ",i * rrr
elif(d == 3):
from accountingpy import ddb
ddb()
def deplation(*va):
"""Deplation"""
cost = float(input("Please Enter The Cost Of Resource: "))
rv = float(input("Please Enter Estimated Residual Value: "))
totalunits = float(input("Please Enter The Value Of Estimated total units of natural resources: "))
r = (float(cost) - float(rv)) / float(totalunits)
print ">> Your Deplation Rate Per Unit is",r,"$"
for i in va:
print ">> Your Deplation For ",i,"Units is ",i * r,"$"
def stock_splits():
"""Stock Splits Function"""
a = float(input("Please Enter Original Number Of Outstanding Shares (Stocks): "))
b = float(input("Please Enter Original Par Value Per Share: "))
c = float(input("Please Enter Original Market Price Per Share: "))
d = float(input("Please Enter The Number Of Stock Splits: "))
e = float(a)*float(d)
f = float(b)/float(d)
g = float(c)/float(d)
print ">> Your Number Of Outstanding Shares After Spliting is",e
print ">> Your Par Value Per Share After Spliting is",f
print ">> Your Market Price Per Share After Spliting is",g
def notes_receivable_discounting():
"""Notes Receivable Discounting"""
md = float(input("Please Enter Your Receipt Value in Maturity Date: "))
proceed = float(input("Please Enter the Amount Of Proceed: "))
time = float(input("Please Enter Number of Months the bank will hold the Note (the discount period): "))
br = float(input("Please Enter The Bank Discount Rate %: "))
per = float(br / 100)
y = (float(md) * float(per)) * float(time / 12)
gg = float(md - y)
print ">> Bank Interest Revenue is ",y
print ">> Seller proceeds from discounting the note receivable is ",gg
if(gg > proceed):
io = float(gg) - float(proceed)
print ">> Your Interest Revenue is ",io
elif(gg < proceed):
ff = float(proceed) - float(gg)
print ">> Your Interest Expence is ",ff
def interest_expense():
    """Interest Expense Function

    Interactive menu: 1 = short-term loan (simple interest), 2 =
    long-term loan (single payment or installments), 3 = bonds payable
    (delegates to accountingpy.bp).  Everything is printed; nothing is
    returned.
    """
    ww = float(input("for Short-Term Loan Press 1:\nfor Long-Term Loan Press 2:\nfor Bonds-Payable Press 3: "))
    if(ww == 1):
        e = float(input("Please Enter The Principal Value: "))
        ew = float(input("Please Enter Interest rate %: "))
        ea = float(input("Please Enter Time in Months: "))
        cc = ew/100
        # simple interest: principal * rate * (months / 12), rounded
        v = (e * cc) * (ea /12)
        l = round(v)
        jj = float(l) + float(e)
        oo = l / (ea * 30)  # per-day figure assumes 30-day months
        print ">> Your Interest Expense for ",ea,"Months is ",l
        print ">> Total Amount Paid in Maturity Date is ",jj
        print ">> Your Interest Expense Per Day is",oo
    elif(ww == 2):
        spp = float(input(" for Single Payment Loan Press 1\n for Installment Payment Loan Press 2: "))
        if(spp == 1):
            pv = float(input("Please Enter Principal Value: "))
            ir = float(input("Please Enter Interest rate %: "))
            lp = float(input("Please Enter The Loan Period (Years): "))
            # monthly interest = principal * annual rate / 12
            mp = (float(pv) * (float(ir) / float(100))) * (float(1) / float(12))
            yp = float(mp) * float(12)
            semi = float(yp)/float(2)
            ap = float(yp) * float(lp)
            md = float(ap) + float(pv)
            print ">> Your Monthly Interest Expense is ",mp
            print ">> Your Semi-Annual Interest Expense is ",semi
            print ">> Your Interest Expense Per Year is ",yp
            print ">> Total Interest will be Paid is ",ap
            print ">> Principal Value at Maturity Date is ",md
        elif(spp == 2):
            # Py2 input() evaluates the entry, so "100,200" yields a tuple
            pip = list(input("Please Enter Each Installment Payment: "))
            iir = float(input("Please Enter Interest rate %: "))
            su = sum(pip)    # total of all installments
            le = len(pip)
            n = 0
            tie = 0          # total interest accumulated over the loan
            while le != 0:
                # NOTE(review): interest is computed from the full sum `su`
                # every period (`su` is never reduced) -- confirm this
                # flat-balance treatment is intended.
                iex = (float(su)*(float(iir)/float(100)))*(float(1)/float(12))
                sm = float(iex)*float(6)
                an = float(iex)*float(12)
                ey = pip[0 + n]               # this installment's principal
                dr = float(ey)+float(an)      # installment plus its annual interest
                n += 1
                le -= 1
                tie += float(an)
                tot = float(su)+float(tie)
                print "Information for Installment no.",n,"with Value Of ",ey
                print ">> Your Monthly Interest Expense is",iex
                print ">> Your Semi-Annual Interest Expense is",sm
                print ">> Your Annual Interest Expense is",an
                print ">> Total Amount Will Be Paid for The Installment is",dr,"\n"
            print ">> Total Interest Expense for The Loan is ",tie
            print ">> Your Total Payment for The Loan is",tot
    elif(ww == 3):
        from accountingpy import bp
        bp()
def mortgages_payment(x=0):
    """Mortgages Payment

    Amortizes a mortgage month by month: each payment is split into
    interest (balance * annual rate / 12) and principal, until the
    balance drops below one payment.  Pass x > 0 to stop after x
    payments instead.
    """
    mb = float(input("Please Enter The Mortgage Value: "))
    interestrate = float(input("Please Enter Interest rate %: "))
    payment = float(input("Please Enter The Monthly Payment Include Interest: "))
    n = 0
    ee = float(mb)   # original balance, kept for the principal-paid total
    hh = 0           # accumulated interest paid
    while mb >= payment:
        ss = (float(mb)*(float(interestrate)/float(100)))/float(12)  # interest portion
        cc = float(payment)-float(ss)                                # principal portion
        mb -= cc
        hh += float(ss)
        dd = float(ee)-float(mb)  # total principal paid so far
        n += 1
        print ">> Your Interest Expense for Month no.",n,"is",ss
        print ">> Your Principal Payment for Month no.",n,"is",cc
        print ">> Mortgage Balance After",n,"Payments","is ",mb,"\n"
        if(n == x):
            break
    # NOTE(review): if the balance starts below one payment the loop never
    # runs and `dd` is unbound here.
    print ">> Total Interest Paid After",n,"Months is",hh
    print ">> Total Principal Paid After",n,"Months is",dd
def present_value(r=3):
    """Present Value

    r is the rounding precision applied to the discount factor each
    year.  Choice 1 discounts a single future value year by year;
    choice 2 discounts a list of future cash inflows plus a residual
    value (folded into the last inflow), optionally following up with
    NPV and the profitability index.
    """
    aa = input("For Using Present Value Press 1\nFor Using Present Value Of Annuity Press 2: ")
    if (aa == 1):
        a = float(input("Please Enter The Future Value: "))
        b = float(input("Please Enter Interest Rate %: "))
        c = float(input("Please Enter The Period Of Investment (Year): "))
        g = 1  # cumulative discount factor, 1/(1+i)^n after n iterations
        n = 0
        while c != 0:
            d = float(g)/(float(1)+(float(b)/float(100)))
            bb = round(d,r)
            bbb = round(bb,r)  # NOTE(review): second round is redundant (same r)
            e = float(g)-float(bbb)
            g -= float(e)      # g becomes this year's rounded factor
            f = float(a)*float(bb)
            lm = float(a)-float(f)
            n += 1
            c -= 1
            print ">> Your Present Value For",n,"Years is",f
            print ">> Your Earning From Investment is",lm
    elif(aa == 2):
        x = list(input("Please Enter The Future Values: "))
        y = float(input("Please Enter Interest Rate %: "))
        xy = float(input("Please Enter Estimated Residual Value: "))
        # fold the residual value into the final year's cash inflow
        a = float(x[-1])+float(xy)
        x[-1] = a
        z = len(x)
        gg = 1  # cumulative discount factor, as above
        t = 0
        k = 0   # running total of discounted inflows
        while z != 0:
            aa = float(gg)/(float(1)+(float(y)/float(100)))
            lm = round(aa,r)          # this year's discount factor
            bb = float(gg)-float(lm)
            bbb = round(bb,r)
            gg -= float(bbb)
            ee = float(x[0 + t])
            hh = float(ee)*float(lm)  # discounted inflow for year t+1
            k += float(hh)
            t += 1
            z -= 1
            print ">> Your Present Value Of",ee,"For",t,"Years is",hh
        print ">> Your Total Present Value Of Net Cash Inflows is",k,"\n"
        dd = input("To Calculate Net Present Value Press 1\nTo Exit Press 0: ")
        if(dd == 1):
            ww = float(input("Please Enter The Cost Of Investment: "))
            eer = float(k)-float(ww)  # NPV = PV of inflows - investment
            rre = float(k)/float(ww)  # profitability index
            print ">> Your Net Present Value For",t,"Years is",eer
            print ">> Your Profitability Index is",rre
        elif(dd == 0):
            print "Canceled"
def bonds_payable(x=0):
    """Bonds Payable

    Interactive bond-interest schedules.  Choice 1: bonds issued at par
    (plain coupon interest).  Choice 2: bonds issued at a discount or a
    premium, amortized by either the straight-line method or the
    effective-interest method (semi-annual periods).  Pass x > 0 to cut
    an effective-interest schedule off after x payments.
    """
    wow = float(input("for Bonds Without Neither Discount Nor Premium Press 1:\nfor Bonds With Either Discount Or Premium Press 2: "))
    if(wow == 1):
        bv = float(input("Please Enter Bonds-Payable Value: "))
        ir = float(input("Please Enter Interest Rate %: "))
        per = float(input("Please Enter The Period Of Bonds Life(Years): "))
        mb = float(bv)*(float(ir)/float(100))*(float(1)/float(12))  # monthly coupon
        sa = float(mb)*float(6)
        aa = float(mb)*float(12)
        tie = float(aa)*float(per)
        ta = float(tie)+float(bv)
        print ">> Your Monthly Interest Expense is ",mb
        print ">> Your Semi-Annual Interest Expense is ",sa
        print ">> Your Annual Interest Expense is ",aa,"\n"
        print ">> Total Interest Paid is",tie
        print ">> Total Amount Paid for Bonds-Payable is ",ta
    elif(wow == 2):
        xx = float(input(" for Bonds-Payable With Discount Press 1:\n for Bonds-Payable With Premium Press 2: "))
        if(xx == 1):
            dl = input(" For Using Straight Line Amortization Method Press 1\n For Using Effective Interest Amortization Method Press 2: ")
            if(dl == 1):
                aa = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
                a = float(input("Please Enter Bonds-Payable Life(Year): "))
                bb = float(input("Please Enter Stated Interest Rate %: "))
                cc = float(input("Please Enter Discount Rate %: "))
                mie = (float(aa)*(float(bb)/float(100)))*(float(1)/float(12))  # monthly coupon
                cr = float(aa)*(float(cc)/float(100))  # cash received at issue
                dv = float(aa)-float(cr)               # total discount to amortize
                md = float(dv)/(float(a)*float(12))    # discount amortized per month
                ti = float(mie)*(float(a)*float(12))
                pmd = float(aa)+float(ti)
                print ">> Your Monthly interest Expense is",mie
                print ">> Your Monthly Discount Amortization is",md
                print ">> Your Total Cash Receipt After Discount is",cr
                print ">> Your Discount Value is",dv
                print ">> Your Total Interest Expense is",ti
                print ">> Your Total Payment In Maturity Date is",pmd
            elif(dl == 2):
                a = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
                c = float(input("Please Enter Stated Interest Rate %: "))
                d = float(input("Please Enter Market Interest Rate %: "))
                e = float(input("Please Enter Discount Rate %: "))
                f = float(a)*(float(e)/float(100)) # actual cash receipt (carrying amount)
                g = float(a)-float(f) # discount value
                n = 0
                print ">> Your Actual Cash Received (Bond Carrying Amount) is",f
                print ">> Your Discount Value is",g,"\n"
                # Semi-annual effective-interest amortization until the
                # discount is fully written off (or x payments reached).
                while g > 0 :
                    i = (float(a)*(float(c)/float(100)))*(float(6)/float(12))  # cash coupon
                    j = (float(f)*(float(d)/float(100)))*(float(6)/float(12))  # effective interest
                    k = float(j)-float(i)  # discount amortized this period
                    l = round(k)
                    f += float(l)
                    g -= float(l)
                    n += 1
                    print "Information Related To Payment No.",n
                    print ">> Your Interest Expense According To Stated Interest Rate is",i
                    print ">> Your Interest Expense According To Market Interest Rate is",j
                    print ">> Your Discount Amortization Value is",l
                    print ">> Your Discount Balance After",n,"Payments is",g
                    print ">> Your Bond Carrying Amount After",n,"Payments is",f,"\n"
                    if(n == x):
                        break
        elif(xx == 2):
            dl = input(" For Using Straight Line Amortization Method Press 1\n For Using Effective Interest Amortization Method Press 2: ")
            if(dl == 1):
                aa = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
                a = float(input("Please Enter Bonds-Payable Life(Year): "))
                bb = float(input("Please Enter Stated Interest Rate %: "))
                cc = float(input("Please Enter Premium Percentage %: "))
                mie = (float(aa)*(float(bb)/float(100)))*(float(1)/float(12))  # monthly coupon
                cr = float(aa)*(float(cc)/float(100))  # cash received at issue
                pv = float(cr)-float(aa)               # total premium to amortize
                mp = float(pv)/(float(a)*float(12))    # premium amortized per month
                ti = float(mie)*(float(a)*float(12))
                pmd = float(aa)+float(ti)
                print ">> Your Monthly interest Expense is",mie
                print ">> Your Monthly Premium Amortization is",mp
                print ">> Your Total Cash Receipt After Premium is",cr
                print ">> Your Premium Value is",pv
                print ">> Your Total Interest Expense is",ti
                print ">> Your Total Payment In Maturity Date is",pmd
            elif(dl == 2):
                a = float(input("Please Enter The Bonds-Payable Value (Par Value): "))
                c = float(input("Please Enter Stated Interest Rate %: "))
                d = float(input("Please Enter Market Interest Rate %: "))
                e = float(input("Please Enter Premium Percentage %: "))
                f = float(a)*(float(e)/float(100)) # actual cash receipt (carrying amount)
                g = float(f)-float(a) # premium value
                n = 0
                print ">> Your Actual Cash Received (Bond Carrying Amount) is",f
                print ">> Your Premium Value is",g,"\n"
                # Semi-annual effective-interest amortization until the
                # premium is fully written off (or x payments reached).
                while g > 0 :
                    i = (float(a)*(float(c)/float(100)))*(float(6)/float(12))  # cash coupon
                    j = (float(f)*(float(d)/float(100)))*(float(6)/float(12))  # effective interest
                    k = float(i)-float(j)  # premium amortized this period
                    l = round(k)
                    g -= float(l)
                    f -= float(l)
                    n += 1
                    print "Information Related To Payment No.",n
                    print ">> Your Interest Expense According To Stated Interest Rate is",i
                    print ">> Your Interest Expense According To Market Interest Rate is",j
                    print ">> Your Premium Amortization Value is",l
                    print ">> Your Premium Balance After",n,"Payments is",g
                    print ">> Your Bond Carrying Amount After",n,"Payments is",f,"\n"
                    if(n == x):
                        break
def percentage_change():
"""Percentage Change"""
x = float(input("Please Enter The Previous Amount(base amount): "))
y = float(input("Please Enter The Current Amount: "))
b = float(y)-float(x)
d = (float(b)/float(x))*float(100)
f = (float(100)/float(100))*float(100)
c = float(f)+float(d)
if(y > x):
print ">> Amount With Value Of",x,"Increased by",b
print ">> Amount Increased by",round(d,1),"% Of Base Amount"
print ">> Total Percentage Change is",round(c,1),"%"
elif(y < x):
print ">> Amount With Value Of",x,"Decreased by",abs(b)
print ">> Amount Decreased by",round(abs(d),1),"% Of Base Amount"
print ">> Total Percentage Change is",round(c,1),"%"
def horizontal_analysis():
"""Horizontal Analysis"""
x = input("Please Enter The Previous Amounts(base amounts): ")
y = input("Please Enter The Current Amounts: ")
for i,v in zip(x,y):
b = float(v)-float(i)
d = (float(b)/i)*float(100)
if(v > i):
print ">> Amount With Value Of",i,"Increased by",b
print ">> Amount Increased by",round(d,1),"% Of Base Amount\n"
elif(v < i):
print ">> Amount With Value Of",i,"Decreased by",abs(b)
print ">> Amount Decreased by",round(abs(d),1),"% Of Base Amount\n"
def vertical_analysis():
"""Vertical Analysis"""
x = float(input("Please Enter The Base Amount: "))
y = input("Please Enter Items To Calculate Vertical Analysis for: ")
for i in y:
d = (float(i)/float(x))*float(100)
print ">> Item With Value Of",i,"Represent",round(d,1),"% Of Base Amount"
def working_capital():
"""Working Capital"""
a = float(input("Please Enter Current Assets Value: "))
b = float(input("Please Enter Current Liabilities Value: "))
c = float(a)-float(b)
print ">> Your Working Capital is",c
def current_ratio():
"""Current Ratio"""
x = float(input("Please Enter Current Assets Value: "))
y = float(input("Please Enter Current Liabilities Value: "))
s = float(x)/float(y)
print ">> Your Current Ratio is",round(s,2)
def quick_ratio():
"""Quick Ratio"""
x = float(input("Please Enter Total Current Assets Value: "))
y = float(input("Please Enter Total Inventory Value: "))
z = float(input("Please Enter Total Current Liability Value: "))
s = (float(x)-float(y))/float(z)
print "Your Quick Ratio(Acid-Test Ratio) is",round(s,2)
def inventory_turnover():
"""Inventory TurnOver"""
x = float(input("Please Enter The Cost Of Goods Sold(COGS) Value: "))
y = float(input("Please Enter Beginning Inventory Value: "))
z = float(input("Please Enter Ending Inventory Value: "))
s = float(x)/((float(y)+float(z))/float(2))
w = float(365)/float(s)
print ">> Your Inventory TurnOver is",round(s,1)
print ">> Your Days in Inventory is",round(w),"days"
def gross_profit_percentage():
"""Gross Profit Percentage"""
x = float(input("Please Enter Net Sales Value: "))
y = float(input("Please Enter The Cost Of Goods Sold(COGS) Value: "))
z = float(x)-float(y)
a = round(z,3)
c = (float(a)/float(x))*float(100)
print ">> Your Gross Profit(Gross Margin) is",a
print ">> Your Gross Profit Percentage is",round(c,1),"%"
def debt_ratio():
"""Debt Ratio"""
x = float(input("Please Enter Total Liabilities Value: "))
y = float(input("Please Enter Total Assets Value: "))
s = (float(x)/float(y))*float(100)
print ">> Your Debt Ratio is",round(s,1),"%"
def debt_to_equity():
"""Debt To Equity"""
x = float(input("Please Enter Total Liabilities Value: "))
y = float(input("Please Enter Total Equity Value: "))
s = float(x)/float(y)
print ">> Your Debt to Equity is",round(s,2)
def interest_coverage_ratio():
"""Interest Coverage Ratio"""
x = float(input("Please Enter Net Income Value: "))
y = float(input("Please Enter Income Tax Expense Value: "))
z = float(input("Please Enter Interest Expense Value: "))
eb = float(x)+float(y)+float(z)
s = (float(x)+float(y)+float(z))/float(z)
print ">> Your Earning Before Interest And Tax (EBIT) is",eb
print ">> Your Interest-Coverage Ratio is",round(s,2)
def return_on_sales():
"""Return On Sales"""
x = float(input("Please Enter Net Income Value: "))
y = float(input("Please Enter Net Sales Value: "))
s = (float(x)/float(y))*float(100)
print ">> Your Return On Sales is",round(s,1),"%"
def return_on_total_assets():
"""Return On Total Assets"""
x = float(input("Please Enter Net Income Value: "))
y = float(input("Please Enter Interest Expense Value: "))
z = float(input("Please Enter Beginning Total Assets Value: "))
w = float(input("Please Enter Ending Total Assets Value: "))
d = ((float(x)+float(y)) / ((float(z)+float(w)) / float(2))) * float(100)
print ">> Your Rate of Return on Total Assets is",round(d,1),"%"
def asset_turnover():
"""Asset TurnOver"""
x = float(input("Please Enter Net Sales Value: "))
y = float(input("Please Enter Beginning Total Assets Value: "))
z = float(input("Please Enter Ending Total Assets Value: "))
d = float(x) / ((float(y)+float(z)) / float(2))
print ">> Your Asset TurnOver Ratio is",round(d,2)
def ar_turnover():
"""Accounts Receivable TurnOver"""
x = float(input("Please Enter Net credit sales Value: "))
y = float(input("Please Enter Beginning Accounts Receivable Value: "))
z = float(input("Please Enter Ending Accounts Receivable Value: "))
s = float(x)/((float(y)+float(z))/float(2))
q = float(365)/float(s)
print ">> Your Accounts Receivable TurnOver is",round(s,1)
print ">> Your Days Sales in Receivables is",round(q),"days"
def return_on_equity():
"""Return On Equity"""
x = float(input("Please Enter Net Income Value: "))
s = float(input("Please Enter The Value of Preferred dividend Paid: "))
y = float(input("Please Enter Beginning common stockholders equity Value: "))
z = float(input("Please Enter Ending common stockholders equity Value: "))
d = ((float(x)-float(s)) / ((float(y)+float(z)) / float(2))) * float(100)
print ">> Your Rate of Return on Common Stockholders Equity is",round(d,1),"%"
def price_earnings():
"""Price Earnings Ratio"""
x = float(input("Please Enter Market price per share of common stock: "))
a = float(input("Please Enter Net Income Value: "))
b = float(input("Please Enter The Value of Preferred dividend Paid: "))
c = float(input("Please Enter Number of shares of common stock outstanding: "))
y = (float(a)-float(b))/float(c)
s = float(x)/float(y)
print ">> Your Price/Earnings Ratio is",round(s,2)
def dividend_payout():
"""Dividend Payout"""
x = float(input("Please Enter Annual dividends per share of common stock: "))
a = float(input("Please Enter Net Income Value: "))
b = float(input("Please Enter The Value of Preferred dividend Paid: "))
c = float(input("Please Enter Number of shares of common stock outstanding: "))
y = (float(a)-float(b))/float(c)
s = (float(x)/float(y))*float(100)
print ">> Your Dividend Payout is",s,"%"
def earnings_per_share():
    """Earnings Per Share

    Computes EPS = (net income - preferred dividends) / common shares
    outstanding, then offers follow-up ratios built on that EPS:
    dividend payout (1), price/earnings (2), dividend yield (3).
    """
    x = float(input("Please Enter Net Income Value: "))
    s = float(input("Please Enter The Value of Preferred dividend Paid: "))
    y = float(input("Please Enter Number of shares of common stock outstanding: "))
    e = (float(x)-float(s)) / float(y)  # EPS, reused by every branch below
    print ">> Your Earnings per Share of Common Stock(EPS) is",round(e,2),"\n"
    b = input(" Press 1 To Calculate Dividend Payout on Common Stock\n Press 2 to Calculate Price/Earnings Ratio\n Press 3 to Calculate Dividend Yield on Common Stock\n Or Press 0 to Exit: ")
    if(b == 1):
        u = float(input("Please Enter Annual dividends per share of common stock: "))
        ggg = (float(u) / float(e)) *float(100)
        print ">> Your Dividend Payout on Common Stock is",round(ggg,2),"%"
    elif(b == 2):
        cc = float(input("Please Enter Market price per share of common stock: "))
        eee = float(cc)/float(e)
        print ">> Your Price/Earnings(P/E) Ratio is",round(eee,2)
    elif(b == 3):
        tt = float(input("Please Enter Annual dividends per share of common stock: "))
        cc = float(input("Please Enter Market price per share of common stock: "))
        xxx = (float(tt)/float(cc)) * float(100)
        print ">> Your Dividend Yield on Common Stock is",round(xxx,2),"%"
    elif(b == 0):
        print "Canceled"
def dividend_yield():
"""Dividend Yield"""
x = float(input("Please Enter Annual dividends per share of common stock: "))
y = float(input("Please Enter Market price per share of common stock: "))
s = (float(x)/float(y))*float(100)
print ">> Your Dividend Yield is",round(s,1),"%"
def book_value():
"""Book Value per Share of Common Stock"""
a = float(input("Please Enter Total Stockholders Equity Value: "))
b = float(input("Please Enter Preferred Equity Value: "))
c = float(input("Please Enter Number of Shares of Common Stock Outstanding: "))
d = (float(a)-float(b))/float(c)
print ">> Your Book Value Per Share of Common Stock is",round(d,2)
def ratio_analysis():
    """Ratio Analysis

    Prompts for the full set of financial-statement figures (a..u) and
    prints the whole battery of liquidity, efficiency, solvency,
    profitability and market ratios in one pass.  Each derived value's
    formula is noted on its assignment line.
    """
    a = float(input("Please Enter Net Income(Loss) Value: "))
    b = float(input("Please Enter Net sales Value: "))
    c = float(input("Please Enter Net Credit Sales Value: "))
    d = float(input("Please Enter The Cost Of Goods Sold(COGS) Value: "))
    e = float(input("Please Enter Interest Expense Value: "))
    f = float(input("Please Enter Income Tax Expense Value: "))
    g = float(input("Please Enter Total Current Assets Value: "))
    h = float(input("Please Enter Beginning Accounts Receivable Value: "))
    i = float(input("Please Enter Ending Accounts Receivable Value: "))
    j = float(input("Please Enter Beginning Inventory Value: "))
    k = float(input("Please Enter Ending Inventory Value: "))
    l = float(input("Please Enter Beginning Total Assets Value: "))
    m = float(input("Please Enter Ending Total Assets Value: "))
    n = float(input("Please Enter Total Current Liabilities Value: "))
    o = float(input("Please Enter Total Liabilities Value: "))
    p = float(input("Please Enter Beginning Total Common Equity Value: "))
    q = float(input("Please Enter Ending Total Common Equity Value: "))
    r = float(input("Please Enter Preferred Dividend Value: "))
    s = float(input("Please Enter Annual Dividends Per Share Of Common Stock Value: "))
    t = float(input("Please Enter Number Of Common Shares Outstanding: "))
    u = float(input("Please Enter Market Price Per Share of Common Stock: "))
    uu = float(g)-float(n) # working capital
    aa = float(g)/float(n) # current ratio
    bb = (float(g)-float(k))/float(n) # quick (acid-test) ratio
    cc = float(d)/((float(j)+float(k))/float(2)) # inventory turnover
    dd = float(365)/float(cc) # days in inventory
    ee = float(b)-float(d) # gross margin
    ff = (float(ee)/float(b))*float(100) # gross profit percentage
    gg = float(c)/((float(h)+float(i))/float(2)) # accounts receivable turnover
    hh = float(365)/float(gg) # days sales in receivables
    ii = (float(o)/float(m))*float(100) # debt ratio
    jj = float(o)/float(q) # debt to equity
    kk = float(a)+float(f)+float(e) # EBIT
    ll = float(kk)/float(e) # interest coverage
    mm = (float(a)/float(b))*float(100) # return on sales
    nn = ((float(a)+float(e))/((float(l)+float(m))/float(2)))*float(100) # return on total assets
    oo = float(b)/((float(l)+float(m))/float(2)) # asset turnover
    pp = ((float(a)-float(r))/((float(p)+float(q))/float(2)))*float(100) # return on common equity
    qq = (float(a)-float(r))/float(t) # EPS
    rr = float(u)/float(qq) # price/earnings ratio
    ss = (float(s)/float(u))*float(100) # dividend yield
    tt = (float(s)/float(qq))*float(100) # dividend payout
    uull = (float(f)/(float(a)-float(f)))*float(100) # NOTE(review): effective tax rate here divides by income-minus-tax; etr() divides by pre-tax income -- confirm which is intended
    print ">> Your Working Capital is",uu,"\n"
    print ">> Your Current Ratio is",round(aa,2),"\n"
    print ">> Your Quick (Acid-Test) Ratio is",round(bb,2),"\n"
    print ">> Your Inventory TurnOver is",round(cc,1),"\n"
    print ">> Your Days in Inventory is",round(dd),"days \n"
    print ">> Your Gross Profit(Gross Margin) is",ee,"\n"
    print ">> Your Gross Profit Percentage is",round(ff,1),"%\n"
    print ">> Your Accounts Receivable TurnOver is",round(gg,2),"\n"
    print ">> Your Days Sales in Receivables is",round(hh),"\n"
    print ">> Your Debt Ratio is",round(ii,1),"%\n"
    print ">> Your Debt to Equity Ratio is",round(jj,2),"\n"
    print ">> Your Earning Before Interest and Tax is",kk,"\n"
    print ">> Your Interest Coverage Ratio is",round(ll,2),"\n"
    print ">> Your Rate of Return on Net Sales is",round(mm,1),"%\n"
    print ">> Your Rate of Return on Total Assets is",round(nn,1),"%\n"
    print ">> Your Asset TurnOver Ratio is",round(oo,2),"\n"
    print ">> Your Rate of Return on Common Stockholders Equity is",round(pp,1),"%\n"
    print ">> Your Effective Tax Rate is",round(uull,1),"%\n"
    print ">> Your Earnings per Share of Common Stock(EPS) is",round(qq,2),"\n"
    print ">> Your Price/Earnings Ratio is",round(rr,2),"\n"
    print ">> Your Dividend Yield is",round(ss,1),"%\n"
    print ">> Your Dividend Payout is",tt,"%\n"
def unit_cost(*val):
    """Unit Cost

    Computes cost per unit for a services (1), merchandising (2) or
    manufacturing (3) company.  Optional positional arguments are unit
    counts for which a per-unit cost is printed in branches 2 and 3.
    """
    s = input("To Calculate Cost Per Unit For Services Company Press 1\nTo Calculate Cost Per Unit For Merchandising Company Press 2\nTo Calculate Cost Per Unit For Manufacturing Company Press 3: ")
    if(s == 1):
        a = float(input("Please Enter Total Service Costs (Total Expense) Value: "))
        b = float(input("Please Enter Total Number Of Services Provided: "))
        c = float(a)/float(b)
        print ">> Your Cost Per Service is",c
    elif(s == 2):
        d = float(input("Please Enter Beginning Inventory Value: "))
        e = float(input("Please Enter Net Purchases Value: "))
        f = float(input("Please Enter Freight In Value: "))
        g = float(input("Please Enter Ending Inventory Value: "))
        # COGS = beginning inventory + net purchases + freight in - ending inventory
        h = (float(d)+float(e)+float(f))-float(g)
        print ">> Your Cost Of Goods Sold is",h
        for i in val:
            dd = float(h)/float(i)
            print ">> Your Cost Per Unit For",i,"Units is",dd
    elif(s == 3):
        i = float(input("Please Enter Beginning Work in Process Inventory Value: "))
        j = float(input("Please Enter Beginning Direct Materials Inventory Value: "))
        k = float(input("Please Enter Purchases of Direct Materials (including freight in): "))
        l = float(input("Please Enter Ending Direct Materials Inventory Value: "))
        m = float(input("Please Enter Direct Labor Cost Value: "))
        n = float(input("Please Enter Indirect Materials Value: "))
        o = float(input("Please Enter Indirect Labor Value: "))
        p = float(input("Please Enter Depreciation Value (plant and equipment): "))
        q = float(input("Please Enter Plant Utilities and Insurance and Property Taxes Value: "))
        r = float(input("Please Enter Ending Work in Process Inventory Value: "))
        ss = (float(j)+float(k))-float(l)  # direct materials used
        t = float(n)+float(o)+float(p)+float(q)  # manufacturing overhead
        u = (float(i)+float(t)+float(ss)+float(m))-float(r)  # cost of goods sold
        print ">> Your Total Direct Materials Used is",ss
        print ">> Your Total Manufacturing Overhead Cost is",t
        print ">> Your Cost Of Goods Sold is",u
        for ii in val:
            eee = float(u)/float(ii)
            print ">> Your Cost Per Unit For",ii,"Units is",eee
def high_low_method():
"""High Low Method"""
a = float(input("Please Enter Highest Volume Value: "))
c = float(input("Please Enter Highest Cost Value: "))
b = float(input("Please Enter Lowest Volume Value: "))
d = float(input("Please Enter Lowest Cost Value: "))
e = float(input("Please Enter Number of Units (To Compute Cost For): "))
ab = float(a)-float(b)
cd = float(c)-float(d)
vr = float(cd)/float(ab)
fx = float(c)-(float(vr)*float(a))
mc = float(vr)*float(e)
mx = float(mc)+float(fx)
print ">> Your Variable Cost Per Unit is",vr
print ">> Your Estimated Fixed Cost is",fx
print ">> Your Estimated Variable Cost is",mc
print ">> Your Estimated Total Mixed Cost For",e,"Units is",mx
def target_profit():
"""Target Profit"""
a = float(input("Please Enter Sales Price Per Unit: "))
b = float(input("Please Enter Variable Cost Per Unit: "))
c = float(input("Please Enter Your Target Profit: "))
d = float(input("Please Enter Total Fixed Cost: "))
e = float(a)-float(b)
f = (float(d)+float(c))/float(e)
g = float(e)/float(a)
gg = float(g)*float(100)
h = (float(d)+float(c))/float(g)
print ">> Your Contribution Margin is",e
print ">> Your Contribution Margin Ratio is",gg,"%"
print ">> Your Target Profit in Units To Earn",c,"$ is",round(f)
print ">> Your Target Profit in Dollars To Earn",c,"$ is",h
def margin_of_safety():
"""Margin of Safety"""
a = float(input("Please Enter Sales Price Per Unit: "))
b = float(input("Please Enter Variable Cost Per Unit: "))
bb = float(input("Please Enter Total Fixed Cost: "))
c = float(input("Please Enter Expected Sales: "))
d = float(a)-float(b)
e = float(bb)/float(d)
f = float(c)-float(e)
g = float(f)*float(a)
print ">> Your Margin of Safety in Units is",f
print ">> Your Margin of Safety in Dollars is",g
def cost_volume_profit():
    """Cost Volume Profit Analysis.

    Computes contribution margin, breakeven point (units and dollars)
    and contribution margin ratio for a base scenario, then offers a
    follow-up menu: target profit, margin of safety, or a per-case
    sensitivity analysis compared against the base scenario.
    """
    c = float(input("Please Enter Total Fixed Costs Value: "))
    a = float(input("Please Enter Sale Price Per Unit: "))
    b = float(input("Please Enter Variable Cost Per Unit: "))
    ccm = float(a)-float(b)  # base contribution margin per unit
    cuu = float(c)/float(ccm)  # base breakeven sales in units
    ccmr = (float(ccm)/float(a))*float(100)  # base contribution margin ratio (%)
    cda = float(c)/(float(ccmr)/float(100))  # base breakeven sales in dollars
    print ">> Your Contribution Margin is",ccm
    print ">> Your Breakeven Sales in Units is",round(cuu)
    print ">> Your Contribution Margin Ratio is",ccmr,"%"
    print ">> Your Breakeven Sales in Dollars is",cda,"\n"
    qq = input(" Press 1 To Compute Target Profit\n Press 2 To Compute Margin of Safety\n Press 3 To Perform Sensitivity Analysis\n Or Press 0 To Exit: ")
    if(qq == 1):
        dds = float(input("Please Enter Your Target Profit: "))
        xxx = (float(c)+float(dds))/float(ccm)  # units needed for target profit
        xxxx = (float(c)+float(dds))/(float(ccmr)/float(100))  # dollars needed
        print ">> Your Target Profit in Units To Earn",dds,"$ is",round(xxx)
        print ">> Your Target Profit in Dollars To Earn",dds,"$ is",xxxx
    elif(qq == 0):
        print "Canceled"
    elif(qq == 2):
        xc = float(input("Please Enter Expected Sales in Units: "))
        zzz = float(xc)-float(cuu)  # margin of safety in units
        zzzz = float(zzz)*float(a)  # margin of safety in dollars
        print ">> Your Margin of Safety in Units is",round(zzz)
        print ">> Your Margin of Safety in Dollars is",zzzz
    elif(qq == 3):
        # Python 2 input() evaluates the entry, so each prompt below
        # expects a sequence of per-case values, e.g. [1000, 2000] -- TODO confirm
        i = input("Please Enter Total Fixed Costs Value: ")
        o = input("Please Enter Sale Price Per Unit: ")
        p = input("Please Enter Variable Cost Per Unit: ")
        n = 0  # case counter
        for x,y,z in zip(i,o,p):
            cm = float(y)-float(z)
            uu = float(x)/float(cm)
            cmr = (float(cm)/float(y))*float(100)
            da = float(x)/(float(cmr)/float(100))
            n += 1
            print "Your Results in Case",int(n),"is :"
            print ">> Your Contribution Margin is",cm
            print ">> Your Breakeven Sales in Units is",round(uu)
            print ">> Your Contribution Margin Ratio is",cmr,"%"
            print ">> Your Breakeven Sales in Dollars is",da,"\n"
            # Compare each case against the base scenario computed above.
            if(cm > ccm):
                a = float(cm)-float(ccm)
                print ">> Your Contribution Margin Increased by",a
            elif(ccm > cm):
                a = float(ccm)-float(cm)
                print ">> Your Contribution Margin Decreased by",a
            if(uu > cuu):
                b = float(uu)-float(cuu)
                print ">> Your Breakeven Sales in Units Increased by",round(b)
            elif(cuu > uu):
                b = float(cuu)-float(uu)
                print ">> Your Breakeven Sales in Units Decreased by",round(b)
            if(cmr > ccmr):
                c = float(cmr)-float(ccmr)
                print ">> Your Contribution Margin Ratio Increased by",c,"%"
            elif(ccmr > cmr):
                c = float(ccmr)-float(cmr)
                print ">> Your Contribution Margin Ratio Decreased by",c,"%"
            if(da > cda):
                d = float(da)-float(cda)
                print ">> Your Breakeven Sales in Dollars Increased by",d
            elif(cda > da):
                d = float(cda)-float(da)
                print ">> Your Breakeven Sales in Dollars Decreased by",d,"\n"
def payback_period():
    """Payback Period.

    Computes how long an investment takes to recover its cost, either
    for equal annual net cash inflows (simple division) or unequal
    inflows (cumulative year-by-year recovery).
    """
    a = input("For Payback with Equal Annual Net Cash Inflows Press 1\nFor Payback with Unequal Net Cash Inflows Press 2: ")
    if (a == 1):
        b = float(input("Please Enter Total Amount Invested: "))
        c = float(input("Please Enter Expected Annual Net Cash Inflow: "))
        d = float(input("Please Enter Investment Useful Life (Years): "))
        e = (float(c)*float(d))-float(b)  # net cash remaining after full useful life
        f = float(b)/float(c)  # payback period = invested / annual inflow
        print ">> Your Payback Period is",f,"Years"
        print ">> Total Amount Remaining After Payback Period is",e,"(Residual Value Not Included)"
    elif(a == 2):
        b = float(input("Please Enter Total Amount Invested: "))
        # Python 2 input() evaluates the entry, so a sequence such as
        # [100, 200, 300] is expected here -- TODO confirm
        c = list(input("Please Enter Expected Annual Net Cash Inflow: "))
        n = 0  # number of whole years consumed
        t = 0  # cumulative inflow so far
        ss = sum(c)
        am = float(ss)-float(b)  # total inflow minus investment
        if(ss < b):
            print ">> Your Loss On Investment is",abs(am)
        elif(ss > b):
            # Accumulate yearly inflows until the investment is recovered.
            while t <= b:
                e = c[0 + n]
                t += float(e)
                n += 1
            gg = c[n - 1]  # inflow of the recovery year
            ii = (float(t)-float(b))/float(gg)  # unneeded fraction of that year
            ccc = float(n)-float(ii)  # fractional payback period in years
            print ">> Your Payback Period is",ccc,"Years"
            print ">> Total Amount Remaining After Payback Period is",am
        # NOTE(review): the ss == b case (exact recovery) prints nothing -- confirm intended
def rate_of_return():
    """Rate Of Return.

    Computes the accounting rate of return (ROR) of an asset, for
    either equal or unequal annual net cash inflows, as average annual
    operating income over average amount invested.
    """
    a = input("For Asset with Equal Annual Net Cash Inflows Press 1\nFor Asset with Unequal Net Cash Inflows Press 2: ")
    if(a == 1):
        b = float(input("Please Enter Annual Cash Inflows For The Asset: "))
        c = float(input("Please Enter Asset Useful Life (Years): "))
        d = float(input("Please Enter Total Depreciation During Operating Life Of Asset: "))
        e = float(input("Please Enter Any Asset Salvage Value: "))
        avo = ((float(b)*float(c))-(float(d)-float(e)))/float(c)  # avg annual operating income
        avi = (float(d)+float(e))/float(2)  # average amount invested
        ror = (float(avo)/float(avi))*float(100)  # rate of return (%)
        print ">> Your Average Annual Operating Income From an Asset is",avo
        print ">> Your Average Amount Invested in an Asset is",avi
        print ">> Your Rate Of Return (ROR) is",ror,"%"
    elif(a == 2):
        # Python 2 input() evaluates the entry, so a sequence of yearly
        # inflows is expected here -- TODO confirm
        b = list(input("Please Enter Annual Cash Inflows For The Asset: "))
        d = float(input("Please Enter Total Depreciation During Operating Life Of Asset: "))
        e = float(input("Please Enter Any Asset Salvage Value: "))
        f = sum(b)  # total inflows
        c = len(b)  # useful life inferred from number of entries
        g = (float(f)-(float(d)-float(e)))/float(c)  # avg annual operating income
        h = (float(d)+float(e))/float(2)  # average amount invested
        ror = (float(g)/float(h))*float(100)  # rate of return (%)
        print ">> Your Average Annual Operating Income From an Asset is",g
        print ">> Your Average Amount Invested in an Asset is",h
        print ">> Your Rate Of Return (ROR) is",ror,"%"
def compound_interest():
"""Compound Interest"""
ffg = input("If Interest Added Monthly Press 1\nIf Interest Added Annually Press 2: ")
if(ffg == 1):
a = float(input("Pleae Enter The Principal Value: "))
b = float(input("Pleae Enter Interest Rate %: "))
c = float(input("Please Enter The Loan Period (Months): "))
t = 0
n = 0
g = a
while c != 0:
ie = (float(a) * (float(b)/float(100))) * (float(1)/float(12))
a += float(ie)
t += float(ie)
n += 1
c -= 1
dd = float(g)+float(t)
print ">> Your Interest Expense For Month No.",n,"is",ie
print ">> Your Total Interest Expense For",n,"Months is",t
print ">> Your Total Payment in Maturity Date is",dd
elif(ffg == 2):
a = float(input("Pleae Enter The Principal Value: "))
b = float(input("Pleae Enter Interest Rate %: "))
c = float(input("Please Enter The Loan Period (Years): "))
t = 0
n = 0
g = a
while c != 0:
ie = float(a) * (float(b)/float(100))
a += float(ie)
t += float(ie)
n += 1
c -= 1
dd = float(g)+float(t)
print ">> Your Interest Expense For Year No.",n,"is",ie
print ">> Your Total Interest Expense For",n,"Years is",t
print ">> Your Total Payment in Maturity Date is",dd
def price_variance():
    """Price Variance.

    Computes (actual price - standard price) * actual quantity.
    A positive result means actual price exceeded standard.
    """
    a = float(input("Please Enter Your Actual Price: "))
    b = float(input("Please Enter Your Standard Price: "))
    c = float(input("Please Enter Actual Quantity: "))
    d = float(a)-float(b)  # change in price per unit
    e = float(d)*float(c)  # total price variance
    print ">> Your Change In Price is",d
    print ">> Your Price Variance is",e
def efficiency_variance():
    """Efficiency Variance.

    Computes (actual quantity - standard quantity) * standard price.
    A positive result means more input was used than the standard allows.
    """
    a = float(input("Please Enter Your Actual Quantity: "))
    b = float(input("Please Enter Your Standard Quantity: "))
    c = float(input("Please Enter Your Standard Price: "))
    d = float(a)-float(b)  # change in quantity
    e = float(d)*float(c)  # total efficiency variance
    print ">> Your Change In Quantity is",d
    print ">> Your Efficiency Variance is",e
def profit_margin():
    """Profit Margin.

    Computes operating income as a percentage of total sales.
    """
    a = float(input("Please Enter Operating Income Value: "))
    b = float(input("Please Enter Total Sales Value: "))
    c = (float(a)/float(b))*float(100)  # profit margin (%)
    print ">> Your Profit Margin is",c,"%"
def return_on_investment():
    """Return On Investment.

    Computes ROI as operating income over average total assets
    (mean of beginning and ending balances), then optionally breaks it
    down into profit margin and asset turnover.
    """
    a = float(input("Please Enter Operating Income Value: "))
    b = float(input("Please Enter Beginning Total Assets Value: "))
    c = float(input("Please Enter Ending Total Assets Value: "))
    d = (float(a)/((float(b)+float(c))/float(2)))*float(100)  # ROI (%)
    print ">> Your Return on Investment (ROI) is",d,"% \n"
    e = input("To Calculate Profit Margin And Asset TurnOver Press 1\n To Exit Press 0: ")
    if(e == 1):
        f = float(input("Please Enter Total Sales Value: "))
        i = (float(b)+float(c))/float(2)  # average total assets
        g = (float(a)/float(f))*float(100)  # profit margin (%)
        h = float(f)/float(i)  # asset turnover
        print ">> Your Profit Margin is",g,"%"
        print ">> Your Asset TurnOver is",h
def residual_income():
    """Residual Income.

    Computes operating income minus the minimum acceptable income,
    where the minimum is the target rate of return applied to average
    total assets (mean of beginning and ending balances).
    """
    a = float(input("Please Enter Operating Income Value: "))
    b = float(input("Please Enter Your Target Rate Of Return %: "))
    c = float(input("Please Enter Beginning Total Assets Value: "))
    d = float(input("Please Enter Ending Total Assets Value: "))
    e = (float(b)/float(100))*((float(c)+float(d))/float(2))  # minimum acceptable income
    f = float(a)-float(e)  # residual income
    print ">> Your Minimum Acceptable Operating Income is",e
    print ">> Your Residual Income (RI) is",f
def effective_tax_rate():
    """Effective Tax Rate.

    Derives pre-tax income from net income plus tax expense, then
    computes the effective tax rate and the after-tax operating income.
    """
    b = float(input("Please Enter Net Income Value: "))
    a = float(input("Please Enter Income Tax Expense: "))
    c = float(b)+float(a)  # pre-tax income
    d = (float(a)/float(c))*float(100)  # effective tax rate (%)
    e = float(100)-float(d)  # after-tax percentage kept
    f = float(c)*(float(e)/float(100))  # after-tax operating income
    print ">> Your Pre Tax Income is",c
    print ">> Your Effective Tax Rate is",round(d,1),"%"
    print ">> Your After-Tax Operating Income is",f
def economic_value_added():
    """Economic Value Added.

    Computes EVA as after-tax operating income minus the capital
    charge: (average total assets - current liabilities) * WACC.
    """
    a = float(input("Please Enter After Tax Operating Income: "))
    b = float(input("Please Enter Current Liabilities Value: "))
    c = float(input("Please Enter Weighted Average Cost Of Capital (minimum rate of return required) %: "))
    d = float(input("Please Enter Beginning Total Assets Value: "))
    e = float(input("Please Enter Ending Total Assets Value: "))
    # EVA = income - ((avg assets - current liabilities) * WACC)
    f = float(a)-((((float(d)+float(e))/float(2))-float(b))*(float(c)/float(100)))
    print ">> Your Economic Value Added (EVA) is",f
| 45.85666
| 212
| 0.555239
| 12,695
| 95,015
| 4.150374
| 0.03781
| 0.090607
| 0.131792
| 0.165006
| 0.982027
| 0.982027
| 0.982027
| 0.982027
| 0.982027
| 0.982027
| 0
| 0.013453
| 0.30451
| 95,015
| 2,072
| 213
| 45.85666
| 0.783875
| 0.008209
| 0
| 0.944316
| 0
| 0.008121
| 0.419489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.00464
| null | null | 0.24478
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
43dc3838b126d4358d33fdcee17fb8e0b0c0d8b6
| 7,995
|
py
|
Python
|
tests/test_triptranslator.py
|
niktto/triptranslator
|
e536fdc6c82335d7127fb29532800cc3b934aca3
|
[
"MIT"
] | null | null | null |
tests/test_triptranslator.py
|
niktto/triptranslator
|
e536fdc6c82335d7127fb29532800cc3b934aca3
|
[
"MIT"
] | null | null | null |
tests/test_triptranslator.py
|
niktto/triptranslator
|
e536fdc6c82335d7127fb29532800cc3b934aca3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_triptranslator
----------------------------------
Tests for `triptranslator` module.
"""
import unittest
from triptranslator import triptranslator
class TestTriptranslator(unittest.TestCase):
    """Tests for triptranslator.translate_boarding_cards and BoardingCard.

    Covers input validation (missing start/end), ordering of an
    unordered set of boarding cards into a single trip, and rejection
    of trips that cannot be chained into one continuous route.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_bad_start(self):
        # A BoardingCard with no start location must raise ValueError.
        def create_bad_bc():
            triptranslator.BoardingCard(
                start=None,
                end=u"Szczecin Glowny",
                transport_type='train',
                transport_seat='225',
                transport_number=u'Kopernik',
            )
        self.assertRaises(ValueError, create_bad_bc)

    def test_bad_end(self):
        # A BoardingCard with an empty end location must raise ValueError.
        def create_bad_bc():
            triptranslator.BoardingCard(
                start=u"Szczecin Glowny",
                end='',
                transport_type='train',
                transport_seat='225',
                transport_number=u'Kopernik',
            )
        self.assertRaises(ValueError, create_bad_bc)

    def test_simple_trip(self):
        # Cards are deliberately out of order; the translator must chain
        # them Otwock -> ... -> Szczecin Glowny and append the arrival line.
        random_boarding_passes = [
            triptranslator.BoardingCard(
                start=u"Warszawa Wschodnia",
                end=u"Szczecin Glowny",
                transport_type='train',
                transport_seat='225',
                transport_number=u'Kopernik',
            ),
            triptranslator.BoardingCard(
                start=u"Otwock",
                end=u"Warszawa Dworzec Gdanski",
                transport_type='train',
                transport_seat='16',
                transport_number='S1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Wilenski",
                end=u"Warszawa Wschodnia",
                transport_type='metro',
                transport_number='M1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Gdanski",
                end=u"Warszawa Dworzec Wilenski",
                transport_type='bus',
                transport_number='145',
            ),
        ]
        valid_messages = [
            u'Take train S1 from Otwock to Warszawa Dworzec Gdanski. Sit in seat 16.',
            u'Take the 145 bus from Warszawa Dworzec Gdanski to Warszawa Dworzec Wilenski. No seat assignment.',
            u'Get from Warszawa Dworzec Wilenski to Warszawa Wschodnia.',
            u'Take train Kopernik from Warszawa Wschodnia to Szczecin Glowny. Sit in seat 225.',
            u'You have arrived at your final destination.'
        ]
        assert triptranslator.translate_boarding_cards(random_boarding_passes) == valid_messages

    def test_split_trip(self):
        # The bus starts at a station no other card reaches, so the cards
        # cannot be chained into one trip -> ValueError expected.
        random_boarding_passes = [
            triptranslator.BoardingCard(
                start=u"Warszawa Wschodnia",
                end=u"Szczecin Glowny",
                transport_type='train',
                transport_seat='225',
                transport_number=u'Kopernik',
            ),
            triptranslator.BoardingCard(
                start=u"Otwock",
                end=u"Warszawa Dworzec Gdanski",
                transport_type='train',
                transport_seat='16',
                transport_number='S1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Wilenski",
                end=u"Warszawa Wschodnia",
                transport_type='metro',
                transport_number='M1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Pocztowy",
                end=u"Warszawa Dworzec Wilenski",
                transport_type='bus',
                transport_number='145',
            ),
        ]
        def generate_trip():
            return triptranslator.translate_boarding_cards(random_boarding_passes)
        self.assertRaises(ValueError, generate_trip)

    def test_split_end_trip(self):
        # Two cards leave from the same station (duplicate start), making
        # the chain ambiguous/broken -> ValueError expected.
        random_boarding_passes = [
            triptranslator.BoardingCard(
                start=u"Warszawa Wschodnia",
                end=u"Szczecin Glowny",
                transport_type='train',
                transport_seat='225',
                transport_number=u'Kopernik',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Wilenski",
                end=u"Warszawa Dworzec Gdanski",
                transport_type='train',
                transport_seat='16',
                transport_number='S1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Wilenski",
                end=u"Warszawa Wschodnia",
                transport_type='metro',
                transport_number='M1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Pocztowy",
                end=u"Warszawa Dworzec Wilenski",
                transport_type='bus',
                transport_number='145',
            ),
        ]
        def generate_trip():
            return triptranslator.translate_boarding_cards(random_boarding_passes)
        self.assertRaises(ValueError, generate_trip)

    def test_complex_trip(self):
        # Full multimodal trip including flights with gate/baggage fields.
        random_boarding_passes = [
            triptranslator.BoardingCard(
                start=u"Warszawa Wschodnia",
                end=u"Szczecin Glowny",
                transport_type='train',
                transport_seat='225',
                transport_number=u'Kopernik',
            ),
            triptranslator.BoardingCard(
                start=u"Otwock",
                end=u"Warszawa Dworzec Gdanski",
                transport_type='train',
                transport_seat='16',
                transport_number='S1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Wilenski",
                end=u"Warszawa Wschodnia",
                transport_type='metro',
                transport_number='M1',
            ),
            triptranslator.BoardingCard(
                start=u"Warszawa Dworzec Gdanski",
                end=u"Warszawa Dworzec Wilenski",
                transport_type='bus',
                transport_number='145',
            ),
            triptranslator.BoardingCard(
                start=u"Szczecin Goleniow",
                end=u"Berlin",
                transport_type='plane',
                transport_seat='3112',
                transport_number='PF3455',
                transport_gate='H3L'
            ),
            triptranslator.BoardingCard(
                start=u"Szczecin Glowny",
                end=u"Szczecin Goleniow",
                transport_type='bus',
                transport_number=u'airport',
            ),
            triptranslator.BoardingCard(
                start=u"Berlin",
                end=u"Maroko",
                transport_type='plane',
                transport_seat='112',
                transport_number='K332',
                transport_gate='H3L',
                transport_baggage_number='18',
            ),
        ]
        valid_messages = [
            u'Take train S1 from Otwock to Warszawa Dworzec Gdanski. Sit in seat 16.',
            u'Take the 145 bus from Warszawa Dworzec Gdanski to Warszawa Dworzec Wilenski. No seat assignment.',
            u'Get from Warszawa Dworzec Wilenski to Warszawa Wschodnia.',
            u'Take train Kopernik from Warszawa Wschodnia to Szczecin Glowny. Sit in seat 225.',
            u'Take the airport bus from Szczecin Glowny to Szczecin Goleniow. No seat assignment.',
            u'From Szczecin Goleniow, take flight PF3455 to Berlin. Gate H3L, seat 3112. Baggage will we automatically transferred from your last leg.',
            u'From Berlin, take flight K332 to Maroko. Gate H3L, seat 112. Baggage drop at ticket counter 18.',
            u'You have arrived at your final destination.',
        ]
        self.assertEqual(triptranslator.translate_boarding_cards(random_boarding_passes), valid_messages)
| 35.691964
| 152
| 0.543089
| 722
| 7,995
| 5.858726
| 0.149584
| 0.053191
| 0.153901
| 0.1513
| 0.828369
| 0.796927
| 0.796927
| 0.781087
| 0.740662
| 0.708038
| 0
| 0.020072
| 0.370607
| 7,995
| 223
| 153
| 35.852018
| 0.820548
| 0.016635
| 0
| 0.78534
| 0
| 0.010471
| 0.251242
| 0
| 0
| 0
| 0
| 0
| 0.031414
| 1
| 0.062827
| false
| 0.052356
| 0.010471
| 0.010471
| 0.089005
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
78f98f94cc6ecdbc24757e76e47d816cbb96a1c1
| 3,779
|
py
|
Python
|
example/myapp/migrations/0001_initial.py
|
sasriawesome/django_products
|
a945dbf983748ff558583695c66226d579bfa4a0
|
[
"MIT"
] | null | null | null |
example/myapp/migrations/0001_initial.py
|
sasriawesome/django_products
|
a945dbf983748ff558583695c66226d579bfa4a0
|
[
"MIT"
] | 4
|
2021-03-19T01:39:36.000Z
|
2021-06-04T22:49:28.000Z
|
example/myapp/migrations/0001_initial.py
|
sasriawesome/django_products
|
a945dbf983748ff558583695c66226d579bfa4a0
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-04-07 16:08
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the example app.

    Creates three multi-table-inheritance children of
    django_products.Product (Book, Music, Video), each with identical
    stock-tracking fields. Do not hand-edit field definitions here;
    regenerate via `makemigrations` instead.
    """

    initial = True

    dependencies = [
        # The parent Product model must exist before these children.
        ('django_products', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Book',
            fields=[
                # MTI link to the parent Product row.
                ('product_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_products.Product')),
                ('sn', models.CharField(blank=True, max_length=150, null=True, verbose_name='serial number')),
                ('stock_on_hand', models.IntegerField(default=0, verbose_name='stock on hand')),
                ('stock_on_delivery', models.IntegerField(default=0, verbose_name='stock on delivery')),
                ('stock_on_request', models.IntegerField(default=0, verbose_name='stock on request')),
                ('min_stock', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)], verbose_name='min stock')),
                ('max_stock', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)], verbose_name='max stock')),
            ],
            options={
                'verbose_name': 'Book',
            },
            bases=('django_products.product', models.Model),
        ),
        migrations.CreateModel(
            name='Music',
            fields=[
                ('product_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_products.Product')),
                ('sn', models.CharField(blank=True, max_length=150, null=True, verbose_name='serial number')),
                ('stock_on_hand', models.IntegerField(default=0, verbose_name='stock on hand')),
                ('stock_on_delivery', models.IntegerField(default=0, verbose_name='stock on delivery')),
                ('stock_on_request', models.IntegerField(default=0, verbose_name='stock on request')),
                ('min_stock', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)], verbose_name='min stock')),
                ('max_stock', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)], verbose_name='max stock')),
            ],
            options={
                'verbose_name': 'Music',
            },
            bases=('django_products.product', models.Model),
        ),
        migrations.CreateModel(
            name='Video',
            fields=[
                ('product_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_products.Product')),
                ('sn', models.CharField(blank=True, max_length=150, null=True, verbose_name='serial number')),
                ('stock_on_hand', models.IntegerField(default=0, verbose_name='stock on hand')),
                ('stock_on_delivery', models.IntegerField(default=0, verbose_name='stock on delivery')),
                ('stock_on_request', models.IntegerField(default=0, verbose_name='stock on request')),
                ('min_stock', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)], verbose_name='min stock')),
                ('max_stock', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)], verbose_name='max stock')),
            ],
            options={
                'verbose_name': 'Video',
            },
            bases=('django_products.product', models.Model),
        ),
    ]
| 57.257576
| 201
| 0.633501
| 408
| 3,779
| 5.693627
| 0.169118
| 0.09944
| 0.161429
| 0.167886
| 0.885493
| 0.885493
| 0.869565
| 0.869565
| 0.869565
| 0.816186
| 0
| 0.016792
| 0.227838
| 3,779
| 65
| 202
| 58.138462
| 0.779301
| 0.011908
| 0
| 0.672414
| 1
| 0
| 0.185155
| 0.036977
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.051724
| 0
| 0.12069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6001ac0223c91177fa24051cf023acd0acc2bbfe
| 5,154
|
py
|
Python
|
avrogen/dict_wrapper.py
|
szczeles/avro_gen
|
bce34b6f15468e925b9895a32e3167e458d092b0
|
[
"Apache-2.0"
] | null | null | null |
avrogen/dict_wrapper.py
|
szczeles/avro_gen
|
bce34b6f15468e925b9895a32e3167e458d092b0
|
[
"Apache-2.0"
] | null | null | null |
avrogen/dict_wrapper.py
|
szczeles/avro_gen
|
bce34b6f15468e925b9895a32e3167e458d092b0
|
[
"Apache-2.0"
] | null | null | null |
import six
if six.PY3:
    class DictWrapper(dict):
        """Read-only dict facade (Python 3 variant).

        Subclasses dict so isinstance checks pass, but delegates every
        read operation to the wrapped ``_inner_dict`` and raises
        NotImplementedError for every mutating operation.
        """
        # Only the wrapped dict is stored; no per-instance __dict__.
        __slots__ = ['_inner_dict']

        def __init__(self, inner_dict=None):
            super(DictWrapper, self).__init__()
            self._inner_dict = {} if inner_dict is None else inner_dict  # type: dict

        # --- read-only access, delegated to the inner dict ---
        def __getitem__(self, item):
            return self._inner_dict.__getitem__(item)

        def __iter__(self):
            return self._inner_dict.__iter__()

        def __len__(self):
            return self._inner_dict.__len__()

        def __setitem__(self, key, value):
            # Wrapper is immutable: writes are rejected.
            raise NotImplementedError()

        def items(self):
            return self._inner_dict.items()

        def keys(self):
            return self._inner_dict.keys()

        def values(self):
            return self._inner_dict.values()

        def fromkeys(S, v=None):
            raise NotImplementedError

        def clear(self):
            raise NotImplementedError

        def copy(self):
            # Copies wrap a shallow copy of the inner dict.
            return DictWrapper(self._inner_dict.copy())

        def get(self, k, d=None):
            return self._inner_dict.get(k, d)

        def __contains__(self, item):
            return self._inner_dict.__contains__(item)

        def __str__(self):
            return self._inner_dict.__str__()

        def __repr__(self):
            return self._inner_dict.__repr__()

        def __sizeof__(self):
            return self._inner_dict.__sizeof__()

        # --- mutating operations are all rejected ---
        def pop(self, k, d=None):
            raise NotImplementedError()

        def popitem(self):
            raise NotImplementedError()

        def update(self, E=None, **F):
            raise NotImplementedError()

        def setdefault(self, k, d=None):
            raise NotImplementedError()

        # --- comparisons and hashing, delegated to the inner dict ---
        def __eq__(self, other):
            return self._inner_dict.__eq__(other)

        def __ne__(self, other):
            return self._inner_dict.__ne__(other)

        def __le__(self, other):
            return self._inner_dict.__le__(other)

        def __ge__(self, other):
            return self._inner_dict.__ge__(other)

        def __lt__(self, other):
            return self._inner_dict.__lt__(other)

        def __gt__(self, other):
            return self._inner_dict.__gt__(other)

        def __hash__(self):
            # NOTE(review): dict.__hash__ is None, so this returns None
            # rather than an int -- confirm callers never hash instances.
            return self._inner_dict.__hash__()
else:
class DictWrapper(dict):
__slots__ = ['_inner_dict']
def __init__(self, inner_dict=None):
super(DictWrapper, self).__init__()
self._inner_dict = {} if inner_dict is None else inner_dict # type: dict
def __getitem__(self, item):
return self._inner_dict.__getitem__(item)
def __iter__(self):
return self._inner_dict.__iter__()
def __len__(self):
return self._inner_dict.__len__()
def __setitem__(self, key, value):
raise NotImplementedError()
def items(self):
return self._inner_dict.items()
def keys(self):
return self._inner_dict.keys()
def values(self):
return self._inner_dict.values()
def iteritems(self):
return self._inner_dict.iteritems()
def iterkeys(self):
return self._inner_dict.iterkeys()
def itervalues(self):
return self._inner_dict.itervalues()
def viewitems(self):
return self._inner_dict.viewitems()
def viewkeys(self):
return self._inner_dict.viewkeys()
def viewvalues(self):
return self._inner_dict.viewvalues()
def fromkeys(S, v=None):
raise NotImplementedError
def clear(self):
raise NotImplementedError
def copy(self):
return DictWrapper(self._inner_dict.copy())
def get(self, k, d=None):
return self._inner_dict.get(k, d)
def has_key(self, k):
return self._inner_dict.has_key(key)
def __contains__(self, item):
return self._inner_dict.__contains__(item)
def __str__(self):
return self._inner_dict.__str__()
def __repr__(self):
return self._inner_dict.__repr__()
def __sizeof__(self):
return self._inner_dict.__sizeof__()
def pop(self, k, d=None):
raise NotImplementedError()
def popitem(self):
raise NotImplementedError()
def update(self, E=None, **F):
raise NotImplementedError()
def setdefault(self, k, d=None):
raise NotImplementedError()
def __eq__(self, other):
return self._inner_dict.__eq__(other)
def __ne__(self, other):
return self._inner_dict.__ne__(other)
def __le__(self, other):
return self._inner_dict.__le__(other)
def __ge__(self, other):
return self._inner_dict.__ge__(other)
def __lt__(self, other):
return self._inner_dict.__lt__(other)
def __gt__(self, other):
return self._inner_dict.__gt__(other)
def __hash__(self):
return self._inner_dict.__hash__()
| 26.56701
| 85
| 0.58343
| 568
| 5,154
| 4.65669
| 0.110915
| 0.187146
| 0.240832
| 0.308885
| 0.930813
| 0.878639
| 0.878639
| 0.878639
| 0.878639
| 0.878639
| 0
| 0.000286
| 0.321692
| 5,154
| 193
| 86
| 26.704663
| 0.756293
| 0.004075
| 0
| 0.870229
| 0
| 0
| 0.004289
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.465649
| false
| 0
| 0.007634
| 0.343511
| 0.847328
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 12
|
6036cf4774702eb3a05cd6a1c4685bbd43911281
| 3,067
|
py
|
Python
|
tests/test_get_gt_from_sample_info.py
|
leoisl/vcf_consensus_builder
|
1e2f0312810b183edf368b33086475318a779b87
|
[
"MIT"
] | null | null | null |
tests/test_get_gt_from_sample_info.py
|
leoisl/vcf_consensus_builder
|
1e2f0312810b183edf368b33086475318a779b87
|
[
"MIT"
] | null | null | null |
tests/test_get_gt_from_sample_info.py
|
leoisl/vcf_consensus_builder
|
1e2f0312810b183edf368b33086475318a779b87
|
[
"MIT"
] | null | null | null |
import unittest
from vcf_consensus_builder.vcf_consensus_builder_core import get_gt_from_sample_info
class Test_get_gt_from_sample_info(unittest.TestCase):
    """Tests for get_gt_from_sample_info.

    The function parses the leading genotype (GT) token of a VCF
    sample-info string: the text before the first ':' and '/'.
    A numeric GT yields that integer; anything else ('.', words)
    yields -1.
    """

    # --- numeric GT is extracted regardless of slashes/colons ---
    def test_correct_gt_with_slash(self):
        sample_info = "3/3:rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_without_slash(self):
        sample_info = "3:rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_colon_comes_before_slash(self):
        sample_info = "3:rest/asd"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_has_no_colon(self):
        sample_info = "3/3"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_has_no_colon_no_slash(self):
        sample_info = "3"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_several_slashes_and_colons(self):
        sample_info = "3/3:rest/asd:qwe/ewr"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_several_slashes_and_colons_no_slash_in_first(self):
        sample_info = "3:rest/asd:qwe/ewr"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    def test_correct_gt_no_slashes_several_colons(self):
        sample_info = "3:rest:qwe"
        actual = get_gt_from_sample_info(sample_info)
        expected = 3
        self.assertEqual(actual, expected)

    # --- non-numeric GT ('.' or words) maps to -1 ---
    def test_incorrect_gt_is_dot_with_slash(self):
        sample_info = "./.:rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = -1
        self.assertEqual(actual, expected)

    def test_incorrect_gt_is_dot_without_slash(self):
        sample_info = ".:rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = -1
        self.assertEqual(actual, expected)

    def test_incorrect_gt_is_word_with_slash(self):
        sample_info = "asd/qwe:rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = -1
        self.assertEqual(actual, expected)

    def test_incorrect_gt_is_word_without_slash(self):
        sample_info = "asd:rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = -1
        self.assertEqual(actual, expected)

    def test_incorrect_gt_is_word_without_colon(self):
        sample_info = "asd/rest"
        actual = get_gt_from_sample_info(sample_info)
        expected = -1
        self.assertEqual(actual, expected)

    def test_incorrect_gt_is_word_without_colon_without_slash(self):
        sample_info = "asd"
        actual = get_gt_from_sample_info(sample_info)
        expected = -1
        self.assertEqual(actual, expected)
| 34.460674
| 84
| 0.692533
| 412
| 3,067
| 4.708738
| 0.106796
| 0.226804
| 0.074227
| 0.123711
| 0.91701
| 0.839691
| 0.778866
| 0.778866
| 0.778866
| 0.778866
| 0
| 0.010602
| 0.231171
| 3,067
| 88
| 85
| 34.852273
| 0.812129
| 0
| 0
| 0.575342
| 0
| 0
| 0.039452
| 0
| 0
| 0
| 0
| 0
| 0.191781
| 1
| 0.191781
| false
| 0
| 0.027397
| 0
| 0.232877
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
605c5e2e33a9f8d5d3f61b52546433fb0a1d509d
| 54,674
|
py
|
Python
|
tests/tagger/test_co_occurrence_score.py
|
JungeAlexander/cocosco
|
81ba561f6f16b43cfbd1b6d119e042bb640da23d
|
[
"MIT"
] | 15
|
2018-01-06T19:34:12.000Z
|
2021-02-15T21:58:11.000Z
|
tests/tagger/test_co_occurrence_score.py
|
JungeAlexander/cocosco
|
81ba561f6f16b43cfbd1b6d119e042bb640da23d
|
[
"MIT"
] | 3
|
2017-11-11T15:46:29.000Z
|
2018-11-16T13:14:46.000Z
|
tests/tagger/test_co_occurrence_score.py
|
JungeAlexander/cocosco
|
81ba561f6f16b43cfbd1b6d119e042bb640da23d
|
[
"MIT"
] | 2
|
2018-06-10T18:43:14.000Z
|
2021-03-11T15:28:17.000Z
|
import numpy
import pandas
from pandas.util.testing import assert_frame_equal
from pytest import approx
from pytest import raises
import cocoscore.tagger.co_occurrence_score as co_occurrence_score
import cocoscore.tools.data_tools as dt
from cocoscore.ml.distance_scores import polynomial_decay_distance
from cocoscore.ml.fasttext_helpers import fasttext_fit_predict_default
def fasttext_function(train, valid, epochs, dim, bucket):
    """Adapter passing positional hyperparameters to
    fasttext_fit_predict_default as keyword arguments.

    :param train: training data, forwarded unchanged
    :param valid: validation data, forwarded unchanged
    :param epochs: number of training epochs
    :param dim: embedding dimension
    :param bucket: number of hash buckets
    :return: whatever fasttext_fit_predict_default returns
    """
    return fasttext_fit_predict_default(train, valid,
                                        epochs=epochs,
                                        dim=dim,
                                        bucket=bucket)
class TestClass(object):
matches_file_path = 'tests/tagger/matches_file.tsv'
matches_file_same_type_path = 'tests/tagger/matches_file_same_type.tsv'
matches_document_level_comentions_file_path = 'tests/tagger/matches_file_document_level_comentions.tsv'
matches_file_single_matches_path = 'tests/tagger/matches_file_single_matches.tsv'
matches_file_cross_path = 'tests/tagger/matches_file_cross.tsv'
matches_file_cross_fantasy_types_path = 'tests/tagger/matches_file_cross_fantasy_types.tsv'
sentence_score_file_path = 'tests/tagger/sentence_scores_file.tsv'
paragraph_score_file_path = 'tests/tagger/paragraph_scores_file.tsv'
document_score_file_path = 'tests/tagger/document_scores_file.tsv'
paragraph_sentence_score_file_path = 'tests/tagger/paragraph_sentence_scores_file.tsv'
document_paragraph_sentence_score_file_path = 'tests/tagger/document_paragraph_sentence_scores_file.tsv'
document_paragraph_score_file_path = 'tests/tagger/document_paragraph_scores_file.tsv'
precedence_document_paragraph_sentence_score_file_path = \
'tests/tagger/precedence_document_paragraph_sentence_scores_file.tsv'
entity_file_path = 'tests/tagger/entities2.tsv.gz'
entity_fantasy_types_file_path = 'tests/tagger/entities2_fantasy_types.tsv.gz'
entity_file_same_type_path = 'tests/tagger/entities2_same_type.tsv.gz'
cos_cv_test_path = 'tests/ml/cos_simple_cv.txt'
def test_load_sentence_scores(self):
sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
assert {('--D', 'A'): {(1111, 1, 2): 0.9, (1111, 2, 3): 0.5,
(3333, 2, 2): 0.4, (3333, 2, 3): 0.44},
('B', 'C'): {(2222, 1, 1): 0}} == sentence_scores
def test_load_sentence_scores_score_cutoff(self):
sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path,
cutoff=0.5)
assert {('--D', 'A'): {(1111, 1, 2): 0.9, (1111, 2, 3): 0.5}} == sentence_scores
def test_load_paragraph_scores(self):
paragraph_scores = co_occurrence_score.load_score_file(self.paragraph_score_file_path)
assert {('--D', 'A'): {(1111, 1): 0.9, (1111, 2): 0.5,
(3333, 2): 0.4},
('B', 'C'): {(2222, 1): 0}} == paragraph_scores
def test_load_document_scores(self):
    """load_score_file parses a document-level file into {pair: {doc: score}}."""
    expected = {
        ('--D', 'A'): {1111: 1, 3333: 2},
        ('B', 'C'): {2222: 3},
    }
    loaded = co_occurrence_score.load_score_file(self.document_score_file_path)
    assert loaded == expected
def test_weighted_counts_sentences(self):
    """Weighted counts from sentence scores only: per-pair sums of sentence
    scores plus document_weight per co-mentioning document; paragraph_weight=0."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, None, None, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=0,
                                                              sentence_weight=1.0)
    # 15.9 = 15 (document) + 0.9 (best sentence in 1111); 15.44 analogous for 3333.
    assert {('--D', 'A'): 15.9 + 15.44,
            ('B', 'C'): 15,
            'A': 15.9 + 15.44,
            '--D': 15.9 + 15.44,
            'B': 15,
            'C': 15,
            None: 15.9 + 15.44 + 15} == approx(weighted_counts)
def test_weighted_counts_sentences_paragraphs(self):
    """Weighted counts from combined sentence + paragraph scores (split via
    split_scores); paragraph contributions (0.9, 0.4) add on top of sentence sums."""
    scores = co_occurrence_score.load_score_file(self.paragraph_sentence_score_file_path)
    sentence_scores, paragraph_scores, _ = co_occurrence_score.split_scores(scores)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, paragraph_scores, None, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=1.0,
                                                              sentence_weight=1.0)
    assert {('--D', 'A'): 15.9 + 0.9 + 15.44 + 0.4,
            ('B', 'C'): 15,
            'A': 15.9 + 0.9 + 15.44 + 0.4,
            '--D': 15.9 + 0.9 + 15.44 + 0.4,
            'B': 15,
            'C': 15,
            None: 15.9 + 0.9 + 15.44 + 0.4 + 15} == approx(weighted_counts)
def test_weighted_counts_paragraphs(self):
    """Weighted counts from paragraph scores only: document_weight per document
    plus the paragraph score itself (no sentence contribution)."""
    paragraph_scores = co_occurrence_score.load_score_file(self.paragraph_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, None, paragraph_scores, None, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=1.0,
                                                              sentence_weight=1.0)
    assert {('--D', 'A'): 15.0 + 0.9 + 15.0 + 0.4,
            ('B', 'C'): 15.0,
            'A': 15.0 + 0.9 + 15.0 + 0.4,
            '--D': 15.0 + 0.9 + 15.0 + 0.4,
            'B': 15.0,
            'C': 15.0,
            None: 15.0 + 0.9 + 15.0 + 0.4 + 15.0} == approx(weighted_counts)
def test_weighted_counts_sentences_paragraphs_documents(self):
    """Weighted counts with all three levels present: sentence score + paragraph
    score + document score * document_weight, summed per document."""
    scores = co_occurrence_score.load_score_file(self.document_paragraph_sentence_score_file_path)
    sentence_scores, paragraph_scores, document_scores = co_occurrence_score.split_scores(scores)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, paragraph_scores,
                                                              document_scores, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=2.0, paragraph_weight=1.0,
                                                              sentence_weight=1.0)
    # e.g. 0.9 (sentence) + 0.9 (paragraph) + 1 * 2 (document score * weight) for doc 1111.
    assert {('--D', 'A'): 0.9 + 0.9 + 1 * 2 + 0.44 + 0.4 + 2 * 2,
            ('B', 'C'): 3 * 2,
            'A': 0.9 + 0.9 + 1 * 2 + 0.44 + 0.4 + 2 * 2,
            '--D': 0.9 + 0.9 + 1 * 2 + 0.44 + 0.4 + 2 * 2,
            'B': 3 * 2,
            'C': 3 * 2,
            None: 0.9 + 0.9 + 1 * 2 + 0.44 + 0.4 + 2 * 2 + 3 * 2} == weighted_counts
def test_weighted_counts_documents(self):
    """Weighted counts from document scores only: document score * document_weight;
    sentence_weight is irrelevant here since no sentence scores are given."""
    document_scores = co_occurrence_score.load_score_file(self.document_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, None, None,
                                                              document_scores, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=2.0, paragraph_weight=1.0,
                                                              sentence_weight=2.0)
    assert {('--D', 'A'): 1 * 2 + 2 * 2,
            ('B', 'C'): 3 * 2,
            'A': 1 * 2 + 2 * 2,
            '--D': 1 * 2 + 2 * 2,
            'B': 3 * 2,
            'C': 3 * 2,
            None: 1 * 2 + 2 * 2 + 3 * 2} == weighted_counts
def test_weighted_counts_paragraphs_documents(self):
    """Weighted counts from paragraph + document scores (no sentence scores)."""
    paragraph_scores = co_occurrence_score.load_score_file(self.paragraph_score_file_path)
    document_scores = co_occurrence_score.load_score_file(self.document_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, None, paragraph_scores,
                                                              document_scores, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=2.0, paragraph_weight=1.0,
                                                              sentence_weight=1.0)
    # paragraph score + document score * document_weight, per document
    pair_d_a = 0.9 + 1 * 2. + 0.4 + 2 * 2.
    pair_b_c = 3 * 2.
    expected = {('--D', 'A'): pair_d_a,
                ('B', 'C'): pair_b_c,
                'A': pair_d_a,
                '--D': pair_d_a,
                'B': pair_b_c,
                'C': pair_b_c,
                None: pair_d_a + pair_b_c}
    assert expected == approx(weighted_counts)
def test_co_occurrence_score_sentences(self):
    """co_occurrence_score on sentence scores matches the score recomputed from
    get_weighted_counts via s = c_pair**w * (c_pair * c_all / (c_i * c_j))**(1 - w)."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, None, None, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(None, self.sentence_score_file_path, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_sentences_paragraphs(self):
    """co_occurrence_score on a combined paragraph+sentence score file matches
    the score recomputed from get_weighted_counts with the same weights."""
    scores = co_occurrence_score.load_score_file(self.paragraph_sentence_score_file_path)
    sentence_scores, paragraph_scores, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 1.0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, paragraph_scores, None, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(None, self.paragraph_sentence_score_file_path, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    # s = c_pair**w * (c_pair * c_all / (c_i * c_j))**(1 - w)
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_sentences_documents(self):
    """co_occurrence_score on the full document+paragraph+sentence score file
    matches the score recomputed from get_weighted_counts.
    NOTE(review): name says 'sentences_documents' but the fixture also carries
    paragraph scores — possibly a leftover name; behavior is still consistent."""
    scores = co_occurrence_score.load_score_file(self.document_paragraph_sentence_score_file_path)
    sentence_scores, paragraph_scores, document_scores = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 1.0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, paragraph_scores, document_scores, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(None, self.document_paragraph_sentence_score_file_path, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_precedence_sentences_paragraphs_documents(self):
    """With ignore_scores=True, each level contributes its bare weight, so a pair
    co-mentioned at all three levels counts document + paragraph + sentence weights."""
    scores = co_occurrence_score.load_score_file(self.precedence_document_paragraph_sentence_score_file_path)
    sentence_scores, paragraph_scores, document_scores = co_occurrence_score.split_scores(scores)
    document_weight = 2.0
    paragraph_weight = 1.0
    sentence_weight = 1.0
    weighted_counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, paragraph_scores,
                                                              document_scores, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=document_weight,
                                                              paragraph_weight=paragraph_weight,
                                                              sentence_weight=sentence_weight,
                                                              ignore_scores=True)
    weight_sum = document_weight + paragraph_weight + sentence_weight
    assert {('B', 'C'): weight_sum,
            'B': weight_sum,
            'C': weight_sum,
            None: weight_sum} == weighted_counts
def test_weighted_counts_sentences_only_diseases(self):
    """ignore_scores=True with sentence scores: counts become pure weight sums
    (16 = 15 document + 1 sentence per co-mentioning document)."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, None, None, None,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=0,
                                                              sentence_weight=1.0,
                                                              ignore_scores=True)
    assert {('--D', 'A'): 32,
            ('B', 'C'): 16,
            'A': 32,
            '--D': 32,
            'B': 16,
            'C': 16,
            None: 48} == weighted_counts
def test_co_occurrence_score_sentences_only_diseases(self):
    """co_occurrence_score with ignore_scores=True matches the score recomputed
    from the ignore_scores weighted counts."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(None, sentence_scores, None, None, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0,
                                                     ignore_scores=True)
    scores = co_occurrence_score.co_occurrence_score(None, self.sentence_score_file_path, None,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent,
                                                     ignore_scores=True)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    # s = c_pair**w * (c_pair * c_all / (c_i * c_j))**(1 - w)
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_weighted_counts_matches_file(self):
    """Weighted counts when matches come from a tagger matches file plus an
    entity dictionary; results equal the scores-only case."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(self.matches_file_path, sentence_scores, None, None,
                                                              self.entity_file_path,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=0,
                                                              sentence_weight=1.0)
    assert 15.9 + 15.44 + 15. == approx(weighted_counts[None])  # needed due to floating point strangeness
    del weighted_counts[None]
    assert {('--D', 'A'): 15.9 + 15.44,
            ('B', 'C'): 15.,
            'A': 15.9 + 15.44,
            '--D': 15.9 + 15.44,
            'B': 15.,
            'C': 15.} == weighted_counts
def test_co_occurrence_score_matches_file(self):
    """co_occurrence_score driven by a matches file + entity dictionary matches
    the score recomputed from get_weighted_counts."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_path, sentence_scores, None, None,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_file_path, self.sentence_score_file_path,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    # s = c_pair**w * (c_pair * c_all / (c_i * c_j))**(1 - w)
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_matches_file_same_type(self):
    """Same as the matches-file test, but both entities share type 2
    (first_type == second_type) using the same-type fixture files."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_same_type_path, sentence_scores, None, None,
                                                     self.entity_file_same_type_path,
                                                     first_type=2, second_type=2,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_file_same_type_path,
                                                     self.sentence_score_file_path,
                                                     self.entity_file_same_type_path,
                                                     first_type=2, second_type=2,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_matches_file_diseases(self):
    """co_occurrence_score_diseases (score-agnostic variant) matches the score
    recomputed from ignore_scores=True weighted counts."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    document_weight = 15.0
    paragraph_weight = 0
    sentence_weight = 1.0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_path, sentence_scores, None, None,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0,
                                                     ignore_scores=True)
    scores = co_occurrence_score.co_occurrence_score_diseases(self.matches_file_path,
                                                              self.entity_file_path,
                                                              document_weight=document_weight,
                                                              sentence_weight=sentence_weight)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_weighted_counts_matches_document_level_comentions_file(self):
    """Matches file with document-level-only co-mentions: the 1111 pair gets the
    bare document weight (15.) instead of 15.9 since no sentence co-mention exists."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(self.matches_document_level_comentions_file_path,
                                                              sentence_scores, None, None,
                                                              self.entity_file_path,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=0,
                                                              sentence_weight=1.0)
    assert {('--D', 'A'): 15. + 15.44,
            ('B', 'C'): 15.,
            'A': 15. + 15.44,
            '--D': 15. + 15.44,
            'B': 15.,
            'C': 15.,
            None: 15. + 15.44 + 15.} == weighted_counts
def test_co_occurrence_score_matches_document_level_comentions_file(self):
    """co_occurrence_score on the document-level-comentions matches file matches
    the score recomputed from get_weighted_counts."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_document_level_comentions_file_path,
                                                     sentence_scores, None, None,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_document_level_comentions_file_path,
                                                     self.sentence_score_file_path,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_matches_document_level_comentions_file_diseases(self):
    """co_occurrence_score_diseases on the document-level-comentions file matches
    the score recomputed from ignore_scores weighted counts."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    sentence_weight = 1.0
    counts = co_occurrence_score.get_weighted_counts(self.matches_document_level_comentions_file_path,
                                                     sentence_scores, None, None, self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=sentence_weight,
                                                     ignore_scores=True)
    scores = co_occurrence_score.co_occurrence_score_diseases(self.matches_document_level_comentions_file_path,
                                                              self.entity_file_path,
                                                              document_weight=document_weight,
                                                              sentence_weight=sentence_weight)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_weighted_counts_matches_single_matches_file(self):
    """A matches file with one match per line yields the same weighted counts
    as the multi-match layout."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(self.matches_file_single_matches_path,
                                                              sentence_scores, None, None,
                                                              self.entity_file_path,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=0,
                                                              sentence_weight=1.0)
    assert 15.9 + 15.44 + 15. == approx(weighted_counts[None])  # needed due to floating point strangeness
    del weighted_counts[None]
    assert {('--D', 'A'): 15.9 + 15.44,
            ('B', 'C'): 15.,
            'A': 15.9 + 15.44,
            '--D': 15.9 + 15.44,
            'B': 15.,
            'C': 15.} == weighted_counts
def test_co_occurrence_score_matches_single_matches_file(self):
    """co_occurrence_score on the single-match-per-line file matches the score
    recomputed from get_weighted_counts."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_single_matches_path,
                                                     sentence_scores, None, None,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_file_single_matches_path,
                                                     self.sentence_score_file_path,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_co_occurrence_score_matches_single_matches_file_diseases(self):
    """co_occurrence_score_diseases on the single-matches file matches the score
    recomputed from ignore_scores weighted counts.

    Fix: scores were previously computed from self.matches_file_path while the
    counts used self.matches_file_single_matches_path, so the two sides of the
    comparison read different fixture files. Both now use the single-matches
    file, consistent with the sibling *_diseases tests.
    """
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    sentence_weight = 1.0
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_single_matches_path,
                                                     sentence_scores, None, None, self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=sentence_weight,
                                                     ignore_scores=True)
    scores = co_occurrence_score.co_occurrence_score_diseases(self.matches_file_single_matches_path,
                                                              self.entity_file_path,
                                                              document_weight=document_weight,
                                                              sentence_weight=sentence_weight)
    c_a_d = counts[('--D', 'A')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    # s = c_pair**w * (c_pair * c_all / (c_i * c_j))**(1 - w)
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
def test_weighted_counts_matches_file_cross(self):
    """Cross matches file adds a ('--D', 'B') pair, so '--D' and 'B' totals
    include one extra document weight each."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    weighted_counts = co_occurrence_score.get_weighted_counts(self.matches_file_cross_path, sentence_scores,
                                                              None, None,
                                                              self.entity_file_path,
                                                              first_type=9606, second_type=-26,
                                                              document_weight=15.0, paragraph_weight=0,
                                                              sentence_weight=1.0)
    assert 15.9 + 15.44 + 15. + 15. == approx(weighted_counts[None])  # needed due to float inaccuracy
    del weighted_counts[None]
    assert 15.9 + 15.44 + 15. == approx(weighted_counts['--D'])
    del weighted_counts['--D']
    assert {('--D', 'A'): 15.9 + 15.44,
            ('--D', 'B'): 15.,
            ('B', 'C'): 15.,
            'A': 15.9 + 15.44,
            'B': 15. + 15.,
            'C': 15.} == weighted_counts
def test_co_occurrence_score_matches_file_cross(self):
    """co_occurrence_score on the cross matches file matches recomputed scores,
    including the extra ('--D', 'B') pair."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_cross_path, sentence_scores, None, None,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_file_cross_path, self.sentence_score_file_path,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_d_b = counts[('--D', 'B')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    s_d_b = c_d_b ** weighting_exponent * ((c_d_b * c_all) / (c_b * c_d)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
    assert s_d_b == approx(scores[('--D', 'B')])
def test_co_occurrence_score_matches_file_cross_swap_types(self):
    """Swapping first_type/second_type (-26/9606 instead of 9606/-26) must not
    change the resulting scores."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_cross_path, sentence_scores,
                                                     None, None,
                                                     self.entity_file_path,
                                                     first_type=-26, second_type=9606,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_file_cross_path, self.sentence_score_file_path,
                                                     self.entity_file_path,
                                                     first_type=-26, second_type=9606,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_d_b = counts[('--D', 'B')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    s_d_b = c_d_b ** weighting_exponent * ((c_d_b * c_all) / (c_b * c_d)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
    assert s_d_b == approx(scores[('--D', 'B')])
def test_co_occurrence_score_matches_file_cross_fantasy_types(self):
    """Scoring works with arbitrary (made-up) entity type ids 1 and 2, using the
    fantasy-types entity dictionary."""
    scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    sentence_scores, _, _ = co_occurrence_score.split_scores(scores)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_cross_fantasy_types_path, sentence_scores,
                                                     None, None,
                                                     self.entity_fantasy_types_file_path,
                                                     first_type=1, second_type=2,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=1.0)
    scores = co_occurrence_score.co_occurrence_score(self.matches_file_cross_fantasy_types_path,
                                                     self.sentence_score_file_path,
                                                     self.entity_fantasy_types_file_path,
                                                     first_type=1, second_type=2,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     weighting_exponent=weighting_exponent)
    c_a_d = counts[('--D', 'A')]
    c_d_b = counts[('--D', 'B')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    s_d_b = c_d_b ** weighting_exponent * ((c_d_b * c_all) / (c_b * c_d)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
    assert s_d_b == approx(scores[('--D', 'B')])
def test_co_occurrence_score_matches_file_cross_diseases(self):
    """co_occurrence_score_diseases on the cross matches file matches scores
    recomputed from ignore_scores weighted counts, including ('--D', 'B')."""
    sentence_scores = co_occurrence_score.load_score_file(self.sentence_score_file_path)
    document_weight = 15.0
    paragraph_weight = 0
    weighting_exponent = 0.6
    sentence_weight = 1.0
    counts = co_occurrence_score.get_weighted_counts(self.matches_file_cross_path, sentence_scores,
                                                     None, None,
                                                     self.entity_file_path,
                                                     first_type=9606, second_type=-26,
                                                     document_weight=document_weight,
                                                     paragraph_weight=paragraph_weight,
                                                     sentence_weight=sentence_weight,
                                                     ignore_scores=True)
    scores = co_occurrence_score.co_occurrence_score_diseases(self.matches_file_cross_path,
                                                              self.entity_file_path,
                                                              document_weight=document_weight,
                                                              sentence_weight=sentence_weight)
    c_a_d = counts[('--D', 'A')]
    c_d_b = counts[('--D', 'B')]
    c_a = counts['A']
    c_d = counts['--D']
    c_all = counts[None]
    s_a_d = c_a_d ** weighting_exponent * ((c_a_d * c_all) / (c_a * c_d)) ** (1 - weighting_exponent)
    c_b_c = counts[('B', 'C')]
    c_b = counts['B']
    c_c = counts['C']
    s_b_c = c_b_c ** weighting_exponent * ((c_b_c * c_all) / (c_b * c_c)) ** (1 - weighting_exponent)
    s_d_b = c_d_b ** weighting_exponent * ((c_d_b * c_all) / (c_b * c_d)) ** (1 - weighting_exponent)
    assert s_a_d == approx(scores[('--D', 'A')])
    assert s_b_c == approx(scores[('B', 'C')])
    assert s_d_b == approx(scores[('--D', 'B')])
def test_cocoscore_cv_independent_associations(self):
    """2-fold CV on the simple fixture data set is perfectly separable: all
    train/test scores are 1.0 with zero spread; fastText hyperparameters are
    kept tiny (epochs=5, bucket=1000, dim=20) so the test runs fast."""
    sentence_weight = 1
    paragraph_weight = 1
    document_weight = 1
    cv_folds = 2
    test_df = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    test_df['text'] = test_df['text'].apply(lambda s: s.strip().lower())
    cv_results = co_occurrence_score.cv_independent_associations(test_df,
                                                                 {'sentence_weight': sentence_weight,
                                                                  'paragraph_weight': paragraph_weight,
                                                                  'document_weight': document_weight,
                                                                  },
                                                                 cv_folds=cv_folds,
                                                                 random_state=numpy.random.RandomState(3),
                                                                 fasttext_epochs=5,
                                                                 fasttext_bucket=1000,
                                                                 fasttext_dim=20)
    # Expected result frame: one row (cv_runs=1) with per-split statistics.
    expected_col_names = [
        'mean_test_score',
        'stdev_test_score',
        'mean_train_score',
        'stdev_train_score',
        'split_0_test_score',
        'split_0_train_score',
        'split_0_n_test',
        'split_0_pos_test',
        'split_0_n_train',
        'split_0_pos_train',
        'split_1_test_score',
        'split_1_train_score',
        'split_1_n_test',
        'split_1_pos_test',
        'split_1_n_train',
        'split_1_pos_train',
    ]
    cv_runs = 1
    expected_values = [
        [1.0] * cv_runs,
        [0.0] * cv_runs,
        [1.0] * cv_runs,
        [0.0] * cv_runs,
        [1.0] * cv_runs,
        [1.0] * cv_runs,
        [24] * cv_runs,
        [0.5] * cv_runs,
        [24] * cv_runs,
        [0.5] * cv_runs,
        [1.0] * cv_runs,
        [1.0] * cv_runs,
        [24] * cv_runs,
        [0.5] * cv_runs,
        [24] * cv_runs,
        [0.5] * cv_runs,
    ]
    expected_df = pandas.DataFrame({col: values for col, values in zip(expected_col_names, expected_values)},
                                   columns=expected_col_names)
    assert_frame_equal(cv_results, expected_df)
def test_cocoscore_cv_independent_associations_bad_param(self):
    """A misspelled hyperparameter key ('sentence_weightXXXX') must surface as a
    TypeError from the underlying scoring function."""
    test_df = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    test_df['text'] = test_df['text'].apply(lambda s: s.strip().lower())
    with raises(TypeError, match="got an unexpected keyword argument"):
        _ = co_occurrence_score.cv_independent_associations(test_df, {'sentence_weightXXXX': 1,
                                                                      'paragraph_weight': 1,
                                                                      'document_weight': 1,
                                                                      },
                                                            cv_folds=2,
                                                            random_state=numpy.random.RandomState(3),
                                                            fasttext_epochs=5,
                                                            fasttext_bucket=1000,
                                                            fasttext_dim=20,
                                                            constant_scoring='document')
def test_cocoscore_cv_independent_associations_bad_constant_scoring(self):
    """An invalid constant_scoring value ('documenti') must raise ValueError."""
    test_df = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    test_df['text'] = test_df['text'].apply(lambda s: s.strip().lower())
    with raises(ValueError, match='Unknown constant_scoring parameter: documenti'):
        _ = co_occurrence_score.cv_independent_associations(test_df, {'sentence_weight': 1,
                                                                      'paragraph_weight': 1,
                                                                      'document_weight': 1,
                                                                      },
                                                            cv_folds=2,
                                                            random_state=numpy.random.RandomState(3),
                                                            fasttext_epochs=5,
                                                            fasttext_bucket=1000,
                                                            fasttext_dim=20,
                                                            constant_scoring='documenti')
def test_cocoscore_constant_sentence_scoring(self):
    """constant_scoring='sentence' pins sentence-level matches to score 1;
    non-sentence matches fall through to the distance function (here -1)."""
    df = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    df['text'] = df['text'].apply(lambda s: s.strip().lower())
    train_df = df.copy()
    test_df = df.copy()

    def nmdf(data_frame):
        # Match-distance function that maps every match distance to -1.
        return polynomial_decay_distance(data_frame, 0, -2, 1)

    train_scores, test_scores = co_occurrence_score._get_train_test_scores(train_df, test_df, fasttext_function,
                                                                           fasttext_epochs=5, fasttext_dim=20,
                                                                           fasttext_bucket=1000,
                                                                           match_distance_function=nmdf,
                                                                           constant_scoring='sentence')
    # Sentence-level matches have both sentence and paragraph indices set (!= -1).
    sentence_matches = numpy.logical_and(df['sentence'] != -1, df['paragraph'] != -1)
    non_sentence_matches = numpy.logical_not(sentence_matches)
    for scores in (train_scores, test_scores):
        assert (scores[sentence_matches] == 1).all()
        assert (scores[non_sentence_matches] == -1).all()
def test_cocoscore_constant_paragraph_scoring(self):
    """constant_scoring='paragraph' pins paragraph-only matches to score 1;
    document-only matches keep the distance-function value (-1)."""
    df = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    df['text'] = df['text'].apply(lambda s: s.strip().lower())
    train_df = df.copy()
    test_df = df.copy()

    def nmdf(data_frame):
        # Match-distance function that maps every match distance to -1.
        return polynomial_decay_distance(data_frame, 0, -2, 1)

    train_scores, test_scores = co_occurrence_score._get_train_test_scores(train_df, test_df, fasttext_function,
                                                                           fasttext_epochs=5, fasttext_dim=20,
                                                                           fasttext_bucket=1000,
                                                                           match_distance_function=nmdf,
                                                                           constant_scoring='paragraph')
    # paragraph-only: sentence == -1 but paragraph set; document-only: both == -1.
    paragraph_matches = numpy.logical_and(df['sentence'] == -1, df['paragraph'] != -1)
    document_matches = numpy.logical_and(df['sentence'] == -1, df['paragraph'] == -1)
    for scores in (train_scores, test_scores):
        assert (scores[paragraph_matches] == 1).all()
        assert (scores[document_matches] == -1).all()
def test_cocoscore_constant_document_scoring(self):
    """constant_scoring='document' pins document-only matches to score 1;
    paragraph-only matches keep the distance-function value (-1)."""
    df = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    df['text'] = df['text'].apply(lambda s: s.strip().lower())
    train_df = df.copy()
    test_df = df.copy()

    def nmdf(data_frame):
        # Match-distance function that maps every match distance to -1.
        return polynomial_decay_distance(data_frame, 0, -2, 1)

    train_scores, test_scores = co_occurrence_score._get_train_test_scores(train_df, test_df, fasttext_function,
                                                                           fasttext_epochs=5, fasttext_dim=20,
                                                                           fasttext_bucket=1000,
                                                                           match_distance_function=nmdf,
                                                                           constant_scoring='document')
    paragraph_matches = numpy.logical_and(df['sentence'] == -1, df['paragraph'] != -1)
    document_matches = numpy.logical_and(df['sentence'] == -1, df['paragraph'] == -1)
    for scores in (train_scores, test_scores):
        assert (scores[paragraph_matches] == -1).all()
        assert (scores[document_matches] == 1).all()
def test_fit_score_default(self):
    """fit_score_default must produce strictly positive scores for every
    known entity pair, on both the train and the test split."""
    data = dt.load_data_frame(self.cos_cv_test_path, match_distance=True)
    train_scores, test_scores = co_occurrence_score.fit_score_default(
        data.copy(), data.copy(),
        fasttext_epochs=5,
        fasttext_dim=20,
        fasttext_bucket=1000)
    for pair in [('A', 'B'), ('C', 'D'), ('E', 'F'), ('G', 'H')]:
        assert train_scores[pair] > 0
        assert test_scores[pair] > 0
| 61.778531
| 120
| 0.480375
| 5,678
| 54,674
| 4.219443
| 0.031877
| 0.013941
| 0.090116
| 0.061441
| 0.91819
| 0.900284
| 0.880332
| 0.857501
| 0.837132
| 0.820102
| 0
| 0.035443
| 0.427699
| 54,674
| 884
| 121
| 61.848416
| 0.730233
| 0.002049
| 0
| 0.73697
| 0
| 0
| 0.033909
| 0.013142
| 0
| 0
| 0
| 0
| 0.077576
| 1
| 0.050909
| false
| 0
| 0.010909
| 0.004848
| 0.088485
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6069b98a5511c92329f3c5594f77a1e470a1f057
| 43,070
|
py
|
Python
|
tests/testflows/extended_precision_data_types/requirements/requirements.py
|
Srijan0519/ClickHouse
|
c0e1c42562c48d122a1dad4fea18b060d4805ae3
|
[
"Apache-2.0"
] | 3
|
2019-06-27T08:59:08.000Z
|
2021-09-03T02:38:02.000Z
|
tests/testflows/extended_precision_data_types/requirements/requirements.py
|
Srijan0519/ClickHouse
|
c0e1c42562c48d122a1dad4fea18b060d4805ae3
|
[
"Apache-2.0"
] | 1
|
2021-06-28T15:03:05.000Z
|
2021-06-28T15:03:05.000Z
|
tests/testflows/extended_precision_data_types/requirements/requirements.py
|
Srijan0519/ClickHouse
|
c0e1c42562c48d122a1dad4fea18b060d4805ae3
|
[
"Apache-2.0"
] | 1
|
2021-07-19T12:07:14.000Z
|
2021-07-19T12:07:14.000Z
|
# These requirements were auto generated
# from software requirements specification (SRS)
# document by TestFlows v1.6.210505.1133630.
# Do not edit by hand but re-generate instead
# using 'tfs requirements generate' command.
from testflows.core import Specification
from testflows.core import Requirement
Heading = Specification.Heading
# Section 4.1: top-level requirement — ClickHouse supports the extended
# precision data types at all. (Auto-generated; edit the SRS and regenerate.)
RQ_SRS_020_ClickHouse_Extended_Precision = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using [Extended Precision Data Types].\n'
        '\n'
    ),
    link=None,
    level=2,
    num='4.1')
# Section 4.2: conversion requirements — to* conversion functions for each
# extended type, plus MySQL interoperability in both directions.
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toInt128 = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt128',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support converting values to `Int128` using the `toInt128` function.\n'
        '\n'
        'For example,\n'
        '\n'
        '```sql\n'
        'SELECT toInt128(1)\n'
        '```\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toUInt128 = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt128',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support converting values to `UInt128` format using `toUInt128` function.\n'
        '\n'
        'For example,\n'
        '\n'
        '```sql\n'
        'SELECT toUInt128(1)\n'
        '```\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.2')

RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toInt256 = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt256',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support converting values to `Int256` using `toInt256` function.\n'
        '\n'
        'For example,\n'
        '\n'
        '```sql\n'
        'SELECT toInt256(1)\n'
        '```\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.3')

RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toUInt256 = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt256',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support converting values to `UInt256` format using `toUInt256` function.\n'
        '\n'
        'For example,\n'
        '\n'
        '```sql\n'
        'SELECT toUInt256(1)\n'
        '```\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.4')

RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toDecimal256 = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toDecimal256',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support converting values to `Decimal256` format using `toDecimal256` function.\n'
        '\n'
        'For example,\n'
        '\n'
        '```sql\n'
        'SELECT toDecimal256(1,2)\n'
        '```\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.5')

RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_FromMySQL = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.FromMySQL',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support converting to [Extended Precision Data Types] from MySQL.\n'
        '\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.6')

RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_ToMySQL = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.ToMySQL',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support converting from [Extended Precision Data Types] to MySQL.\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.2.7')
# Section 4.3: arithmetic function requirements — full set for the extended
# integer types; a reduced set for Decimal256 (no modulo/gcd/lcm).
RQ_SRS_020_ClickHouse_Extended_Precision_Arithmetic_Int_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Int.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using [Arithmetic functions] with Int128, UInt128, Int256, and UInt256.\n'
        '\n'
        'Arithmetic functions:\n'
        '* plus\n'
        '* minus\n'
        '* multiply\n'
        '* divide\n'
        '* intDiv\n'
        '* intDivOrZero\n'
        '* modulo\n'
        '* moduloOrZero\n'
        '* negate\n'
        '* abs\n'
        '* gcd\n'
        '* lcm\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.3.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Arithmetic_Dec_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Arithmetic functions] with Decimal256:\n'
        '\n'
        '* plus\n'
        '* minus\n'
        '* multiply\n'
        '* divide\n'
        '* intDiv\n'
        '* intDivOrZero\n'
        '* negate\n'
        '* abs\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.3.2')

RQ_SRS_020_ClickHouse_Extended_Precision_Arithmetic_Dec_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Arithmetic functions] with Decimal256:\n'
        '\n'
        '* modulo\n'
        '* moduloOrZero\n'
        '* gcd\n'
        '* lcm\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.3.3')
# Section 4.4: array function requirements, split into supported/unsupported
# sets for the extended integer types and for Decimal256.
RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Int_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Array functions] with Int128, UInt128, Int256, and UInt256.\n'
        '\n'
        '* empty\n'
        '* notEmpty\n'
        '* length\n'
        '* arrayCount\n'
        '* arrayPopBack\n'
        '* arrayPopFront\n'
        '* arraySort\n'
        '* arrayReverseSort\n'
        '* arrayUniq\n'
        '* arrayJoin\n'
        '* arrayDistinct\n'
        '* arrayEnumerate\n'
        '* arrayEnumerateDense\n'
        '* arrayEnumerateUniq\n'
        '* arrayReverse\n'
        '* reverse\n'
        '* arrayFlatten\n'
        '* arrayCompact\n'
        '* arrayExists\n'
        '* arrayAll\n'
        '* arrayMin\n'
        '* arrayMax\n'
        '* arraySum\n'
        '* arrayAvg\n'
        '* arrayReduce\n'
        '* arrayReduceInRanges\n'
        '* arrayZip\n'
        '* arrayMap\n'
        '* arrayFilter\n'
        '* arrayFill\n'
        '* arrayReverseFill\n'
        '* arraySplit\n'
        '* arrayFirst\n'
        '* arrayFirstIndex\n'
        '* arrayConcat\n'
        '* hasAll\n'
        '* hasAny\n'
        '* hasSubstr\n'
        '* arrayElement\n'
        '* has\n'
        '* indexOf\n'
        '* countEqual\n'
        '* arrayPushBack\n'
        '* arrayPushFront\n'
        '* arrayResize\n'
        '* arraySlice\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.4.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Int_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Array functions] with Int128, UInt128, Int256, and UInt256:\n'
        '\n'
        '* arrayDifference\n'
        '* arrayCumSum\n'
        '* arrayCumSumNonNegative\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.4.2')

RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Dec_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Array functions] with Decimal256:\n'
        '\n'
        '* empty\n'
        '* notEmpty\n'
        '* length\n'
        '* arrayCount\n'
        '* arrayPopBack\n'
        '* arrayPopFront\n'
        '* arraySort\n'
        '* arrayReverseSort\n'
        '* arrayUniq\n'
        '* arrayJoin\n'
        '* arrayDistinct\n'
        '* arrayEnumerate\n'
        '* arrayEnumerateDense\n'
        '* arrayEnumerateUniq\n'
        '* arrayReverse\n'
        '* reverse\n'
        '* arrayFlatten\n'
        '* arrayCompact\n'
        '* arrayExists\n'
        '* arrayAll\n'
        '* arrayReduce\n'
        '* arrayReduceInRanges\n'
        '* arrayZip\n'
        '* arrayMap\n'
        '* arrayFilter\n'
        '* arrayFill\n'
        '* arrayReverseFill\n'
        '* arraySplit\n'
        '* arrayFirst\n'
        '* arrayFirstIndex\n'
        '* arrayConcat\n'
        '* hasAll\n'
        '* hasAny\n'
        '* hasSubstr\n'
        '* arrayElement\n'
        '* has\n'
        '* indexOf\n'
        '* countEqual\n'
        '* arrayPushBack\n'
        '* arrayPushFront\n'
        '* arrayResize\n'
        '* arraySlice\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.4.3')

RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Dec_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Array functions] with Decimal256:\n'
        '\n'
        '* arrayMin\n'
        '* arrayMax\n'
        # NOTE(review): 'arraaySum' below looks like a typo for 'arraySum'
        # (cf. the spelling in 4.4.1). The fix belongs in the SRS source
        # document, followed by regeneration — confirm before changing here.
        '* arraaySum\n'
        '* arrayAvg\n'
        '* arrayDifference\n'
        '* arrayCumSum\n'
        '* arrayCumSumNonNegative\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.4.4')
# Sections 4.5 and 4.6: comparison functions are required; logical functions
# may be unsupported for the extended precision types.
RQ_SRS_020_ClickHouse_Extended_Precision_Comparison = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Comparison',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using [Comparison functions] with [Extended Precision Data Types].\n'
        '\n'
        'Comparison functions:\n'
        '* equals\n'
        '* notEquals\n'
        '* less\n'
        '* greater\n'
        '* lessOrEquals\n'
        '* greaterOrEquals\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.5.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Logical = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Logical',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using [Logical functions] with [Extended Precision Data Types].\n'
        '\n'
        'Logical functions:\n'
        '* and\n'
        '* or\n'
        '* not\n'
        '* xor\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.6.1')
# Section 4.7: mathematical function requirements — supported vs. possibly
# unsupported sets for the extended precision types.
RQ_SRS_020_ClickHouse_Extended_Precision_Mathematical_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Mathematical functions] with [Extended Precision Data Types]:\n'
        '\n'
        '* exp\n'
        '* log, ln\n'
        '* exp2\n'
        '* log2\n'
        '* exp10\n'
        '* log10\n'
        '* sqrt\n'
        '* cbrt\n'
        '* erf\n'
        '* erfc\n'
        '* lgamma\n'
        '* tgamma\n'
        '* sin\n'
        '* cos\n'
        '* tan\n'
        '* asin\n'
        '* acos\n'
        '* atan\n'
        '* cosh\n'
        '* acosh\n'
        '* sinh\n'
        '* asinh\n'
        '* tanh\n'
        '* atanh\n'
        '* log1p\n'
        '* sign\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.7.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Mathematical_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Mathematical functions] with [Extended Precision Data Types]:\n'
        '\n'
        '* pow, power\n'
        '* intExp2\n'
        '* intExp10\n'
        '* atan2\n'
        '* hypot\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.7.2')
# Section 4.8: rounding function requirements, split by extended integer
# types vs. Decimal256.
RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Int_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Rounding functions] with Int128, UInt128, Int256, and UInt256:\n'
        '\n'
        '* floor\n'
        '* ceil\n'
        '* trunc\n'
        '* round\n'
        '* roundBankers\n'
        '* roundDuration\n'
        '* roundAge\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.8.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Int_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Rounding functions] with Int128, UInt128, Int256, and UInt256:\n'
        '\n'
        '* roundDown\n'
        '* roundToExp2\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.8.2')

RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Dec_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Rounding functions] with Decimal256:\n'
        '\n'
        '* floor\n'
        '* ceil\n'
        '* trunc\n'
        '* round\n'
        '* roundBankers\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.8.3')

RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Dec_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Rounding functions] with Decimal256:\n'
        '\n'
        '* roundDuration\n'
        '* roundAge\n'
        '* roundDown\n'
        '* roundToExp2\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.8.4')
# Section 4.9: bit function requirements — partial support for the extended
# integer types; no bit functions required for Decimal256.
RQ_SRS_020_ClickHouse_Extended_Precision_Bit_Int_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Bit functions] with Int128, UInt128, Int256, and UInt256:\n'
        '\n'
        '* bitAnd\n'
        '* bitOr\n'
        '* bitXor\n'
        '* bitNot\n'
        '* bitShiftLeft\n'
        '* bitShiftRight\n'
        '* bitCount\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.9.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Bit_Int_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Bit functions] with Int128, UInt128, Int256, and UInt256:\n'
        '\n'
        '* bitRotateLeft\n'
        '* bitRotateRight\n'
        '* bitTest\n'
        '* bitTestAll\n'
        '* bitTestAny\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.9.2')

RQ_SRS_020_ClickHouse_Extended_Precision_Bit_Dec_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Dec.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using [Bit functions] with Decimal256.\n'
        '\n'
        'Bit functions:\n'
        '* bitAnd\n'
        '* bitOr\n'
        '* bitXor\n'
        '* bitNot\n'
        '* bitShiftLeft\n'
        '* bitShiftRight\n'
        '* bitCount\n'
        '* bitRotateLeft\n'
        '* bitRotateRight\n'
        '* bitTest\n'
        '* bitTestAll\n'
        '* bitTestAny\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.9.3')
# Sections 4.10 and 4.11: null-handling and tuple function requirements.
RQ_SRS_020_ClickHouse_Extended_Precision_Null = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Null',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using [Null functions] with [Extended Precision Data Types].\n'
        '\n'
        'Null functions:\n'
        '* isNull\n'
        '* isNotNull\n'
        '* coalesce\n'
        '* ifNull\n'
        '* nullIf\n'
        '* assumeNotNull\n'
        '* toNullable\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.10.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Tuple = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Tuple',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using [Tuple functions] with [Extended Precision Data Types].\n'
        '\n'
        'Tuple functions:\n'
        '* tuple\n'
        '* tupleElement\n'
        '* untuple\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.11.1')
# Sections 4.12 and 4.13: map function requirements and table creation.
RQ_SRS_020_ClickHouse_Extended_Precision_Map_Supported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Map.Supported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support using the following [Map functions] with [Extended Precision Data Types]:\n'
        '\n'
        '* map\n'
        '* mapContains\n'
        '* mapKeys\n'
        '* mapValues\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.12.1')

RQ_SRS_020_ClickHouse_Extended_Precision_Map_NotSupported = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Map.NotSupported',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] MAY not support using the following [Map functions] with [Extended Precision Data Types]:\n'
        '\n'
        '* mapAdd\n'
        '* mapSubtract\n'
        '* mapPopulateSeries\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.12.2')

RQ_SRS_020_ClickHouse_Extended_Precision_Create_Table = Requirement(
    name='RQ.SRS-020.ClickHouse.Extended.Precision.Create.Table',
    version='1.0',
    priority=None,
    group=None,
    type=None,
    uid=None,
    description=(
        '[ClickHouse] SHALL support creating table with columns that use [Extended Precision Data Types].\n'
        '\n'
    ),
    link=None,
    level=3,
    num='4.13.1')
SRS020_ClickHouse_Extended_Precision_Data_Types = Specification(
name='SRS020 ClickHouse Extended Precision Data Types',
description=None,
author=None,
date=None,
status=None,
approved_by=None,
approved_date=None,
approved_version=None,
version=None,
group=None,
type=None,
link=None,
uid=None,
parent=None,
children=None,
headings=(
Heading(name='Revision History', level=1, num='1'),
Heading(name='Introduction', level=1, num='2'),
Heading(name='Terminology', level=1, num='3'),
Heading(name='Extended Precision Data Types', level=2, num='3.1'),
Heading(name='Requirements', level=1, num='4'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision', level=2, num='4.1'),
Heading(name='Conversion', level=2, num='4.2'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt128', level=3, num='4.2.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt128', level=3, num='4.2.2'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt256', level=3, num='4.2.3'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt256', level=3, num='4.2.4'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toDecimal256', level=3, num='4.2.5'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.FromMySQL', level=3, num='4.2.6'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.ToMySQL', level=3, num='4.2.7'),
Heading(name='Arithmetic', level=2, num='4.3'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Int.Supported', level=3, num='4.3.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.Supported', level=3, num='4.3.2'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.NotSupported', level=3, num='4.3.3'),
Heading(name='Arrays', level=2, num='4.4'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.Supported', level=3, num='4.4.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.NotSupported', level=3, num='4.4.2'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.Supported', level=3, num='4.4.3'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.NotSupported', level=3, num='4.4.4'),
Heading(name='Comparison', level=2, num='4.5'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Comparison', level=3, num='4.5.1'),
Heading(name='Logical Functions', level=2, num='4.6'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Logical', level=3, num='4.6.1'),
Heading(name='Mathematical Functions', level=2, num='4.7'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.Supported', level=3, num='4.7.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.NotSupported', level=3, num='4.7.2'),
Heading(name='Rounding Functions', level=2, num='4.8'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.Supported', level=3, num='4.8.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.NotSupported', level=3, num='4.8.2'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.Supported', level=3, num='4.8.3'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.NotSupported', level=3, num='4.8.4'),
Heading(name='Bit Functions', level=2, num='4.9'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.Supported', level=3, num='4.9.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.NotSupported', level=3, num='4.9.2'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Dec.NotSupported', level=3, num='4.9.3'),
Heading(name='Null Functions', level=2, num='4.10'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Null', level=3, num='4.10.1'),
Heading(name='Tuple Functions', level=2, num='4.11'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Tuple', level=3, num='4.11.1'),
Heading(name='Map Functions', level=2, num='4.12'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Map.Supported', level=3, num='4.12.1'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Map.NotSupported', level=3, num='4.12.2'),
Heading(name='Create', level=2, num='4.13'),
Heading(name='RQ.SRS-020.ClickHouse.Extended.Precision.Create.Table', level=3, num='4.13.1'),
Heading(name='References', level=1, num='5'),
),
requirements=(
RQ_SRS_020_ClickHouse_Extended_Precision,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toInt128,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toUInt128,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toInt256,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toUInt256,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_toDecimal256,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_FromMySQL,
RQ_SRS_020_ClickHouse_Extended_Precision_Conversion_ToMySQL,
RQ_SRS_020_ClickHouse_Extended_Precision_Arithmetic_Int_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Arithmetic_Dec_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Arithmetic_Dec_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Int_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Int_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Dec_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Arrays_Dec_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Comparison,
RQ_SRS_020_ClickHouse_Extended_Precision_Logical,
RQ_SRS_020_ClickHouse_Extended_Precision_Mathematical_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Mathematical_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Int_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Int_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Dec_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Rounding_Dec_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Bit_Int_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Bit_Int_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Bit_Dec_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Null,
RQ_SRS_020_ClickHouse_Extended_Precision_Tuple,
RQ_SRS_020_ClickHouse_Extended_Precision_Map_Supported,
RQ_SRS_020_ClickHouse_Extended_Precision_Map_NotSupported,
RQ_SRS_020_ClickHouse_Extended_Precision_Create_Table,
),
content='''
# SRS020 ClickHouse Extended Precision Data Types
# Software Requirements Specification
## Table of Contents
* 1 [Revision History](#revision-history)
* 2 [Introduction](#introduction)
* 3 [Terminology](#terminology)
* 3.1 [Extended Precision Data Types](#extended-precision-data-types)
* 4 [Requirements](#requirements)
* 4.1 [RQ.SRS-020.ClickHouse.Extended.Precision](#rqsrs-020clickhouseextendedprecision)
* 4.2 [Conversion](#conversion)
* 4.2.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt128](#rqsrs-020clickhouseextendedprecisionconversiontoint128)
* 4.2.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt128](#rqsrs-020clickhouseextendedprecisionconversiontouint128)
* 4.2.3 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt256](#rqsrs-020clickhouseextendedprecisionconversiontoint256)
* 4.2.4 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt256](#rqsrs-020clickhouseextendedprecisionconversiontouint256)
* 4.2.5 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toDecimal256](#rqsrs-020clickhouseextendedprecisionconversiontodecimal256)
* 4.2.6 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.FromMySQL](#rqsrs-020clickhouseextendedprecisionconversionfrommysql)
* 4.2.7 [RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.ToMySQL](#rqsrs-020clickhouseextendedprecisionconversiontomysql)
* 4.3 [Arithmetic](#arithmetic)
* 4.3.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Int.Supported](#rqsrs-020clickhouseextendedprecisionarithmeticintsupported)
* 4.3.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.Supported](#rqsrs-020clickhouseextendedprecisionarithmeticdecsupported)
* 4.3.3 [RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.NotSupported](#rqsrs-020clickhouseextendedprecisionarithmeticdecnotsupported)
* 4.4 [Arrays](#arrays)
* 4.4.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.Supported](#rqsrs-020clickhouseextendedprecisionarraysintsupported)
* 4.4.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.NotSupported](#rqsrs-020clickhouseextendedprecisionarraysintnotsupported)
* 4.4.3 [RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.Supported](#rqsrs-020clickhouseextendedprecisionarraysdecsupported)
* 4.4.4 [RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.NotSupported](#rqsrs-020clickhouseextendedprecisionarraysdecnotsupported)
* 4.5 [Comparison](#comparison)
* 4.5.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Comparison](#rqsrs-020clickhouseextendedprecisioncomparison)
* 4.6 [Logical Functions](#logical-functions)
* 4.6.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Logical](#rqsrs-020clickhouseextendedprecisionlogical)
* 4.7 [Mathematical Functions](#mathematical-functions)
* 4.7.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.Supported](#rqsrs-020clickhouseextendedprecisionmathematicalsupported)
* 4.7.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.NotSupported](#rqsrs-020clickhouseextendedprecisionmathematicalnotsupported)
* 4.8 [Rounding Functions](#rounding-functions)
* 4.8.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.Supported](#rqsrs-020clickhouseextendedprecisionroundingintsupported)
* 4.8.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.NotSupported](#rqsrs-020clickhouseextendedprecisionroundingintnotsupported)
* 4.8.3 [RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.Supported](#rqsrs-020clickhouseextendedprecisionroundingdecsupported)
* 4.8.4 [RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.NotSupported](#rqsrs-020clickhouseextendedprecisionroundingdecnotsupported)
* 4.9 [Bit Functions](#bit-functions)
* 4.9.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.Supported](#rqsrs-020clickhouseextendedprecisionbitintsupported)
* 4.9.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.NotSupported](#rqsrs-020clickhouseextendedprecisionbitintnotsupported)
* 4.9.3 [RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Dec.NotSupported](#rqsrs-020clickhouseextendedprecisionbitdecnotsupported)
* 4.10 [Null Functions](#null-functions)
* 4.10.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Null](#rqsrs-020clickhouseextendedprecisionnull)
* 4.11 [Tuple Functions](#tuple-functions)
* 4.11.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Tuple](#rqsrs-020clickhouseextendedprecisiontuple)
* 4.12 [Map Functions](#map-functions)
* 4.12.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Map.Supported](#rqsrs-020clickhouseextendedprecisionmapsupported)
* 4.12.2 [RQ.SRS-020.ClickHouse.Extended.Precision.Map.NotSupported](#rqsrs-020clickhouseextendedprecisionmapnotsupported)
* 4.13 [Create](#create)
* 4.13.1 [RQ.SRS-020.ClickHouse.Extended.Precision.Create.Table](#rqsrs-020clickhouseextendedprecisioncreatetable)
* 5 [References](#references)
## Revision History
This document is stored in an electronic form using [Git] source control management software
hosted in a [GitHub Repository].
All the updates are tracked using the [Revision History].
## Introduction
This software requirements specification covers requirements related to [ClickHouse]
using extended precision data types.
## Terminology
### Extended Precision Data Types
Inclusive bounds:
* Int128 - [-170141183460469231731687303715884105728 : 170141183460469231731687303715884105727]
* UInt128 - [0 : 340282366920938463463374607431768211455]
* Int256 - [-57896044618658097711785492504343953926634992332820282019728792003956564819968 : 57896044618658097711785492504343953926634992332820282019728792003956564819967]
* UInt256 - [0 : 115792089237316195423570985008687907853269984665640564039457584007913129639935]
Exclusive bounds:
* Decimal256 - (-10^(76 - S) : 10^(76 - S)), where S is the scale.
## Requirements
### RQ.SRS-020.ClickHouse.Extended.Precision
version: 1.0
[ClickHouse] SHALL support using [Extended Precision Data Types].
### Conversion
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt128
version: 1.0
[ClickHouse] SHALL support converting values to `Int128` using the `toInt128` function.
For example,
```sql
SELECT toInt128(1)
```
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt128
version: 1.0
[ClickHouse] SHALL support converting values to `UInt128` format using `toUInt128` function.
For example,
```sql
SELECT toUInt128(1)
```
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toInt256
version: 1.0
[ClickHouse] SHALL support converting values to `Int256` using `toInt256` function.
For example,
```sql
SELECT toInt256(1)
```
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toUInt256
version: 1.0
[ClickHouse] SHALL support converting values to `UInt256` format using `toUInt256` function.
For example,
```sql
SELECT toUInt256(1)
```
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.toDecimal256
version: 1.0
[ClickHouse] SHALL support converting values to `Decimal256` format using `toDecimal256` function.
For example,
```sql
SELECT toDecimal256(1,2)
```
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.FromMySQL
version: 1.0
[ClickHouse] SHALL support converting to [Extended Precision Data Types] from MySQL.
#### RQ.SRS-020.ClickHouse.Extended.Precision.Conversion.ToMySQL
version: 1.0
[ClickHouse] MAY not support converting from [Extended Precision Data Types] to MySQL.
### Arithmetic
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Int.Supported
version: 1.0
[ClickHouse] SHALL support using [Arithmetic functions] with Int128, UInt128, Int256, and UInt256.
Arithmetic functions:
* plus
* minus
* multiply
* divide
* intDiv
* intDivOrZero
* modulo
* moduloOrZero
* negate
* abs
* gcd
* lcm
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Arithmetic functions] with Decimal256:
* plus
* minus
* multiply
* divide
* intDiv
* intDivOrZero
* negate
* abs
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arithmetic.Dec.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Arithmetic functions] with Decimal256:
* modulo
* moduloOrZero
* gcd
* lcm
### Arrays
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Array functions] with Int128, UInt128, Int256, and UInt256:
* empty
* notEmpty
* length
* arrayCount
* arrayPopBack
* arrayPopFront
* arraySort
* arrayReverseSort
* arrayUniq
* arrayJoin
* arrayDistinct
* arrayEnumerate
* arrayEnumerateDense
* arrayEnumerateUniq
* arrayReverse
* reverse
* arrayFlatten
* arrayCompact
* arrayExists
* arrayAll
* arrayMin
* arrayMax
* arraySum
* arrayAvg
* arrayReduce
* arrayReduceInRanges
* arrayZip
* arrayMap
* arrayFilter
* arrayFill
* arrayReverseFill
* arraySplit
* arrayFirst
* arrayFirstIndex
* arrayConcat
* hasAll
* hasAny
* hasSubstr
* arrayElement
* has
* indexOf
* countEqual
* arrayPushBack
* arrayPushFront
* arrayResize
* arraySlice
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Int.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Array functions] with Int128, UInt128, Int256, and UInt256:
* arrayDifference
* arrayCumSum
* arrayCumSumNonNegative
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Array functions] with Decimal256:
* empty
* notEmpty
* length
* arrayCount
* arrayPopBack
* arrayPopFront
* arraySort
* arrayReverseSort
* arrayUniq
* arrayJoin
* arrayDistinct
* arrayEnumerate
* arrayEnumerateDense
* arrayEnumerateUniq
* arrayReverse
* reverse
* arrayFlatten
* arrayCompact
* arrayExists
* arrayAll
* arrayReduce
* arrayReduceInRanges
* arrayZip
* arrayMap
* arrayFilter
* arrayFill
* arrayReverseFill
* arraySplit
* arrayFirst
* arrayFirstIndex
* arrayConcat
* hasAll
* hasAny
* hasSubstr
* arrayElement
* has
* indexOf
* countEqual
* arrayPushBack
* arrayPushFront
* arrayResize
* arraySlice
#### RQ.SRS-020.ClickHouse.Extended.Precision.Arrays.Dec.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Array functions] with Decimal256:
* arrayMin
* arrayMax
* arraySum
* arrayAvg
* arrayDifference
* arrayCumSum
* arrayCumSumNonNegative
### Comparison
#### RQ.SRS-020.ClickHouse.Extended.Precision.Comparison
version: 1.0
[ClickHouse] SHALL support using [Comparison functions] with [Extended Precision Data Types].
Comparison functions:
* equals
* notEquals
* less
* greater
* lessOrEquals
* greaterOrEquals
### Logical Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Logical
version: 1.0
[ClickHouse] MAY not support using [Logical functions] with [Extended Precision Data Types].
Logical functions:
* and
* or
* not
* xor
### Mathematical Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Mathematical functions] with [Extended Precision Data Types]:
* exp
* log, ln
* exp2
* log2
* exp10
* log10
* sqrt
* cbrt
* erf
* erfc
* lgamma
* tgamma
* sin
* cos
* tan
* asin
* acos
* atan
* cosh
* acosh
* sinh
* asinh
* tanh
* atanh
* log1p
* sign
#### RQ.SRS-020.ClickHouse.Extended.Precision.Mathematical.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Mathematical functions] with [Extended Precision Data Types]:
* pow, power
* intExp2
* intExp10
* atan2
* hypot
### Rounding Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Rounding functions] with Int128, UInt128, Int256, and UInt256:
* floor
* ceil
* trunc
* round
* roundBankers
* roundDuration
* roundAge
#### RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Int.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Rounding functions] with Int128, UInt128, Int256, and UInt256:
* roundDown
* roundToExp2
#### RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Rounding functions] with Decimal256:
* floor
* ceil
* trunc
* round
* roundBankers
#### RQ.SRS-020.ClickHouse.Extended.Precision.Rounding.Dec.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Rounding functions] with Decimal256:
* roundDuration
* roundAge
* roundDown
* roundToExp2
### Bit Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Bit functions] with Int128, UInt128, Int256, and UInt256:
* bitAnd
* bitOr
* bitXor
* bitNot
* bitShiftLeft
* bitShiftRight
* bitCount
#### RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Int.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Bit functions] with Int128, UInt128, Int256, and UInt256:
* bitRotateLeft
* bitRotateRight
* bitTest
* bitTestAll
* bitTestAny
#### RQ.SRS-020.ClickHouse.Extended.Precision.Bit.Dec.NotSupported
version: 1.0
[ClickHouse] MAY not support using [Bit functions] with Decimal256.
Bit functions:
* bitAnd
* bitOr
* bitXor
* bitNot
* bitShiftLeft
* bitShiftRight
* bitCount
* bitRotateLeft
* bitRotateRight
* bitTest
* bitTestAll
* bitTestAny
### Null Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Null
version: 1.0
[ClickHouse] SHALL support using [Null functions] with [Extended Precision Data Types].
Null functions:
* isNull
* isNotNull
* coalesce
* ifNull
* nullIf
* assumeNotNull
* toNullable
### Tuple Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Tuple
version: 1.0
[ClickHouse] SHALL support using [Tuple functions] with [Extended Precision Data Types].
Tuple functions:
* tuple
* tupleElement
* untuple
### Map Functions
#### RQ.SRS-020.ClickHouse.Extended.Precision.Map.Supported
version: 1.0
[ClickHouse] SHALL support using the following [Map functions] with [Extended Precision Data Types]:
* map
* mapContains
* mapKeys
* mapValues
#### RQ.SRS-020.ClickHouse.Extended.Precision.Map.NotSupported
version: 1.0
[ClickHouse] MAY not support using the following [Map functions] with [Extended Precision Data Types]:
* mapAdd
* mapSubtract
* mapPopulateSeries
### Create
#### RQ.SRS-020.ClickHouse.Extended.Precision.Create.Table
version: 1.0
[ClickHouse] SHALL support creating table with columns that use [Extended Precision Data Types].
## References
* **ClickHouse:** https://clickhouse.tech
* **GitHub Repository**: https://github.com/ClickHouse/ClickHouse/blob/master/tests/testflows/extended_precision_data_types/requirements/requirements.md
* **Revision History**: https://github.com/ClickHouse/ClickHouse/blob/master/tests/testflows/extended_precision_data_types/requirements/requirements.md
* **Git:** https://git-scm.com/
[Extended Precision Data Types]: #extended-precision-data-types
[Arithmetic functions]: https://clickhouse.tech/docs/en/sql-reference/functions/arithmetic-functions/
[Array functions]: https://clickhouse.tech/docs/en/sql-reference/functions/array-functions/
[Comparison functions]: https://clickhouse.tech/docs/en/sql-reference/functions/comparison-functions/
[Logical Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/logical-functions/
[Mathematical Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/math-functions/
[Rounding Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/rounding-functions/
[Bit Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/bit-functions/
[Null Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/functions-for-nulls/
[Tuple Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/tuple-functions/
[Map Functions]: https://clickhouse.tech/docs/en/sql-reference/functions/tuple-map-functions/
[SRS]: #srs
[ClickHouse]: https://clickhouse.tech
[GitHub Repository]: https://github.com/ClickHouse/ClickHouse/blob/master/tests/testflows/extended_precision_data_types/requirements/requirements.md
[Revision History]: https://github.com/ClickHouse/ClickHouse/blob/master/tests/testflows/extended_precision_data_types/requirements/requirements.md
[Git]: https://git-scm.com/
[GitHub]: https://github.com
''')
| 30.245787
| 171
| 0.67785
| 4,992
| 43,070
| 5.754207
| 0.069912
| 0.132567
| 0.17765
| 0.116554
| 0.809225
| 0.788129
| 0.767554
| 0.754708
| 0.704822
| 0.57396
| 0
| 0.059035
| 0.190991
| 43,070
| 1,423
| 172
| 30.267041
| 0.765354
| 0.004992
| 0
| 0.579905
| 1
| 0.048259
| 0.627949
| 0.236925
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001582
| 0
| 0.001582
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60ee3927a07acf16d3850d1064ef44a8ca24fa0b
| 190
|
py
|
Python
|
InterfaceMethodsPython/helpReader.py
|
jonyzp/parallelGames
|
ba9895971dcf0c6e77a0738113ffae3a01596e6e
|
[
"Apache-2.0"
] | null | null | null |
InterfaceMethodsPython/helpReader.py
|
jonyzp/parallelGames
|
ba9895971dcf0c6e77a0738113ffae3a01596e6e
|
[
"Apache-2.0"
] | null | null | null |
InterfaceMethodsPython/helpReader.py
|
jonyzp/parallelGames
|
ba9895971dcf0c6e77a0738113ffae3a01596e6e
|
[
"Apache-2.0"
] | null | null | null |
# Read the game's help text from Ayudas.txt and echo it to stdout.
#
# NOTE(review): the absolute, user-specific path below only works on the
# original author's machine — consider resolving it relative to this file.
HELP_FILE = "/Users/marcossierra/Dropbox/Eafit/Semestre6/AnalisisNumerico/Proyecto/parallelGames/InterfaceMethodsPython/Ayudas.txt"

# `with` guarantees the handle is closed even on error (the original never
# closed it), and iterating the handle streams lines instead of loading the
# whole file at once via readlines().
with open(HELP_FILE, 'r') as help_file:
    for line in help_file:
        # Each line keeps its trailing newline and print() adds another,
        # matching the double-spaced output of the original `print i`.
        print(line)
| 38
| 136
| 0.789474
| 23
| 190
| 6.521739
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005747
| 0.084211
| 190
| 4
| 137
| 47.5
| 0.856322
| 0
| 0
| 0
| 0
| 0.25
| 0.621053
| 0.615789
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60f1bd9974500b1fca8c5318bf4bcf70306f17ee
| 109
|
py
|
Python
|
prod/frontend/models/__init__.py
|
frederikgram/describe
|
5c21edcf9b35811d34a9446eda34d5a92974c8e9
|
[
"MIT"
] | 2
|
2021-03-05T20:49:08.000Z
|
2021-03-10T01:32:19.000Z
|
prod/frontend/models/__init__.py
|
frederikgram/describe
|
5c21edcf9b35811d34a9446eda34d5a92974c8e9
|
[
"MIT"
] | 1
|
2020-03-24T19:54:42.000Z
|
2020-03-24T19:54:42.000Z
|
prod/frontend/models/__init__.py
|
frederikgram/describe
|
5c21edcf9b35811d34a9446eda34d5a92974c8e9
|
[
"MIT"
] | null | null | null |
"""Package initializer for the frontend models package.

Re-exports every public name from the sibling ``database_management``,
``database_queries``, and ``database_updaters`` modules so callers can
import them directly from this package's namespace.
"""
from .database_management import *
from .database_queries import *
from .database_updaters import *
| 18.166667
| 34
| 0.752294
| 12
| 109
| 6.583333
| 0.5
| 0.455696
| 0.455696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137615
| 109
| 5
| 35
| 21.8
| 0.840426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e0a20ebd431e5ed6f728022105721db4f0143a1b
| 50,336
|
py
|
Python
|
pypureclient/flashblade/FB_2_0/api/file_system_replica_links_api.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 14
|
2018-12-07T18:30:27.000Z
|
2022-02-22T09:12:33.000Z
|
pypureclient/flashblade/FB_2_0/api/file_system_replica_links_api.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 28
|
2019-09-17T21:03:52.000Z
|
2022-03-29T22:07:35.000Z
|
pypureclient/flashblade/FB_2_0/api/file_system_replica_links_api.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 15
|
2020-06-11T15:50:08.000Z
|
2022-03-21T09:27:25.000Z
|
# coding: utf-8
"""
FlashBlade REST API Client
A lightweight client for FlashBlade REST API 2.0, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re
# python 2 and python 3 compatibility library
import six
from typing import List, Optional
from .. import models
class FileSystemReplicaLinksApi(object):
def __init__(self, api_client):
    # Store the shared ApiClient instance; every endpoint method on this
    # class delegates the actual HTTP call to it via call_api().
    self.api_client = api_client
def api20_file_system_replica_links_get_with_http_info(
    self,
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    local_file_system_ids=None,  # type: List[str]
    local_file_system_names=None,  # type: List[str]
    offset=None,  # type: int
    remote_file_system_ids=None,  # type: List[str]
    remote_file_system_names=None,  # type: List[str]
    remote_ids=None,  # type: List[str]
    remote_names=None,  # type: List[str]
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.FileSystemReplicaLinkGetResponse
    """GET file-system-replica-links

    List file system replication link.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api20_file_system_replica_links_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
    :param str filter: Exclude resources that don't match the specified criteria.
    :param list[str] ids: A comma-separated list of resource IDs. If after filtering, there is not at least one resource that matches each of the elements of `ids`, then an error is returned. This cannot be provided together with the `name` or `names` query parameters.
    :param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
    :param list[str] local_file_system_ids: A comma-separated list of local file system IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `local_file_system_names` query parameter.
    :param list[str] local_file_system_names: A comma-separated list of local file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `local_file_system_ids` query parameter.
    :param int offset: The offset of the first resource to return from a collection.
    :param list[str] remote_file_system_ids: A comma-separated list of remote file system IDs. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_file_system_names` query parameter.
    :param list[str] remote_file_system_names: A comma-separated list of remote file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_file_system_ids` query parameter.
    :param list[str] remote_ids: A comma-separated list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter.
    :param list[str] remote_names: A comma-separated list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter.
    :param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
                                 It can also be a tuple of (connection time, read time) timeouts.
    :return: FileSystemReplicaLinkGetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Normalize scalar arguments into one-element lists so the query-string
    # builder below can treat every multi-value parameter uniformly.
    if ids is not None:
        if not isinstance(ids, list):
            ids = [ids]
    if local_file_system_ids is not None:
        if not isinstance(local_file_system_ids, list):
            local_file_system_ids = [local_file_system_ids]
    if local_file_system_names is not None:
        if not isinstance(local_file_system_names, list):
            local_file_system_names = [local_file_system_names]
    if remote_file_system_ids is not None:
        if not isinstance(remote_file_system_ids, list):
            remote_file_system_ids = [remote_file_system_ids]
    if remote_file_system_names is not None:
        if not isinstance(remote_file_system_names, list):
            remote_file_system_names = [remote_file_system_names]
    if remote_ids is not None:
        if not isinstance(remote_ids, list):
            remote_ids = [remote_ids]
    if remote_names is not None:
        if not isinstance(remote_names, list):
            remote_names = [remote_names]
    if sort is not None:
        if not isinstance(sort, list):
            sort = [sort]
    # CAUTION: locals() is snapshotted here, so every non-None local name
    # defined above (i.e. the method parameters) becomes a request
    # parameter. Do not introduce helper locals before this line.
    params = {k: v for k, v in six.iteritems(locals()) if v is not None}
    # Convert the filter into a string
    if params.get('filter'):
        params['filter'] = str(params['filter'])
    if params.get('sort'):
        params['sort'] = [str(_x) for _x in params['sort']]
    # Validate numeric paging parameters before issuing the request.
    if 'limit' in params and params['limit'] < 1:
        raise ValueError("Invalid value for parameter `limit` when calling `api20_file_system_replica_links_get`, must be a value greater than or equal to `1`")
    if 'offset' in params and params['offset'] < 0:
        raise ValueError("Invalid value for parameter `offset` when calling `api20_file_system_replica_links_get`, must be a value greater than or equal to `0`")
    collection_formats = {}
    path_params = {}
    query_params = []
    # Build the query string; list-valued parameters are serialized as
    # comma-separated values (collection format 'csv').
    if 'continuation_token' in params:
        query_params.append(('continuation_token', params['continuation_token']))
    if 'filter' in params:
        query_params.append(('filter', params['filter']))
    if 'ids' in params:
        query_params.append(('ids', params['ids']))
        collection_formats['ids'] = 'csv'
    if 'limit' in params:
        query_params.append(('limit', params['limit']))
    if 'local_file_system_ids' in params:
        query_params.append(('local_file_system_ids', params['local_file_system_ids']))
        collection_formats['local_file_system_ids'] = 'csv'
    if 'local_file_system_names' in params:
        query_params.append(('local_file_system_names', params['local_file_system_names']))
        collection_formats['local_file_system_names'] = 'csv'
    if 'offset' in params:
        query_params.append(('offset', params['offset']))
    if 'remote_file_system_ids' in params:
        query_params.append(('remote_file_system_ids', params['remote_file_system_ids']))
        collection_formats['remote_file_system_ids'] = 'csv'
    if 'remote_file_system_names' in params:
        query_params.append(('remote_file_system_names', params['remote_file_system_names']))
        collection_formats['remote_file_system_names'] = 'csv'
    if 'remote_ids' in params:
        query_params.append(('remote_ids', params['remote_ids']))
        collection_formats['remote_ids'] = 'csv'
    if 'remote_names' in params:
        query_params.append(('remote_names', params['remote_names']))
        collection_formats['remote_names'] = 'csv'
    if 'sort' in params:
        query_params.append(('sort', params['sort']))
        collection_formats['sort'] = 'csv'
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # Authentication setting
    auth_settings = ['AuthorizationHeader']
    # Delegate the actual request (sync or async) to the shared ApiClient.
    return self.api_client.call_api(
        '/api/2.0/file-system-replica-links', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FileSystemReplicaLinkGetResponse',
        auth_settings=auth_settings,
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
def api20_file_system_replica_links_policies_delete_with_http_info(
    self,
    local_file_system_ids=None,  # type: List[str]
    local_file_system_names=None,  # type: List[str]
    member_ids=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    remote_ids=None,  # type: List[str]
    remote_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> None
    """DELETE file-system-replica-links/policies

    Remove a policy from a file system replication link.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api20_file_system_replica_links_policies_delete_with_http_info(async_req=True)
    >>> result = thread.get()

    :param list[str] local_file_system_ids: A comma-separated list of local file system IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `local_file_system_names` query parameter.
    :param list[str] local_file_system_names: A comma-separated list of local file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `local_file_system_ids` query parameter.
    :param list[str] member_ids: A comma-separated list of member IDs. If after filtering, there is not at least one resource that matches each of the elements of `member_ids`, then an error is returned. This cannot be provided together with the `member_names` query parameter.
    :param list[str] policy_ids: A comma-separated list of policy IDs. If after filtering, there is not at least one resource that matches each of the elements of `policy_ids`, then an error is returned. This cannot be provided together with the `policy_names` query parameter.
    :param list[str] policy_names: A comma-separated list of policy names.
    :param list[str] remote_ids: A comma-separated list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter.
    :param list[str] remote_names: A comma-separated list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
                                 It can also be a tuple of (connection time, read time) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Normalize scalar arguments into one-element lists so the query-string
    # builder below can treat every multi-value parameter uniformly.
    if local_file_system_ids is not None:
        if not isinstance(local_file_system_ids, list):
            local_file_system_ids = [local_file_system_ids]
    if local_file_system_names is not None:
        if not isinstance(local_file_system_names, list):
            local_file_system_names = [local_file_system_names]
    if member_ids is not None:
        if not isinstance(member_ids, list):
            member_ids = [member_ids]
    if policy_ids is not None:
        if not isinstance(policy_ids, list):
            policy_ids = [policy_ids]
    if policy_names is not None:
        if not isinstance(policy_names, list):
            policy_names = [policy_names]
    if remote_ids is not None:
        if not isinstance(remote_ids, list):
            remote_ids = [remote_ids]
    if remote_names is not None:
        if not isinstance(remote_names, list):
            remote_names = [remote_names]
    # CAUTION: locals() is snapshotted here, so every non-None local name
    # defined above (i.e. the method parameters) becomes a request
    # parameter. Do not introduce helper locals before this line.
    params = {k: v for k, v in six.iteritems(locals()) if v is not None}
    # Convert the filter into a string
    # (generated boilerplate — this endpoint declares no `filter`/`sort`
    # parameters, so these branches are never taken here)
    if params.get('filter'):
        params['filter'] = str(params['filter'])
    if params.get('sort'):
        params['sort'] = [str(_x) for _x in params['sort']]
    collection_formats = {}
    path_params = {}
    query_params = []
    # Build the query string; list-valued parameters are serialized as
    # comma-separated values (collection format 'csv').
    if 'local_file_system_ids' in params:
        query_params.append(('local_file_system_ids', params['local_file_system_ids']))
        collection_formats['local_file_system_ids'] = 'csv'
    if 'local_file_system_names' in params:
        query_params.append(('local_file_system_names', params['local_file_system_names']))
        collection_formats['local_file_system_names'] = 'csv'
    if 'member_ids' in params:
        query_params.append(('member_ids', params['member_ids']))
        collection_formats['member_ids'] = 'csv'
    if 'policy_ids' in params:
        query_params.append(('policy_ids', params['policy_ids']))
        collection_formats['policy_ids'] = 'csv'
    if 'policy_names' in params:
        query_params.append(('policy_names', params['policy_names']))
        collection_formats['policy_names'] = 'csv'
    if 'remote_ids' in params:
        query_params.append(('remote_ids', params['remote_ids']))
        collection_formats['remote_ids'] = 'csv'
    if 'remote_names' in params:
        query_params.append(('remote_names', params['remote_names']))
        collection_formats['remote_names'] = 'csv'
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # Authentication setting
    auth_settings = ['AuthorizationHeader']
    # Delegate the actual request (sync or async) to the shared ApiClient.
    return self.api_client.call_api(
        '/api/2.0/file-system-replica-links/policies', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
def api20_file_system_replica_links_policies_get_with_http_info(
    self,
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    local_file_system_ids=None,  # type: List[str]
    local_file_system_names=None,  # type: List[str]
    member_ids=None,  # type: List[str]
    offset=None,  # type: int
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    remote_ids=None,  # type: List[str]
    remote_file_system_ids=None,  # type: List[str]
    remote_file_system_names=None,  # type: List[str]
    remote_names=None,  # type: List[str]
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMemberWithRemoteGetResponse
    """GET file-system-replica-links/policies

    List file system replication link policies.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.api20_file_system_replica_links_policies_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
    :param str filter: Exclude resources that don't match the specified criteria.
    :param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
    :param list[str] local_file_system_ids: A comma-separated list of local file system IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `local_file_system_names` query parameter.
    :param list[str] local_file_system_names: A comma-separated list of local file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `local_file_system_ids` query parameter.
    :param list[str] member_ids: A comma-separated list of member IDs. If after filtering, there is not at least one resource that matches each of the elements of `member_ids`, then an error is returned. This cannot be provided together with the `member_names` query parameter.
    :param int offset: The offset of the first resource to return from a collection.
    :param list[str] policy_ids: A comma-separated list of policy IDs. If after filtering, there is not at least one resource that matches each of the elements of `policy_ids`, then an error is returned. This cannot be provided together with the `policy_names` query parameter.
    :param list[str] policy_names: A comma-separated list of policy names.
    :param list[str] remote_ids: A comma-separated list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter.
    :param list[str] remote_file_system_ids: A comma-separated list of remote file system IDs. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_file_system_names` query parameter.
    :param list[str] remote_file_system_names: A comma-separated list of remote file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_file_system_ids` query parameter.
    :param list[str] remote_names: A comma-separated list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter.
    :param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: PolicyMemberWithRemoteGetResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Coerce scalar arguments into single-element lists so the `csv`
    # collection format below can serialize every list parameter uniformly.
    if local_file_system_ids is not None:
        if not isinstance(local_file_system_ids, list):
            local_file_system_ids = [local_file_system_ids]
    if local_file_system_names is not None:
        if not isinstance(local_file_system_names, list):
            local_file_system_names = [local_file_system_names]
    if member_ids is not None:
        if not isinstance(member_ids, list):
            member_ids = [member_ids]
    if policy_ids is not None:
        if not isinstance(policy_ids, list):
            policy_ids = [policy_ids]
    if policy_names is not None:
        if not isinstance(policy_names, list):
            policy_names = [policy_names]
    if remote_ids is not None:
        if not isinstance(remote_ids, list):
            remote_ids = [remote_ids]
    if remote_file_system_ids is not None:
        if not isinstance(remote_file_system_ids, list):
            remote_file_system_ids = [remote_file_system_ids]
    if remote_file_system_names is not None:
        if not isinstance(remote_file_system_names, list):
            remote_file_system_names = [remote_file_system_names]
    if remote_names is not None:
        if not isinstance(remote_names, list):
            remote_names = [remote_names]
    if sort is not None:
        if not isinstance(sort, list):
            sort = [sort]
    # Collect every non-None local into the candidate parameter dict.
    # NOTE: the keys come from the local variable names above, so those
    # names must match the wire parameter names exactly — do not rename.
    params = {k: v for k, v in six.iteritems(locals()) if v is not None}
    # Convert the filter into a string
    if params.get('filter'):
        params['filter'] = str(params['filter'])
    if params.get('sort'):
        params['sort'] = [str(_x) for _x in params['sort']]

    # Client-side validation of pagination bounds before issuing the request.
    if 'limit' in params and params['limit'] < 1:
        raise ValueError("Invalid value for parameter `limit` when calling `api20_file_system_replica_links_policies_get`, must be a value greater than or equal to `1`")
    if 'offset' in params and params['offset'] < 0:
        raise ValueError("Invalid value for parameter `offset` when calling `api20_file_system_replica_links_policies_get`, must be a value greater than or equal to `0`")

    collection_formats = {}
    path_params = {}
    query_params = []

    # Build the query string; list-valued parameters are marked `csv` so the
    # api_client joins them with commas when serializing.
    if 'continuation_token' in params:
        query_params.append(('continuation_token', params['continuation_token']))
    if 'filter' in params:
        query_params.append(('filter', params['filter']))
    if 'limit' in params:
        query_params.append(('limit', params['limit']))
    if 'local_file_system_ids' in params:
        query_params.append(('local_file_system_ids', params['local_file_system_ids']))
        collection_formats['local_file_system_ids'] = 'csv'
    if 'local_file_system_names' in params:
        query_params.append(('local_file_system_names', params['local_file_system_names']))
        collection_formats['local_file_system_names'] = 'csv'
    if 'member_ids' in params:
        query_params.append(('member_ids', params['member_ids']))
        collection_formats['member_ids'] = 'csv'
    if 'offset' in params:
        query_params.append(('offset', params['offset']))
    if 'policy_ids' in params:
        query_params.append(('policy_ids', params['policy_ids']))
        collection_formats['policy_ids'] = 'csv'
    if 'policy_names' in params:
        query_params.append(('policy_names', params['policy_names']))
        collection_formats['policy_names'] = 'csv'
    if 'remote_ids' in params:
        query_params.append(('remote_ids', params['remote_ids']))
        collection_formats['remote_ids'] = 'csv'
    if 'remote_file_system_ids' in params:
        query_params.append(('remote_file_system_ids', params['remote_file_system_ids']))
        collection_formats['remote_file_system_ids'] = 'csv'
    if 'remote_file_system_names' in params:
        query_params.append(('remote_file_system_names', params['remote_file_system_names']))
        collection_formats['remote_file_system_names'] = 'csv'
    if 'remote_names' in params:
        query_params.append(('remote_names', params['remote_names']))
        collection_formats['remote_names'] = 'csv'
    if 'sort' in params:
        query_params.append(('sort', params['sort']))
        collection_formats['sort'] = 'csv'

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # Authentication setting
    auth_settings = ['AuthorizationHeader']

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared api_client.
    return self.api_client.call_api(
        '/api/2.0/file-system-replica-links/policies', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PolicyMemberWithRemoteGetResponse',
        auth_settings=auth_settings,
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
def api20_file_system_replica_links_policies_post_with_http_info(
    self,
    local_file_system_ids=None,  # type: List[str]
    local_file_system_names=None,  # type: List[str]
    member_ids=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    remote_ids=None,  # type: List[str]
    remote_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMemberWithRemoteResponse
    """POST file-system-replica-links/policies

    Add a policy to a file system replication link.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.api20_file_system_replica_links_policies_post_with_http_info(async_req=True)
    >>> result = thread.get()

    :param list[str] local_file_system_ids: A comma-separated list of local file system IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `local_file_system_names` query parameter.
    :param list[str] local_file_system_names: A comma-separated list of local file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `local_file_system_ids` query parameter.
    :param list[str] member_ids: A comma-separated list of member IDs. If after filtering, there is not at least one resource that matches each of the elements of `member_ids`, then an error is returned. This cannot be provided together with the `member_names` query parameter.
    :param list[str] policy_ids: A comma-separated list of policy IDs. If after filtering, there is not at least one resource that matches each of the elements of `policy_ids`, then an error is returned. This cannot be provided together with the `policy_names` query parameter.
    :param list[str] policy_names: A comma-separated list of policy names.
    :param list[str] remote_ids: A comma-separated list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter.
    :param list[str] remote_names: A comma-separated list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: PolicyMemberWithRemoteResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Coerce scalar arguments into single-element lists so the `csv`
    # collection format below can serialize every list parameter uniformly.
    if local_file_system_ids is not None:
        if not isinstance(local_file_system_ids, list):
            local_file_system_ids = [local_file_system_ids]
    if local_file_system_names is not None:
        if not isinstance(local_file_system_names, list):
            local_file_system_names = [local_file_system_names]
    if member_ids is not None:
        if not isinstance(member_ids, list):
            member_ids = [member_ids]
    if policy_ids is not None:
        if not isinstance(policy_ids, list):
            policy_ids = [policy_ids]
    if policy_names is not None:
        if not isinstance(policy_names, list):
            policy_names = [policy_names]
    if remote_ids is not None:
        if not isinstance(remote_ids, list):
            remote_ids = [remote_ids]
    if remote_names is not None:
        if not isinstance(remote_names, list):
            remote_names = [remote_names]
    # Collect every non-None local into the candidate parameter dict.
    # NOTE: keys come from the local variable names above — do not rename.
    params = {k: v for k, v in six.iteritems(locals()) if v is not None}
    # Convert the filter into a string
    # (this endpoint takes no `filter`/`sort` arguments, so these two
    # normalizations are no-ops kept for generator-template consistency)
    if params.get('filter'):
        params['filter'] = str(params['filter'])
    if params.get('sort'):
        params['sort'] = [str(_x) for _x in params['sort']]

    collection_formats = {}
    path_params = {}
    query_params = []

    # Build the query string; list-valued parameters are marked `csv` so the
    # api_client joins them with commas when serializing.
    if 'local_file_system_ids' in params:
        query_params.append(('local_file_system_ids', params['local_file_system_ids']))
        collection_formats['local_file_system_ids'] = 'csv'
    if 'local_file_system_names' in params:
        query_params.append(('local_file_system_names', params['local_file_system_names']))
        collection_formats['local_file_system_names'] = 'csv'
    if 'member_ids' in params:
        query_params.append(('member_ids', params['member_ids']))
        collection_formats['member_ids'] = 'csv'
    if 'policy_ids' in params:
        query_params.append(('policy_ids', params['policy_ids']))
        collection_formats['policy_ids'] = 'csv'
    if 'policy_names' in params:
        query_params.append(('policy_names', params['policy_names']))
        collection_formats['policy_names'] = 'csv'
    if 'remote_ids' in params:
        query_params.append(('remote_ids', params['remote_ids']))
        collection_formats['remote_ids'] = 'csv'
    if 'remote_names' in params:
        query_params.append(('remote_names', params['remote_names']))
        collection_formats['remote_names'] = 'csv'

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # Authentication setting
    auth_settings = ['AuthorizationHeader']

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared api_client.
    return self.api_client.call_api(
        '/api/2.0/file-system-replica-links/policies', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PolicyMemberWithRemoteResponse',
        auth_settings=auth_settings,
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
def api20_file_system_replica_links_post_with_http_info(
    self,
    file_system_replica_link=None,  # type: models.FileSystemReplicaLink
    ids=None,  # type: List[str]
    local_file_system_ids=None,  # type: List[str]
    local_file_system_names=None,  # type: List[str]
    remote_file_system_names=None,  # type: List[str]
    remote_ids=None,  # type: List[str]
    remote_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.FileSystemReplicaLinkResponse
    """POST file-system-replica-links

    Create a file system replication link.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.api20_file_system_replica_links_post_with_http_info(file_system_replica_link, async_req=True)
    >>> result = thread.get()

    :param FileSystemReplicaLink file_system_replica_link: (required)
    :param list[str] ids: A comma-separated list of resource IDs. If after filtering, there is not at least one resource that matches each of the elements of `ids`, then an error is returned. This cannot be provided together with the `name` or `names` query parameters.
    :param list[str] local_file_system_ids: A comma-separated list of local file system IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `local_file_system_names` query parameter.
    :param list[str] local_file_system_names: A comma-separated list of local file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `local_file_system_ids` query parameter.
    :param list[str] remote_file_system_names: A comma-separated list of remote file system names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_file_system_ids` query parameter.
    :param list[str] remote_ids: A comma-separated list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter.
    :param list[str] remote_names: A comma-separated list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: FileSystemReplicaLinkResponse
        If the method is called asynchronously,
        returns the request thread.

    :raises TypeError: if `file_system_replica_link` is None.
    """
    # Coerce scalar arguments into single-element lists so the `csv`
    # collection format below can serialize every list parameter uniformly.
    if ids is not None:
        if not isinstance(ids, list):
            ids = [ids]
    if local_file_system_ids is not None:
        if not isinstance(local_file_system_ids, list):
            local_file_system_ids = [local_file_system_ids]
    if local_file_system_names is not None:
        if not isinstance(local_file_system_names, list):
            local_file_system_names = [local_file_system_names]
    if remote_file_system_names is not None:
        if not isinstance(remote_file_system_names, list):
            remote_file_system_names = [remote_file_system_names]
    if remote_ids is not None:
        if not isinstance(remote_ids, list):
            remote_ids = [remote_ids]
    if remote_names is not None:
        if not isinstance(remote_names, list):
            remote_names = [remote_names]
    # Collect every non-None local into the candidate parameter dict.
    # NOTE: keys come from the local variable names above — do not rename.
    params = {k: v for k, v in six.iteritems(locals()) if v is not None}
    # Convert the filter into a string
    # (this endpoint takes no `filter`/`sort` arguments, so these two
    # normalizations are no-ops kept for generator-template consistency)
    if params.get('filter'):
        params['filter'] = str(params['filter'])
    if params.get('sort'):
        params['sort'] = [str(_x) for _x in params['sort']]
    # verify the required parameter 'file_system_replica_link' is set
    if file_system_replica_link is None:
        raise TypeError("Missing the required parameter `file_system_replica_link` when calling `api20_file_system_replica_links_post`")

    collection_formats = {}
    path_params = {}
    query_params = []

    # Build the query string; list-valued parameters are marked `csv` so the
    # api_client joins them with commas when serializing.
    if 'ids' in params:
        query_params.append(('ids', params['ids']))
        collection_formats['ids'] = 'csv'
    if 'local_file_system_ids' in params:
        query_params.append(('local_file_system_ids', params['local_file_system_ids']))
        collection_formats['local_file_system_ids'] = 'csv'
    if 'local_file_system_names' in params:
        query_params.append(('local_file_system_names', params['local_file_system_names']))
        collection_formats['local_file_system_names'] = 'csv'
    if 'remote_file_system_names' in params:
        query_params.append(('remote_file_system_names', params['remote_file_system_names']))
        collection_formats['remote_file_system_names'] = 'csv'
    if 'remote_ids' in params:
        query_params.append(('remote_ids', params['remote_ids']))
        collection_formats['remote_ids'] = 'csv'
    if 'remote_names' in params:
        query_params.append(('remote_names', params['remote_names']))
        collection_formats['remote_names'] = 'csv'

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The replica link object is sent as the JSON request body.
    if 'file_system_replica_link' in params:
        body_params = params['file_system_replica_link']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # Authentication setting
    auth_settings = ['AuthorizationHeader']

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared api_client.
    return self.api_client.call_api(
        '/api/2.0/file-system-replica-links', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FileSystemReplicaLinkResponse',
        auth_settings=auth_settings,
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
def api20_file_system_replica_links_transfer_get_with_http_info(
    self,
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names_or_owner_names=None,  # type: List[str]
    offset=None,  # type: int
    remote_ids=None,  # type: List[str]
    remote_names=None,  # type: List[str]
    sort=None,  # type: List[str]
    total_only=None,  # type: bool
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.FileSystemSnapshotGetTransferResponse
    """GET file-system-replica-links/transfer

    List the transfer status details for file system replication.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.api20_file_system_replica_links_transfer_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
    :param str filter: Exclude resources that don't match the specified criteria.
    :param list[str] ids: A comma-separated list of resource IDs. If after filtering, there is not at least one resource that matches each of the elements of `ids`, then an error is returned. This cannot be provided together with the `name` or `names` query parameters.
    :param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
    :param list[str] names_or_owner_names: A comma-separated list of resource names. Either the names of the snapshots or the owning file systems.
    :param int offset: The offset of the first resource to return from a collection.
    :param list[str] remote_ids: A comma-separated list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter.
    :param list[str] remote_names: A comma-separated list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter.
    :param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
    :param bool total_only: Only return the total record for the specified items. The total record will be the total of all items after filtering. The `items` list will be empty.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: FileSystemSnapshotGetTransferResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Coerce scalar arguments into single-element lists so the `csv`
    # collection format below can serialize every list parameter uniformly.
    if ids is not None:
        if not isinstance(ids, list):
            ids = [ids]
    if names_or_owner_names is not None:
        if not isinstance(names_or_owner_names, list):
            names_or_owner_names = [names_or_owner_names]
    if remote_ids is not None:
        if not isinstance(remote_ids, list):
            remote_ids = [remote_ids]
    if remote_names is not None:
        if not isinstance(remote_names, list):
            remote_names = [remote_names]
    if sort is not None:
        if not isinstance(sort, list):
            sort = [sort]
    # Collect every non-None local into the candidate parameter dict.
    # NOTE: keys come from the local variable names above — do not rename.
    params = {k: v for k, v in six.iteritems(locals()) if v is not None}
    # Convert the filter into a string
    if params.get('filter'):
        params['filter'] = str(params['filter'])
    if params.get('sort'):
        params['sort'] = [str(_x) for _x in params['sort']]

    # Client-side validation of pagination bounds before issuing the request.
    if 'limit' in params and params['limit'] < 1:
        raise ValueError("Invalid value for parameter `limit` when calling `api20_file_system_replica_links_transfer_get`, must be a value greater than or equal to `1`")
    if 'offset' in params and params['offset'] < 0:
        raise ValueError("Invalid value for parameter `offset` when calling `api20_file_system_replica_links_transfer_get`, must be a value greater than or equal to `0`")

    collection_formats = {}
    path_params = {}
    query_params = []

    # Build the query string; list-valued parameters are marked `csv` so the
    # api_client joins them with commas when serializing.
    if 'continuation_token' in params:
        query_params.append(('continuation_token', params['continuation_token']))
    if 'filter' in params:
        query_params.append(('filter', params['filter']))
    if 'ids' in params:
        query_params.append(('ids', params['ids']))
        collection_formats['ids'] = 'csv'
    if 'limit' in params:
        query_params.append(('limit', params['limit']))
    if 'names_or_owner_names' in params:
        query_params.append(('names_or_owner_names', params['names_or_owner_names']))
        collection_formats['names_or_owner_names'] = 'csv'
    if 'offset' in params:
        query_params.append(('offset', params['offset']))
    if 'remote_ids' in params:
        query_params.append(('remote_ids', params['remote_ids']))
        collection_formats['remote_ids'] = 'csv'
    if 'remote_names' in params:
        query_params.append(('remote_names', params['remote_names']))
        collection_formats['remote_names'] = 'csv'
    if 'sort' in params:
        query_params.append(('sort', params['sort']))
        collection_formats['sort'] = 'csv'
    if 'total_only' in params:
        query_params.append(('total_only', params['total_only']))

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # Authentication setting
    auth_settings = ['AuthorizationHeader']

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared api_client.
    return self.api_client.call_api(
        '/api/2.0/file-system-replica-links/transfer', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FileSystemSnapshotGetTransferResponse',
        auth_settings=auth_settings,
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
| 57.526857
| 449
| 0.668647
| 6,660
| 50,336
| 4.827177
| 0.04009
| 0.069987
| 0.055989
| 0.033594
| 0.950823
| 0.940962
| 0.938318
| 0.931506
| 0.930418
| 0.928583
| 0
| 0.001913
| 0.252205
| 50,336
| 874
| 450
| 57.592677
| 0.852182
| 0.410561
| 0
| 0.912397
| 0
| 0.009917
| 0.175075
| 0.074775
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01157
| false
| 0
| 0.008264
| 0
| 0.031405
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0cdeefd05e6018f8b97754466a732140b265a3b
| 1,132
|
py
|
Python
|
setup.py
|
saparina/SeaRNN-open
|
d0703b4f9e6f03020d70185270672f6d7649003e
|
[
"MIT"
] | 47
|
2018-01-30T15:56:32.000Z
|
2021-04-14T16:41:19.000Z
|
setup.py
|
saparina/SeaRNN-open
|
d0703b4f9e6f03020d70185270672f6d7649003e
|
[
"MIT"
] | 6
|
2018-06-16T13:52:17.000Z
|
2020-07-01T10:01:32.000Z
|
setup.py
|
saparina/SeaRNN-open
|
d0703b4f9e6f03020d70185270672f6d7649003e
|
[
"MIT"
] | 9
|
2018-01-30T16:26:39.000Z
|
2018-08-15T02:45:31.000Z
|
"""Build script for the project's Cython extension modules.

Every extension shares the same configuration (NumPy headers on the
include path, default cythonize options), so the nine original
copy-pasted setup() calls are replaced by one data-driven loop.  The
sequence and arguments of the setup() invocations are unchanged.
"""
import numpy
from distutils.core import setup
from Cython.Build import cythonize

# All .pyx sources to compile, in the original build order.
_PYX_SOURCES = [
    "cython/fast_bleu.pyx",
    "cython/fast_bleu1.pyx",
    "cython/fast_gleu.pyx",
    "cython/fast_chunkscore.pyx",
    "cython/fast_bleu_ref_rollout.pyx",
    "cython/fast_bleu_ref_rollout_with_suffix_length.pyx",
    "cython/fast_gleu_ref_rollout_with_suffix_length.pyx",
    "cython/fast_bleu_ref_rollout_with_suffix_length_bleu1noBrev.pyx",
    "cython/fast_bleu_ref_rollout_with_suffix_length_bleu1.pyx",
]

for _pyx in _PYX_SOURCES:
    # One setup() call per module, mirroring the original script's
    # nine independent invocations.
    setup(
        ext_modules=cythonize(_pyx),
        include_dirs=[numpy.get_include()],
    )
| 22.196078
| 92
| 0.767668
| 153
| 1,132
| 5.287582
| 0.169935
| 0.088999
| 0.166873
| 0.266996
| 0.888752
| 0.888752
| 0.888752
| 0.793572
| 0.793572
| 0.793572
| 0
| 0.002956
| 0.103357
| 1,132
| 50
| 93
| 22.64
| 0.794089
| 0
| 0
| 0.461538
| 0
| 0
| 0.301237
| 0.265901
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1ccc2873e638c3fabfc2be5594a95d9e011ba3ff
| 12,389
|
py
|
Python
|
zhaquirks/tuya/ts0601_switch.py
|
Siglis-AG/zha-device-handlers
|
2e8d6a117fbc1bec50cad463a8a2dcf948588838
|
[
"Apache-2.0"
] | 56
|
2018-12-07T19:45:36.000Z
|
2020-03-30T15:01:58.000Z
|
zhaquirks/tuya/ts0601_switch.py
|
Siglis-AG/zha-device-handlers
|
2e8d6a117fbc1bec50cad463a8a2dcf948588838
|
[
"Apache-2.0"
] | 207
|
2018-12-07T20:34:30.000Z
|
2020-04-03T11:50:39.000Z
|
zhaquirks/tuya/ts0601_switch.py
|
Siglis-AG/zha-device-handlers
|
2e8d6a117fbc1bec50cad463a8a2dcf948588838
|
[
"Apache-2.0"
] | 65
|
2018-12-08T01:11:41.000Z
|
2020-03-24T18:23:17.000Z
|
"""Tuya DP based switches."""
from zigpy.profiles import zha
from zigpy.zcl.clusters.general import Basic, Groups, Ota, Scenes, Time
from zhaquirks.const import (
DEVICE_TYPE,
ENDPOINTS,
INPUT_CLUSTERS,
MODELS_INFO,
OUTPUT_CLUSTERS,
PROFILE_ID,
)
from zhaquirks.tuya import TuyaSwitch
from zhaquirks.tuya.mcu import (
MoesSwitchManufCluster,
TuyaOnOff,
TuyaOnOffManufCluster,
TuyaOnOffNM,
)
class TuyaSingleSwitchTI(TuyaSwitch):
    """Tuya single channel switch time on in cluster device."""

    # Signature matched against the joining device.  Per the docstring, the
    # Time cluster appears in the *input* clusters here ("TI"), unlike the
    # *TO classes below where Time is an output cluster.
    signature = {
        # "node_descriptor": "<NodeDescriptor byte1=1 byte2=64 mac_capability_flags=142 manufacturer_code=4098
        # maximum_buffer_size=82 maximum_incoming_transfer_size=82 server_mask=11264
        # maximum_outgoing_transfer_size=82 descriptor_capability_field=0>",
        # device_version=1
        # input_clusters=[0x0000,0x0004, 0x0005,0x000a, 0xef00]
        # output_clusters=[0x0019]
        # <SimpleDescriptor endpoint=1 profile=260 device_type=81 device_version=1 input_clusters=[0, 4, 5, 10, 61184] output_clusters=[25]>
        MODELS_INFO: [("_TZE200_7tdtqgwv", "TS0601")],
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    Time.cluster_id,
                    TuyaOnOffManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Ota.cluster_id],
            }
        },
    }

    # Replacement applied by the quirk: the raw Tuya manufacturer cluster id
    # is swapped for the MoesSwitchManufCluster/TuyaOnOff implementations so
    # the switch is exposed as a standard on/off entity.
    replacement = {
        ENDPOINTS: {
            1: {
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Time.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    MoesSwitchManufCluster,
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [Ota.cluster_id],
            }
        }
    }
class TuyaSingleSwitchTO(TuyaSwitch):
    """Tuya single channel switch time on out cluster device."""

    # Same single-gang switch, but the Time cluster sits in the *output*
    # clusters ("TO") of the advertised endpoint.
    signature = {
        # "node_descriptor": "<NodeDescriptor byte1=1 byte2=64 mac_capability_flags=142 manufacturer_code=4098
        # maximum_buffer_size=82 maximum_incoming_transfer_size=82 server_mask=11264
        # maximum_outgoing_transfer_size=82 descriptor_capability_field=0>",
        # device_version=1
        # input_clusters=[0x0000, 0x0004, 0x0005, 0xef00]
        # output_clusters=[0x000a, 0x0019]
        # <SimpleDescriptor endpoint=1 profile=260 device_type=51 device_version=1 input_clusters=[0, 4, 5, 61184] output_clusters=[10, 25]>
        MODELS_INFO: [
            ("_TZE200_amp6tsvy", "TS0601"),
            ("_TZE200_oisqyl4o", "TS0601"),
            ("_TZE200_vhy3iakz", "TS0601"),  # ¿1 or 4 gangs?
            ("_TZ3000_uim07oem", "TS0601"),  # ¿1 or 4 gangs?
            ("_TZE200_wfxuhoea", "TS0601"),
            ("_TZE200_tviaymwx", "TS0601"),
        ],
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    TuyaOnOffManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            }
        },
    }

    # Replacement: expose the single gang as an on/off light backed by the
    # Moes manufacturer cluster and TuyaOnOff.
    replacement = {
        ENDPOINTS: {
            1: {
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    MoesSwitchManufCluster,
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            }
        }
    }
class TuyaDoubleSwitchTO(TuyaSwitch):
    """Tuya double channel switch time on out cluster device."""

    # The physical device advertises only endpoint 1; the quirk fans the two
    # gangs out to virtual endpoints 1 and 2 in the replacement below.
    signature = {
        # "node_descriptor": "<NodeDescriptor byte1=1 byte2=64 mac_capability_flags=142 manufacturer_code=4098
        # maximum_buffer_size=82 maximum_incoming_transfer_size=82 server_mask=11264
        # maximum_outgoing_transfer_size=82 descriptor_capability_field=0>",
        # device_version=1
        # input_clusters=[0x0000, 0x0004, 0x0005, 0xef00]
        # output_clusters=[0x000a, 0x0019]
        # <SimpleDescriptor endpoint=1 profile=260 device_type=51 device_version=1 input_clusters=[0, 4, 5, 61184] output_clusters=[10, 25]>
        MODELS_INFO: [
            ("_TZE200_g1ib5ldv", "TS0601"),
            ("_TZE200_wunufsil", "TS0601"),
        ],
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    TuyaOnOffManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            }
        },
    }

    # Endpoint 1 carries the shared clusters plus the first gang; endpoint 2
    # only needs a TuyaOnOff cluster for the second gang.
    replacement = {
        ENDPOINTS: {
            1: {
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    MoesSwitchManufCluster,
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            },
            2: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [],
            },
        }
    }
class TuyaTripleSwitchTO(TuyaSwitch):
    """Tuya triple channel switch time on out cluster device."""

    signature = {
        MODELS_INFO: [
            # ("_TZE200_kyfqmmyl", "TS0601"), ## candidate reported in #716
            ("_TZE200_tz32mtza", "TS0601"),
        ],
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    TuyaOnOffManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            }
        },
    }

    # Three gangs: endpoint 1 carries the shared clusters and the first gang;
    # endpoints 2-3 each expose just a TuyaOnOff cluster.
    replacement = {
        ENDPOINTS: {
            1: {
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    MoesSwitchManufCluster,
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            },
            2: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [],
            },
            3: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [],
            },
        }
    }
class TuyaQuadrupleSwitchTO(TuyaSwitch):
    """Tuya quadruple channel switch time on out cluster device."""

    signature = {
        MODELS_INFO: [
            ("_TZE200_aqnazj70", "TS0601"),
            ("_TZE200_1ozguk6x", "TS0601"),
            ("_TZE200_k6jhsr0q", "TS0601"),
        ],
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    TuyaOnOffManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            }
        },
    }

    # Four gangs: endpoint 1 carries the shared clusters and the first gang;
    # endpoints 2-4 each expose just a TuyaOnOff cluster.
    replacement = {
        ENDPOINTS: {
            1: {
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    MoesSwitchManufCluster,
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            },
            2: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [],
            },
            3: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [],
            },
            4: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOff,
                ],
                OUTPUT_CLUSTERS: [],
            },
        }
    }
class TuyaSextupleSwitchTO(TuyaSwitch):
    """Tuya sextuple channel switch time on out cluster device."""

    signature = {
        # <SimpleDescriptor endpoint=1 profile=260 device_type=81
        # input_clusters=[0x0000,0x0004,0x0005,0xEF00]
        # output_clusters=[0x000A,0x0019]>
        MODELS_INFO: [
            ("_TZE200_9mahtqtg", "TS0601"),
        ],
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    TuyaOnOffManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            }
        },
    }

    # Six gangs, exposed on endpoints 1-6.  NOTE(review): unlike the other
    # *TO classes this replacement uses TuyaOnOffManufCluster/TuyaOnOffNM
    # rather than MoesSwitchManufCluster/TuyaOnOff — presumably deliberate
    # for this model; confirm before harmonising.
    replacement = {
        ENDPOINTS: {
            1: {
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    Groups.cluster_id,
                    Scenes.cluster_id,
                    TuyaOnOffManufCluster,
                    TuyaOnOffNM,
                ],
                OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id],
            },
            2: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOffNM,
                ],
                OUTPUT_CLUSTERS: [],
            },
            3: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOffNM,
                ],
                OUTPUT_CLUSTERS: [],
            },
            4: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOffNM,
                ],
                OUTPUT_CLUSTERS: [],
            },
            5: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOffNM,
                ],
                OUTPUT_CLUSTERS: [],
            },
            6: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                INPUT_CLUSTERS: [
                    TuyaOnOffNM,
                ],
                OUTPUT_CLUSTERS: [],
            },
        }
    }
| 33.037333
| 140
| 0.487206
| 1,017
| 12,389
| 5.623402
| 0.126844
| 0.103864
| 0.052282
| 0.092499
| 0.849624
| 0.846127
| 0.840532
| 0.820948
| 0.790523
| 0.790523
| 0
| 0.056316
| 0.428122
| 12,389
| 374
| 141
| 33.125668
| 0.7506
| 0.171765
| 0
| 0.741325
| 0
| 0
| 0.030208
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015773
| 0
| 0.072555
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ce708963c0c58c3ecd82256067445ef1317718a
| 108
|
py
|
Python
|
platform/hwconf_data/bgm13/PythonSnippet/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/bgm13/PythonSnippet/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/bgm13/PythonSnippet/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
from bgm13.halconfig import halconfig_types as types
from bgm13.halconfig import halconfig_dependency as dep
| 54
| 55
| 0.87963
| 16
| 108
| 5.8125
| 0.5
| 0.193548
| 0.387097
| 0.516129
| 0.709677
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041237
| 0.101852
| 108
| 2
| 55
| 54
| 0.917526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e8117c5d4f937f519dde6883a9f6ee64815c9d94
| 217,846
|
py
|
Python
|
dohq_teamcity/api/build_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 23
|
2018-10-19T07:28:45.000Z
|
2021-11-12T12:46:09.000Z
|
dohq_teamcity/api/build_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 31
|
2018-10-16T05:53:11.000Z
|
2021-09-09T14:44:14.000Z
|
dohq_teamcity/api/build_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 12
|
2018-10-28T23:00:17.000Z
|
2021-09-07T12:07:13.000Z
|
# coding: utf-8
"""
TeamCity REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2018.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
from dohq_teamcity.custom.base_model import TeamCityObject
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dohq_teamcity.models.build import Build # noqa: F401,E501
from dohq_teamcity.models.build_cancel_request import BuildCancelRequest # noqa: F401,E501
from dohq_teamcity.models.build_changes import BuildChanges # noqa: F401,E501
from dohq_teamcity.models.builds import Builds # noqa: F401,E501
from dohq_teamcity.models.comment import Comment # noqa: F401,E501
from dohq_teamcity.models.file import File # noqa: F401,E501
from dohq_teamcity.models.files import Files # noqa: F401,E501
from dohq_teamcity.models.issues_usages import IssuesUsages # noqa: F401,E501
from dohq_teamcity.models.model_property import ModelProperty # noqa: F401,E501
from dohq_teamcity.models.problem_occurrences import ProblemOccurrences # noqa: F401,E501
from dohq_teamcity.models.properties import Properties # noqa: F401,E501
from dohq_teamcity.models.tags import Tags # noqa: F401,E501
from dohq_teamcity.models.test_occurrences import TestOccurrences # noqa: F401,E501
from dohq_teamcity.models.file import file # noqa: F401,E501
class BuildApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
base_name = 'Build'
def __init__(self, api_client=None):
    """Store the client used to issue the underlying HTTP requests."""
    self.api_client = api_client
def add_tags(self, build_locator, **kwargs):  # noqa: E501
    """add_tags  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param Tags body:
    :param str fields:
    :return: Tags
    """
    kwargs['_return_http_data_only'] = True
    # The generated helper already returns the request thread when
    # async_req is set, so one delegation covers both paths.
    return self.__add_tags_with_http_info(build_locator, **kwargs)  # noqa: E501
def cancel_build(self, build_locator, **kwargs):  # noqa: E501
    """cancel_build  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param BuildCancelRequest body:
    :param str fields:
    :return: Build
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__cancel_build_with_http_info(build_locator, **kwargs)  # noqa: E501
def cancel_build_0(self, build_locator, **kwargs):  # noqa: E501
    """cancel_build_0  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: BuildCancelRequest
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__cancel_build_0_with_http_info(build_locator, **kwargs)  # noqa: E501
def delete_all_parameters(self, build_locator, **kwargs):  # noqa: E501
    """delete_all_parameters  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__delete_all_parameters_with_http_info(build_locator, **kwargs)  # noqa: E501
def delete_build(self, build_locator, **kwargs):  # noqa: E501
    """delete_build  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__delete_build_with_http_info(build_locator, **kwargs)  # noqa: E501
def delete_builds(self, **kwargs):  # noqa: E501
    """delete_builds  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str locator:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__delete_builds_with_http_info(**kwargs)  # noqa: E501
def delete_comment(self, build_locator, **kwargs):  # noqa: E501
    """delete_comment  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__delete_comment_with_http_info(build_locator, **kwargs)  # noqa: E501
def delete_parameter(self, name, build_locator, **kwargs):  # noqa: E501
    """delete_parameter  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str name: (required)
    :param str build_locator: (required)
    :param str fields:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__delete_parameter_with_http_info(name, build_locator, **kwargs)  # noqa: E501
def get_artifact_dependency_changes(self, build_locator, **kwargs):  # noqa: E501
    """get_artifact_dependency_changes  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: BuildChanges
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_artifact_dependency_changes_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_artifacts_directory(self, build_locator, **kwargs):  # noqa: E501
    """get_artifacts_directory  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_artifacts_directory_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_build_number(self, build_locator, **kwargs):  # noqa: E501
    """get_build_number  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_build_number_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_build_status_text(self, build_locator, **kwargs):  # noqa: E501
    """get_build_status_text  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_build_status_text_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_canceled_info(self, build_locator, **kwargs):  # noqa: E501
    """get_canceled_info  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: Comment
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_canceled_info_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_children(self, path, build_locator, **kwargs):  # noqa: E501
    """get_children  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: Files
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_children_with_http_info(path, build_locator, **kwargs)  # noqa: E501
def get_children_alias(self, path, build_locator, **kwargs):  # noqa: E501
    """get_children_alias  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: Files
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_children_alias_with_http_info(path, build_locator, **kwargs)  # noqa: E501
def get_content(self, path, build_locator, **kwargs):  # noqa: E501
    """get_content  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str response_builder:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: file
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_content_with_http_info(path, build_locator, **kwargs)  # noqa: E501
def get_content_alias(self, path, build_locator, **kwargs):  # noqa: E501
    """get_content_alias  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str path: (required)
    :param str build_locator: (required)
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_content_alias_with_http_info(path, build_locator, **kwargs)  # noqa: E501
def get_metadata(self, path, build_locator, **kwargs):  # noqa: E501
    """get_metadata  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: File
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_metadata_with_http_info(path, build_locator, **kwargs)  # noqa: E501
def get_parameter(self, name, build_locator, **kwargs):  # noqa: E501
    """get_parameter  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str name: (required)
    :param str build_locator: (required)
    :param str fields:
    :param str fields2:
    :return: ModelProperty
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_parameter_with_http_info(name, build_locator, **kwargs)  # noqa: E501
def get_parameter_0(self, build_locator, property_name, **kwargs):  # noqa: E501
    """get_parameter_0  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str property_name: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_parameter_0_with_http_info(build_locator, property_name, **kwargs)  # noqa: E501
def get_parameter_value_long(self, name, build_locator, **kwargs):  # noqa: E501
    """get_parameter_value_long  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str name: (required)
    :param str build_locator: (required)
    :param str fields:
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_parameter_value_long_with_http_info(name, build_locator, **kwargs)  # noqa: E501
def get_parameters(self, build_locator, **kwargs):  # noqa: E501
    """get_parameters  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str locator:
    :param str fields:
    :param str fields2:
    :return: Properties
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_parameters_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_pinned(self, build_locator, **kwargs):  # noqa: E501
    """get_pinned  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_pinned_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_problems(self, build_locator, **kwargs):  # noqa: E501
    """get_problems  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: ProblemOccurrences
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_problems_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_resolved_parameter(self, build_locator, value, **kwargs):  # noqa: E501
    """get_resolved_parameter  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str value: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_resolved_parameter_with_http_info(build_locator, value, **kwargs)  # noqa: E501
def get_root(self, build_locator, **kwargs):  # noqa: E501
    """get_root  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: Files
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_root_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_tests(self, build_locator, **kwargs):  # noqa: E501
    """get_tests  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized data.

    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: TestOccurrences
    """
    kwargs['_return_http_data_only'] = True
    # Helper returns the request thread itself when async_req is set.
    return self.__get_tests_with_http_info(build_locator, **kwargs)  # noqa: E501
def get_zipped(self, path, build_locator, **kwargs):  # noqa: E501
    """get_zipped  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.get_zipped(path, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str base_path:
    :param str locator:
    :param str name:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__get_zipped_with_http_info(path, build_locator, **kwargs)  # noqa: E501
def pin_build(self, build_locator, **kwargs):  # noqa: E501
    """pin_build  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.pin_build(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str body:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__pin_build_with_http_info(build_locator, **kwargs)  # noqa: E501
def replace_comment(self, build_locator, **kwargs):  # noqa: E501
    """replace_comment  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.replace_comment(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str body:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__replace_comment_with_http_info(build_locator, **kwargs)  # noqa: E501
def replace_tags(self, build_locator, **kwargs):  # noqa: E501
    """replace_tags  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.replace_tags(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str locator:
    :param Tags body:
    :param str fields:
    :return: Tags
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__replace_tags_with_http_info(build_locator, **kwargs)  # noqa: E501
def reset_build_finish_parameters(self, build_locator, **kwargs):  # noqa: E501
    """reset_build_finish_parameters  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.reset_build_finish_parameters(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__reset_build_finish_parameters_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_aggregated_build_status(self, build_locator, **kwargs):  # noqa: E501
    """serve_aggregated_build_status  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_aggregated_build_status(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_aggregated_build_status_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_aggregated_build_status_icon(self, build_locator, suffix, **kwargs):  # noqa: E501
    """serve_aggregated_build_status_icon  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_aggregated_build_status_icon(build_locator, suffix, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str suffix: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_aggregated_build_status_icon_with_http_info(build_locator, suffix, **kwargs)  # noqa: E501
def serve_all_builds(self, **kwargs):  # noqa: E501
    """serve_all_builds  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_all_builds(async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_type:
    :param str status:
    :param str triggered_by_user:
    :param bool include_personal:
    :param bool include_canceled:
    :param bool only_pinned:
    :param list[str] tag:
    :param str agent_name:
    :param str since_build:
    :param str since_date:
    :param int start:
    :param int count:
    :param str locator:
    :param str fields:
    :return: Builds
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_all_builds_with_http_info(**kwargs)  # noqa: E501
def serve_build(self, build_locator, **kwargs):  # noqa: E501
    """serve_build  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: Build
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_build_actual_parameters(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_actual_parameters  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_actual_parameters(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: Properties
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_actual_parameters_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_build_field_by_build_only(self, build_locator, field, **kwargs):  # noqa: E501
    """serve_build_field_by_build_only  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_field_by_build_only(build_locator, field, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str field: (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_field_by_build_only_with_http_info(build_locator, field, **kwargs)  # noqa: E501
def serve_build_related_issues(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_related_issues  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_related_issues(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: IssuesUsages
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_related_issues_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_build_related_issues_old(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_related_issues_old  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_related_issues_old(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: IssuesUsages
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_related_issues_old_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_build_statistic_value(self, build_locator, name, **kwargs):  # noqa: E501
    """serve_build_statistic_value  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_statistic_value(build_locator, name, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str name: (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_statistic_value_with_http_info(build_locator, name, **kwargs)  # noqa: E501
def serve_build_statistic_values(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_statistic_values  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_statistic_values(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str fields:
    :return: Properties
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_statistic_values_with_http_info(build_locator, **kwargs)  # noqa: E501
def serve_build_status_icon(self, build_locator, suffix, **kwargs):  # noqa: E501
    """serve_build_status_icon  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_build_status_icon(build_locator, suffix, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str suffix: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_build_status_icon_with_http_info(build_locator, suffix, **kwargs)  # noqa: E501
def serve_source_file(self, build_locator, file_name, **kwargs):  # noqa: E501
    """serve_source_file  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_source_file(build_locator, file_name, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str file_name: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_source_file_with_http_info(build_locator, file_name, **kwargs)  # noqa: E501
def serve_tags(self, build_locator, **kwargs):  # noqa: E501
    """serve_tags  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.serve_tags(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str locator:
    :param str fields:
    :return: Tags
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__serve_tags_with_http_info(build_locator, **kwargs)  # noqa: E501
def set_build_number(self, build_locator, **kwargs):  # noqa: E501
    """set_build_number  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.set_build_number(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str body:
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__set_build_number_with_http_info(build_locator, **kwargs)  # noqa: E501
def set_build_status_text(self, build_locator, **kwargs):  # noqa: E501
    """set_build_status_text  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.set_build_status_text(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str body:
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__set_build_status_text_with_http_info(build_locator, **kwargs)  # noqa: E501
def set_parameter(self, build_locator, **kwargs):  # noqa: E501
    """set_parameter  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.set_parameter(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param ModelProperty body:
    :param str fields:
    :param str fields2:
    :return: ModelProperty
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__set_parameter_with_http_info(build_locator, **kwargs)  # noqa: E501
def set_parameter_0(self, name, build_locator, **kwargs):  # noqa: E501
    """set_parameter_0  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.set_parameter_0(name, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str name: (required)
    :param str build_locator: (required)
    :param ModelProperty body:
    :param str fields:
    :param str fields2:
    :return: ModelProperty
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__set_parameter_0_with_http_info(name, build_locator, **kwargs)  # noqa: E501
def set_parameter_value_long(self, name, build_locator, **kwargs):  # noqa: E501
    """set_parameter_value_long  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.set_parameter_value_long(name, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str name: (required)
    :param str build_locator: (required)
    :param str body:
    :param str fields:
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__set_parameter_value_long_with_http_info(name, build_locator, **kwargs)  # noqa: E501
def set_parameters(self, build_locator, **kwargs):  # noqa: E501
    """set_parameters  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.set_parameters(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param Properties body:
    :param str fields:
    :param str fields2:
    :return: Properties
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__set_parameters_with_http_info(build_locator, **kwargs)  # noqa: E501
def unpin_build(self, build_locator, **kwargs):  # noqa: E501
    """unpin_build  # noqa: E501

    Synchronous HTTP request by default; pass async_req=True to receive
    the request thread instead of the response data.
    >>> thread = api.unpin_build(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req: bool
    :param str build_locator: (required)
    :param str body:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper only ever want the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The helper handles both the sync and async cases itself.
    return self.__unpin_build_with_http_info(build_locator, **kwargs)  # noqa: E501
def __add_tags_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """add_tags  # noqa: E501

    Low-level implementation: validates the keyword arguments and
    dispatches the REST call through the shared API client.
    >>> thread = api.__add_tags_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :param Tags body:
    :param str fields:
    :return: Tags
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the transport options shared by all calls.
    all_params = ['build_locator', 'body', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = {'build_locator': build_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_tags" % key
            )
        params[key] = val
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `add_tags`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        locator = params['build_locator']
        # Locator objects carry their own identifier; plain strings pass through.
        if isinstance(locator, TeamCityObject):
            path_params['buildLocator'] = locator.locator_id
        else:
            path_params['buildLocator'] = locator  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/tags', 'POST',
        path_params,
        query_params,
        {},
        body=body_params,
        post_params=[],
        files={},
        response_type='Tags',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __cancel_build_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """cancel_build  # noqa: E501

    Low-level implementation: validates the keyword arguments and
    dispatches the REST call through the shared API client.
    >>> thread = api.__cancel_build_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :param BuildCancelRequest body:
    :param str fields:
    :return: Build
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the transport options shared by all calls.
    all_params = ['build_locator', 'body', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = {'build_locator': build_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cancel_build" % key
            )
        params[key] = val
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `cancel_build`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        locator = params['build_locator']
        # Locator objects carry their own identifier; plain strings pass through.
        if isinstance(locator, TeamCityObject):
            path_params['buildLocator'] = locator.locator_id
        else:
            path_params['buildLocator'] = locator  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}', 'POST',
        path_params,
        query_params,
        {},
        body=body_params,
        post_params=[],
        files={},
        response_type='Build',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __cancel_build_0_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """cancel_build_0  # noqa: E501

    Low-level implementation: validates the keyword arguments and
    dispatches the REST call through the shared API client.
    >>> thread = api.__cancel_build_0_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :return: BuildCancelRequest
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the transport options shared by all calls.
    all_params = ['build_locator',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = {'build_locator': build_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cancel_build_0" % key
            )
        params[key] = val
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `cancel_build_0`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        locator = params['build_locator']
        # Locator objects carry their own identifier; plain strings pass through.
        if isinstance(locator, TeamCityObject):
            path_params['buildLocator'] = locator.locator_id
        else:
            path_params['buildLocator'] = locator  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/example/buildCancelRequest', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='BuildCancelRequest',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_all_parameters_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """delete_all_parameters  # noqa: E501

    Low-level implementation: validates the keyword arguments and
    dispatches the REST call through the shared API client.
    >>> thread = api.__delete_all_parameters_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the transport options shared by all calls.
    all_params = ['build_locator', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = {'build_locator': build_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_all_parameters" % key
            )
        params[key] = val
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `delete_all_parameters`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        locator = params['build_locator']
        # Locator objects carry their own identifier; plain strings pass through.
        if isinstance(locator, TeamCityObject):
            path_params['buildLocator'] = locator.locator_id
        else:
            path_params['buildLocator'] = locator  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/attributes', 'DELETE',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_build_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """delete_build  # noqa: E501

    Low-level implementation: validates the keyword arguments and
    dispatches the REST call through the shared API client.
    >>> thread = api.__delete_build_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the transport options shared by all calls.
    all_params = ['build_locator',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = {'build_locator': build_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_build" % key
            )
        params[key] = val
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `delete_build`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        locator = params['build_locator']
        # Locator objects carry their own identifier; plain strings pass through.
        if isinstance(locator, TeamCityObject):
            path_params['buildLocator'] = locator.locator_id
        else:
            path_params['buildLocator'] = locator  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}', 'DELETE',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_builds_with_http_info(self, **kwargs):  # noqa: E501
    """delete_builds  # noqa: E501

    Low-level implementation: validates the keyword arguments and
    dispatches the REST call through the shared API client.
    >>> thread = api.__delete_builds_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str locator:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the transport options shared by all calls.
    all_params = ['locator',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_builds" % key
            )
        params[key] = val

    collection_formats = {}

    # No path parameters: this endpoint selects builds via the locator
    # query parameter only.
    query_params = []
    if 'locator' in params:
        query_params.append(('locator', params['locator']))  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds', 'DELETE',
        {},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_comment_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """delete_comment

    Issues DELETE /app/rest/builds/{buildLocator}/comment. Synchronous by
    default; pass async_req=True to receive the request thread instead.
    >>> thread = api.__delete_comment_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['build_locator', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_comment" % key
            )
    del params['kwargs']

    # The path placeholder is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `delete_comment`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        value = params['build_locator']
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params['buildLocator'] = value  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/comment', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_parameter_with_http_info(self, name, build_locator, **kwargs):  # noqa: E501
    """delete_parameter

    Issues DELETE /app/rest/builds/{buildLocator}/attributes/{name}.
    Synchronous by default; pass async_req=True for the request thread.
    >>> thread = api.__delete_parameter_with_http_info(name, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: (required)
    :param str build_locator: (required)
    :param str fields:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['name', 'build_locator', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_parameter" % key
            )
    del params['kwargs']

    # Both path placeholders are mandatory.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_parameter`")  # noqa: E501
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `delete_parameter`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    for placeholder, arg_name in (('name', 'name'),
                                  ('buildLocator', 'build_locator')):
        if arg_name in params:
            value = params[arg_name]
            if isinstance(value, TeamCityObject):
                value = value.locator_id
            path_params[placeholder] = value  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/attributes/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_artifact_dependency_changes_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """get_artifact_dependency_changes

    Issues GET /app/rest/builds/{buildLocator}/artifactDependencyChanges.
    Synchronous by default; pass async_req=True for the request thread.
    >>> thread = api.__get_artifact_dependency_changes_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: BuildChanges
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['build_locator', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artifact_dependency_changes" % key
            )
    del params['kwargs']

    # The path placeholder is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `get_artifact_dependency_changes`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        value = params['build_locator']
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params['buildLocator'] = value  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifactDependencyChanges', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BuildChanges',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_artifacts_directory_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """get_artifacts_directory

    Issues GET /app/rest/builds/{buildLocator}/artifactsDirectory.
    Synchronous by default; pass async_req=True for the request thread.
    >>> thread = api.__get_artifacts_directory_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['build_locator', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artifacts_directory" % key
            )
    del params['kwargs']

    # The path placeholder is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `get_artifacts_directory`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        value = params['build_locator']
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params['buildLocator'] = value  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifactsDirectory', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_build_number_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """get_build_number

    Issues GET /app/rest/builds/{buildLocator}/number. Synchronous by
    default; pass async_req=True for the request thread.
    >>> thread = api.__get_build_number_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['build_locator', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_build_number" % key
            )
    del params['kwargs']

    # The path placeholder is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `get_build_number`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        value = params['build_locator']
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params['buildLocator'] = value  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/number', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_build_status_text_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """get_build_status_text

    Issues GET /app/rest/builds/{buildLocator}/statusText. Synchronous by
    default; pass async_req=True for the request thread.
    >>> thread = api.__get_build_status_text_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['build_locator', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_build_status_text" % key
            )
    del params['kwargs']

    # The path placeholder is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `get_build_status_text`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        value = params['build_locator']
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params['buildLocator'] = value  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/statusText', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_canceled_info_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """get_canceled_info

    Issues GET /app/rest/builds/{buildLocator}/canceledInfo. Synchronous
    by default; pass async_req=True for the request thread.
    >>> thread = api.__get_canceled_info_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: Comment
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['build_locator', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_canceled_info" % key
            )
    del params['kwargs']

    # The path placeholder is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `get_canceled_info`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'build_locator' in params:
        value = params['build_locator']
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params['buildLocator'] = value  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/canceledInfo', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Comment',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_children_with_http_info(self, path, build_locator, **kwargs):  # noqa: E501
    """get_children  # noqa: E501

    Issues GET /app/rest/builds/{buildLocator}/artifacts/children{path}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_children_with_http_info(path, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: Files
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['path', 'build_locator', 'base_path', 'locator', 'fields', 'resolve_parameters', 'log_build_usage']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot locals() so positional args and validated kwargs share one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_children" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_children`")  # noqa: E501
    # verify the required parameter 'build_locator' is set
    if ('build_locator' not in params or
            params['build_locator'] is None):
        raise ValueError("Missing the required parameter `build_locator` when calling `get_children`")  # noqa: E501
    # Raw strings fix the invalid '\/' escape sequence (W605); the regex
    # engine sees the same pattern either way.
    # NOTE(review): the entire group is optional ('(...)?'), so this pattern
    # matches any string and the check can never raise; kept to mirror the
    # generated spec.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_children`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    # A TeamCityObject may be passed in place of a raw locator string; its
    # locator_id is used as the path value (presumably the string locator --
    # confirm against TeamCityObject).
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'build_locator' in params:
        if isinstance(params['build_locator'], TeamCityObject):
            path_params['buildLocator'] = params['build_locator'].locator_id
        else:
            path_params['buildLocator'] = params['build_locator']  # noqa: E501

    query_params = []
    if 'base_path' in params:
        query_params.append(('basePath', params['base_path']))  # noqa: E501
    if 'locator' in params:
        query_params.append(('locator', params['locator']))  # noqa: E501
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501
    if 'log_build_usage' in params:
        query_params.append(('logBuildUsage', params['log_build_usage']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifacts/children{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Files',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_children_alias_with_http_info(self, path, build_locator, **kwargs):  # noqa: E501
    """get_children_alias

    Issues GET /app/rest/builds/{buildLocator}/artifacts/{path}.
    Synchronous by default; pass async_req=True for the request thread.
    >>> thread = api.__get_children_alias_with_http_info(path, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: Files
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['path', 'build_locator', 'base_path', 'locator', 'fields', 'resolve_parameters', 'log_build_usage']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key in all_params:
            params[key] = val
        else:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_children_alias" % key
            )
    del params['kwargs']

    # Both path placeholders are mandatory.
    if params.get('path') is None:
        raise ValueError("Missing the required parameter `path` when calling `get_children_alias`")  # noqa: E501
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `get_children_alias`")  # noqa: E501
    if 'path' in params and not re.search('(.*)?', params['path']):  # noqa: E501
        raise ValueError("Invalid value for parameter `path` when calling `get_children_alias`, must conform to the pattern `/(.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    for placeholder, arg_name in (('path', 'path'),
                                  ('buildLocator', 'build_locator')):
        if arg_name in params:
            value = params[arg_name]
            if isinstance(value, TeamCityObject):
                value = value.locator_id
            path_params[placeholder] = value  # noqa: E501

    query_params = []
    for wire_name, arg_name in (('basePath', 'base_path'),
                                ('locator', 'locator'),
                                ('fields', 'fields'),
                                ('resolveParameters', 'resolve_parameters'),
                                ('logBuildUsage', 'log_build_usage')):
        if arg_name in params:
            query_params.append((wire_name, params[arg_name]))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifacts/{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Files',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_content_with_http_info(self, path, build_locator, **kwargs):  # noqa: E501
    """get_content  # noqa: E501

    Issues GET /app/rest/builds/{buildLocator}/artifacts/content{path}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_content_with_http_info(path, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str response_builder:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['path', 'build_locator', 'response_builder', 'resolve_parameters', 'log_build_usage']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot locals() so positional args and validated kwargs share one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_content" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_content`")  # noqa: E501
    # verify the required parameter 'build_locator' is set
    if ('build_locator' not in params or
            params['build_locator'] is None):
        raise ValueError("Missing the required parameter `build_locator` when calling `get_content`")  # noqa: E501
    # Raw strings fix the invalid '\/' escape sequence (W605); the regex
    # engine sees the same pattern either way.
    # NOTE(review): the entire group is optional, so this check can never
    # raise; kept to mirror the generated spec.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_content`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    # A TeamCityObject may be passed in place of a raw locator string; its
    # locator_id is used as the path value.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'build_locator' in params:
        if isinstance(params['build_locator'], TeamCityObject):
            path_params['buildLocator'] = params['build_locator'].locator_id
        else:
            path_params['buildLocator'] = params['build_locator']  # noqa: E501

    query_params = []
    if 'response_builder' in params:
        query_params.append(('responseBuilder', params['response_builder']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501
    if 'log_build_usage' in params:
        query_params.append(('logBuildUsage', params['log_build_usage']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifacts/content{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='file',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_content_alias_with_http_info(self, path, build_locator, **kwargs):  # noqa: E501
    """get_content_alias  # noqa: E501

    Issues GET /app/rest/builds/{buildLocator}/artifacts/files{path}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_content_alias_with_http_info(path, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str build_locator: (required)
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['path', 'build_locator', 'resolve_parameters', 'log_build_usage']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot locals() so positional args and validated kwargs share one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_content_alias" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_content_alias`")  # noqa: E501
    # verify the required parameter 'build_locator' is set
    if ('build_locator' not in params or
            params['build_locator'] is None):
        raise ValueError("Missing the required parameter `build_locator` when calling `get_content_alias`")  # noqa: E501
    # Raw strings fix the invalid '\/' escape sequence (W605); the regex
    # engine sees the same pattern either way.
    # NOTE(review): the entire group is optional, so this check can never
    # raise; kept to mirror the generated spec.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_content_alias`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    # A TeamCityObject may be passed in place of a raw locator string; its
    # locator_id is used as the path value.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'build_locator' in params:
        if isinstance(params['build_locator'], TeamCityObject):
            path_params['buildLocator'] = params['build_locator'].locator_id
        else:
            path_params['buildLocator'] = params['build_locator']  # noqa: E501

    query_params = []
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501
    if 'log_build_usage' in params:
        query_params.append(('logBuildUsage', params['log_build_usage']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifacts/files{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_metadata_with_http_info(self, path, build_locator, **kwargs):  # noqa: E501
    """get_metadata  # noqa: E501

    Issues GET /app/rest/builds/{buildLocator}/artifacts/metadata{path}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_metadata_with_http_info(path, build_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str build_locator: (required)
    :param str fields:
    :param bool resolve_parameters:
    :param bool log_build_usage:
    :return: File
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['path', 'build_locator', 'fields', 'resolve_parameters', 'log_build_usage']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot locals() so positional args and validated kwargs share one dict.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_metadata" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_metadata`")  # noqa: E501
    # verify the required parameter 'build_locator' is set
    if ('build_locator' not in params or
            params['build_locator'] is None):
        raise ValueError("Missing the required parameter `build_locator` when calling `get_metadata`")  # noqa: E501
    # Raw strings fix the invalid '\/' escape sequence (W605); the regex
    # engine sees the same pattern either way.
    # NOTE(review): the entire group is optional, so this check can never
    # raise; kept to mirror the generated spec.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_metadata`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    # A TeamCityObject may be passed in place of a raw locator string; its
    # locator_id is used as the path value.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'build_locator' in params:
        if isinstance(params['build_locator'], TeamCityObject):
            path_params['buildLocator'] = params['build_locator'].locator_id
        else:
            path_params['buildLocator'] = params['build_locator']  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501
    if 'log_build_usage' in params:
        query_params.append(('logBuildUsage', params['log_build_usage']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/artifacts/metadata{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='File',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_parameter_with_http_info(self, name, build_locator, **kwargs): # noqa: E501
"""get_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameter_with_http_info(name, build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str build_locator: (required)
:param str fields:
:param str fields2:
:return: ModelProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'build_locator', 'fields', 'fields2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_parameter`") # noqa: E501
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_parameter`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'fields2' in params:
query_params.append(('fields', params['fields2'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/attributes/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ModelProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameter_0_with_http_info(self, build_locator, property_name, **kwargs): # noqa: E501
"""get_parameter_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameter_0_with_http_info(build_locator, property_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str property_name: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'property_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_parameter_0`") # noqa: E501
# verify the required parameter 'property_name' is set
if ('property_name' not in params or
params['property_name'] is None):
raise ValueError("Missing the required parameter `property_name` when calling `get_parameter_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
if 'property_name' in params:
if isinstance(params['property_name'], TeamCityObject):
path_params['propertyName'] = params['property_name'].locator_id
else:
path_params['propertyName'] = params['property_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/resulting-properties/{propertyName}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameter_value_long_with_http_info(self, name, build_locator, **kwargs): # noqa: E501
"""get_parameter_value_long # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameter_value_long_with_http_info(name, build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str build_locator: (required)
:param str fields:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'build_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_value_long" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_parameter_value_long`") # noqa: E501
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_parameter_value_long`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/attributes/{name}/value', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameters_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""get_parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameters_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str locator:
:param str fields:
:param str fields2:
:return: Properties
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'locator', 'fields', 'fields2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameters" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_parameters`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'fields2' in params:
query_params.append(('fields', params['fields2'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/attributes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Properties', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_pinned_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""get_pinned # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_pinned_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_pinned" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_pinned`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/pin', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_problems_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""get_problems # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_problems_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str fields:
:return: ProblemOccurrences
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_problems" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_problems`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/problemOccurrences', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProblemOccurrences', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_resolved_parameter_with_http_info(self, build_locator, value, **kwargs): # noqa: E501
"""get_resolved_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_resolved_parameter_with_http_info(build_locator, value, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str value: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'value'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_resolved_parameter" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_resolved_parameter`") # noqa: E501
# verify the required parameter 'value' is set
if ('value' not in params or
params['value'] is None):
raise ValueError("Missing the required parameter `value` when calling `get_resolved_parameter`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
if 'value' in params:
if isinstance(params['value'], TeamCityObject):
path_params['value'] = params['value'].locator_id
else:
path_params['value'] = params['value'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/resolved/{value}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_root_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""get_root # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_root_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str base_path:
:param str locator:
:param str fields:
:param bool resolve_parameters:
:param bool log_build_usage:
:return: Files
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'base_path', 'locator', 'fields', 'resolve_parameters', 'log_build_usage'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_root" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_root`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'base_path' in params:
query_params.append(('basePath', params['base_path'])) # noqa: E501
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'resolve_parameters' in params:
query_params.append(('resolveParameters', params['resolve_parameters'])) # noqa: E501
if 'log_build_usage' in params:
query_params.append(('logBuildUsage', params['log_build_usage'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/artifacts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Files', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_tests_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""get_tests # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_tests_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str fields:
:return: TestOccurrences
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tests" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_tests`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/testOccurrences', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestOccurrences', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_zipped_with_http_info(self, path, build_locator, **kwargs): # noqa: E501
"""get_zipped # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_zipped_with_http_info(path, build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str path: (required)
:param str build_locator: (required)
:param str base_path:
:param str locator:
:param str name:
:param bool resolve_parameters:
:param bool log_build_usage:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['path', 'build_locator', 'base_path', 'locator', 'name', 'resolve_parameters', 'log_build_usage'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_zipped" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'path' is set
if ('path' not in params or
params['path'] is None):
raise ValueError("Missing the required parameter `path` when calling `get_zipped`") # noqa: E501
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `get_zipped`") # noqa: E501
if 'path' in params and not re.search('(\/.*)?', params['path']): # noqa: E501
raise ValueError("Invalid value for parameter `path` when calling `get_zipped`, must conform to the pattern `/(\/.*)?/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'path' in params:
if isinstance(params['path'], TeamCityObject):
path_params['path'] = params['path'].locator_id
else:
path_params['path'] = params['path'] # noqa: E501
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'base_path' in params:
query_params.append(('basePath', params['base_path'])) # noqa: E501
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'name' in params:
query_params.append(('name', params['name'])) # noqa: E501
if 'resolve_parameters' in params:
query_params.append(('resolveParameters', params['resolve_parameters'])) # noqa: E501
if 'log_build_usage' in params:
query_params.append(('logBuildUsage', params['log_build_usage'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/artifacts/archived{path}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __pin_build_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""pin_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__pin_build_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method pin_build" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `pin_build`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/pin', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __replace_comment_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""replace_comment # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__replace_comment_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_comment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `replace_comment`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/comment', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __replace_tags_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""replace_tags # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__replace_tags_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param str locator:
:param Tags body:
:param str fields:
:return: Tags
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_tags" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `replace_tags`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/builds/{buildLocator}/tags', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tags', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __reset_build_finish_parameters_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """reset_build_finish_parameters  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__reset_build_finish_parameters_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method reset_build_finish_parameters" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `reset_build_finish_parameters`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/caches/finishProperties', 'DELETE',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_aggregated_build_status_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_aggregated_build_status  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_aggregated_build_status_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_aggregated_build_status" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_aggregated_build_status`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    return self.api_client.call_api(
        '/app/rest/builds/aggregated/{buildLocator}/status', 'GET',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_aggregated_build_status_icon_with_http_info(self, build_locator, suffix, **kwargs):  # noqa: E501
    """serve_aggregated_build_status_icon  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_aggregated_build_status_icon_with_http_info(build_locator, suffix, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str suffix: (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['build_locator', 'suffix']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Merge **kwargs into params, rejecting anything unexpected.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_aggregated_build_status_icon" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_aggregated_build_status_icon`")  # noqa: E501
    # verify the required parameter 'suffix' is set
    if params.get('suffix') is None:
        raise ValueError("Missing the required parameter `suffix` when calling `serve_aggregated_build_status_icon`")  # noqa: E501
    # Bug fix: the pattern check used to run on any value and raised a
    # confusing TypeError when `suffix` was a TeamCityObject (which this
    # method otherwise supports below).  Restrict it to string values.
    # NOTE: the generated pattern `(.*)?` matches every string, so the
    # ValueError branch is unreachable for str input; kept for parity
    # with the generated spec.
    if isinstance(params['suffix'], six.string_types) and not re.search('(.*)?', params['suffix']):  # noqa: E501
        raise ValueError("Invalid value for parameter `suffix` when calling `serve_aggregated_build_status_icon`, must conform to the pattern `/(.*)?/`")  # noqa: E501

    path_params = {}
    # TeamCityObject path arguments contribute their locator string.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params['buildLocator'] = locator
    suffix_value = params['suffix']
    if isinstance(suffix_value, TeamCityObject):
        suffix_value = suffix_value.locator_id
    path_params['suffix'] = suffix_value

    return self.api_client.call_api(
        '/app/rest/builds/aggregated/{buildLocator}/statusIcon{suffix}', 'GET',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_all_builds_with_http_info(self, **kwargs):  # noqa: E501
    """serve_all_builds  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_all_builds_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_type:
    :param str status:
    :param str triggered_by_user:
    :param bool include_personal:
    :param bool include_canceled:
    :param bool only_pinned:
    :param list[str] tag:
    :param str agent_name:
    :param str since_build:
    :param str since_date:
    :param int start:
    :param int count:
    :param str locator:
    :param str fields:
    :return: Builds
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    # (python_name, wire_name) pairs in the order the query string is built.
    query_map = (
        ('build_type', 'buildType'),
        ('status', 'status'),
        ('triggered_by_user', 'triggeredByUser'),
        ('include_personal', 'includePersonal'),
        ('include_canceled', 'includeCanceled'),
        ('only_pinned', 'onlyPinned'),
        ('tag', 'tag'),
        ('agent_name', 'agentName'),
        ('since_build', 'sinceBuild'),
        ('since_date', 'sinceDate'),
        ('start', 'start'),
        ('count', 'count'),
        ('locator', 'locator'),
        ('fields', 'fields'),
    )
    accepted = [py_name for py_name, _ in query_map]
    accepted += ['async_req', '_return_http_data_only',
                 '_preload_content', '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_all_builds" % arg
            )
        params[arg] = value
    del params['kwargs']

    collection_formats = {}
    query_params = []
    for py_name, wire_name in query_map:
        if py_name in params:
            query_params.append((wire_name, params[py_name]))
            if py_name == 'tag':
                # 'tag' is repeatable: serialize as multiple parameters.
                collection_formats['tag'] = 'multi'  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/builds', 'GET',
        {},    # path params
        query_params,
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='Builds',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_build  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: Build
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    query_params = [('fields', params['fields'])] if 'fields' in params else []

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}', 'GET',
        path_params,
        query_params,
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='Build',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_actual_parameters_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_actual_parameters  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_actual_parameters_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: Properties
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_actual_parameters" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_actual_parameters`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    query_params = [('fields', params['fields'])] if 'fields' in params else []

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/resulting-properties', 'GET',
        path_params,
        query_params,
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='Properties',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_field_by_build_only_with_http_info(self, build_locator, field, **kwargs):  # noqa: E501
    """serve_build_field_by_build_only  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_field_by_build_only_with_http_info(build_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str field: (required)
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'field', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_field_by_build_only" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_field_by_build_only`")  # noqa: E501
    if params.get('field') is None:
        raise ValueError("Missing the required parameter `field` when calling `serve_build_field_by_build_only`")  # noqa: E501

    # TeamCityObject path arguments contribute their locator string.
    path_params = {}
    for py_name, wire_name in (('build_locator', 'buildLocator'),
                               ('field', 'field')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/{field}', 'GET',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_related_issues_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_related_issues  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_related_issues_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: IssuesUsages
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_related_issues" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_related_issues`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    query_params = [('fields', params['fields'])] if 'fields' in params else []

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/relatedIssues', 'GET',
        path_params,
        query_params,
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='IssuesUsages',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_related_issues_old_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_related_issues_old  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_related_issues_old_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: IssuesUsages
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_related_issues_old" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_related_issues_old`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    query_params = [('fields', params['fields'])] if 'fields' in params else []

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/related-issues', 'GET',
        path_params,
        query_params,
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='IssuesUsages',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_statistic_value_with_http_info(self, build_locator, name, **kwargs):  # noqa: E501
    """serve_build_statistic_value  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_statistic_value_with_http_info(build_locator, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str name: (required)
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'name', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_statistic_value" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_statistic_value`")  # noqa: E501
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `serve_build_statistic_value`")  # noqa: E501

    # TeamCityObject path arguments contribute their locator string.
    path_params = {}
    for py_name, wire_name in (('build_locator', 'buildLocator'),
                               ('name', 'name')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/statistics/{name}', 'GET',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_statistic_values_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_build_statistic_values  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_statistic_values_with_http_info(build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str fields:
    :return: Properties
        If the method is called asynchronously, returns the request thread.
    """
    params = locals()
    accepted = ['build_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Fold **kwargs into the params mapping, rejecting unknown names.
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_statistic_values" % arg
            )
        params[arg] = value
    del params['kwargs']

    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_statistic_values`")  # noqa: E501

    # A TeamCityObject stands in for its locator string in the path.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'buildLocator': locator}

    query_params = [('fields', params['fields'])] if 'fields' in params else []

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/statistics', 'GET',
        path_params,
        query_params,
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type='Properties',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_build_status_icon_with_http_info(self, build_locator, suffix, **kwargs):  # noqa: E501
    """serve_build_status_icon  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_build_status_icon_with_http_info(build_locator, suffix, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str suffix: (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['build_locator', 'suffix']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Merge **kwargs into params, rejecting anything unexpected.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_status_icon" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_status_icon`")  # noqa: E501
    # verify the required parameter 'suffix' is set
    if params.get('suffix') is None:
        raise ValueError("Missing the required parameter `suffix` when calling `serve_build_status_icon`")  # noqa: E501
    # Bug fix: the pattern check used to run on any value and raised a
    # confusing TypeError when `suffix` was a TeamCityObject (which this
    # method otherwise supports below).  Restrict it to string values.
    # NOTE: the generated pattern `(.*)?` matches every string, so the
    # ValueError branch is unreachable for str input; kept for parity
    # with the generated spec.
    if isinstance(params['suffix'], six.string_types) and not re.search('(.*)?', params['suffix']):  # noqa: E501
        raise ValueError("Invalid value for parameter `suffix` when calling `serve_build_status_icon`, must conform to the pattern `/(.*)?/`")  # noqa: E501

    path_params = {}
    # TeamCityObject path arguments contribute their locator string.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params['buildLocator'] = locator
    suffix_value = params['suffix']
    if isinstance(suffix_value, TeamCityObject):
        suffix_value = suffix_value.locator_id
    path_params['suffix'] = suffix_value

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/statusIcon{suffix}', 'GET',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_source_file_with_http_info(self, build_locator, file_name, **kwargs):  # noqa: E501
    """serve_source_file  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of waiting for the response.

    >>> thread = api.__serve_source_file_with_http_info(build_locator, file_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str build_locator: (required)
    :param str file_name: (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['build_locator', 'file_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Merge **kwargs into params, rejecting anything unexpected.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_source_file" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'build_locator' is set
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_source_file`")  # noqa: E501
    # verify the required parameter 'file_name' is set
    if params.get('file_name') is None:
        raise ValueError("Missing the required parameter `file_name` when calling `serve_source_file`")  # noqa: E501
    # Bug fix: the pattern check used to run on any value and raised a
    # confusing TypeError when `file_name` was a TeamCityObject (which
    # this method otherwise supports below).  Restrict the `.+`
    # (non-empty) validation to string values.
    if isinstance(params['file_name'], six.string_types) and not re.search('.+', params['file_name']):  # noqa: E501
        raise ValueError("Invalid value for parameter `file_name` when calling `serve_source_file`, must conform to the pattern `/.+/`")  # noqa: E501

    path_params = {}
    # TeamCityObject path arguments contribute their locator string.
    locator = params['build_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params['buildLocator'] = locator
    file_value = params['file_name']
    if isinstance(file_value, TeamCityObject):
        file_value = file_value.locator_id
    path_params['fileName'] = file_value

    return self.api_client.call_api(
        '/app/rest/builds/{buildLocator}/sources/files/{fileName}', 'GET',
        path_params,
        [],    # query params
        {},    # header params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
    def __serve_tags_with_http_info(self, build_locator, **kwargs): # noqa: E501
        """serve_tags # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__serve_tags_with_http_info(build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str build_locator: (required)
        :param str locator:
        :param str fields:
        :return: Tags
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['build_locator', 'locator', 'fields'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method serve_tags" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `serve_tags`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        if 'locator' in params:
            query_params.append(('locator', params['locator'])) # noqa: E501
        if 'fields' in params:
            query_params.append(('fields', params['fields'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/tags', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Tags', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __set_build_number_with_http_info(self, build_locator, **kwargs): # noqa: E501
        """set_build_number # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__set_build_number_with_http_info(build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str build_locator: (required)
        :param str body:
        :return: str
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['build_locator', 'body'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_build_number" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `set_build_number`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/number', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __set_build_status_text_with_http_info(self, build_locator, **kwargs): # noqa: E501
        """set_build_status_text # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__set_build_status_text_with_http_info(build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str build_locator: (required)
        :param str body:
        :return: str
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['build_locator', 'body'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_build_status_text" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `set_build_status_text`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/statusText', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __set_parameter_with_http_info(self, build_locator, **kwargs): # noqa: E501
        """set_parameter # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__set_parameter_with_http_info(build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str build_locator: (required)
        :param ModelProperty body:
        :param str fields:
        :param str fields2:
        :return: ModelProperty
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['build_locator', 'body', 'fields', 'fields2'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_parameter" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `set_parameter`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields'])) # noqa: E501
        # NOTE(review): 'fields2' is emitted under the same 'fields' query key;
        # this looks like a swagger-codegen duplicate-name artifact -- confirm.
        if 'fields2' in params:
            query_params.append(('fields', params['fields2'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/attributes', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ModelProperty', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __set_parameter_0_with_http_info(self, name, build_locator, **kwargs): # noqa: E501
        """set_parameter_0 # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__set_parameter_0_with_http_info(name, build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name: (required)
        :param str build_locator: (required)
        :param ModelProperty body:
        :param str fields:
        :param str fields2:
        :return: ModelProperty
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['name', 'build_locator', 'body', 'fields', 'fields2'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # name, build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_parameter_0" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `set_parameter_0`") # noqa: E501
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `set_parameter_0`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'name' in params:
            if isinstance(params['name'], TeamCityObject):
                path_params['name'] = params['name'].locator_id
            else:
                path_params['name'] = params['name'] # noqa: E501
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields'])) # noqa: E501
        # NOTE(review): 'fields2' is emitted under the same 'fields' query key;
        # this looks like a swagger-codegen duplicate-name artifact -- confirm.
        if 'fields2' in params:
            query_params.append(('fields', params['fields2'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/attributes/{name}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ModelProperty', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __set_parameter_value_long_with_http_info(self, name, build_locator, **kwargs): # noqa: E501
        """set_parameter_value_long # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__set_parameter_value_long_with_http_info(name, build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name: (required)
        :param str build_locator: (required)
        :param str body:
        :param str fields:
        :return: str
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['name', 'build_locator', 'body', 'fields'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # name, build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_parameter_value_long" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `set_parameter_value_long`") # noqa: E501
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `set_parameter_value_long`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'name' in params:
            if isinstance(params['name'], TeamCityObject):
                path_params['name'] = params['name'].locator_id
            else:
                path_params['name'] = params['name'] # noqa: E501
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/attributes/{name}/value', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __set_parameters_with_http_info(self, build_locator, **kwargs): # noqa: E501
        """set_parameters # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__set_parameters_with_http_info(build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str build_locator: (required)
        :param Properties body:
        :param str fields:
        :param str fields2:
        :return: Properties
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['build_locator', 'body', 'fields', 'fields2'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_parameters" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `set_parameters`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields'])) # noqa: E501
        # NOTE(review): 'fields2' is emitted under the same 'fields' query key;
        # this looks like a swagger-codegen duplicate-name artifact -- confirm.
        if 'fields2' in params:
            query_params.append(('fields', params['fields2'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/attributes', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Properties', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def __unpin_build_with_http_info(self, build_locator, **kwargs): # noqa: E501
        """unpin_build # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.__unpin_build_with_http_info(build_locator, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str build_locator: (required)
        :param str body:
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['build_locator', 'body'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is captured on purpose -- 'params' now holds self,
        # build_locator and the raw 'kwargs' dict; do not add locals above here.
        params = locals()
        # Reject unknown keyword arguments, then flatten 'kwargs' into params.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method unpin_build" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'build_locator' is set
        if ('build_locator' not in params or
                params['build_locator'] is None):
            raise ValueError("Missing the required parameter `build_locator` when calling `unpin_build`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # TeamCityObject instances are substituted by their locator id.
        if 'build_locator' in params:
            if isinstance(params['build_locator'], TeamCityObject):
                path_params['buildLocator'] = params['build_locator'].locator_id
            else:
                path_params['buildLocator'] = params['build_locator'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = [] # noqa: E501
        # Delegate the actual HTTP request to the shared API client.
        return self.api_client.call_api(
            '/app/rest/builds/{buildLocator}/pin', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 40.589901
| 224
| 0.606259
| 24,391
| 217,846
| 5.127465
| 0.010742
| 0.081558
| 0.034398
| 0.029937
| 0.981601
| 0.975676
| 0.971239
| 0.964586
| 0.960668
| 0.958597
| 0
| 0.014708
| 0.299969
| 217,846
| 5,366
| 225
| 40.597466
| 0.805389
| 0.276953
| 0
| 0.824556
| 1
| 0
| 0.211043
| 0.047859
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033926
| false
| 0
| 0.005816
| 0
| 0.090792
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c0d8e059bdb20ed411c9b787c46daf111d14372
| 66,951
|
py
|
Python
|
pint_xarray/tests/test_accessors.py
|
TomNicholas/pint-xarray
|
6ee1bf9aff0c6bef51bde2ecf6a62660a9ed6f39
|
[
"Apache-2.0"
] | 7
|
2020-04-08T13:50:22.000Z
|
2020-06-13T03:58:06.000Z
|
pint_xarray/tests/test_accessors.py
|
TomNicholas/pint-xarray
|
6ee1bf9aff0c6bef51bde2ecf6a62660a9ed6f39
|
[
"Apache-2.0"
] | 11
|
2020-04-08T14:10:49.000Z
|
2020-07-08T16:09:22.000Z
|
pint_xarray/tests/test_accessors.py
|
TomNicholas/pint-xarray
|
6ee1bf9aff0c6bef51bde2ecf6a62660a9ed6f39
|
[
"Apache-2.0"
] | 2
|
2020-04-08T13:50:29.000Z
|
2020-04-08T14:08:20.000Z
|
import numpy as np
import pint
import pytest
import xarray as xr
from numpy.testing import assert_array_equal
from pint import Unit, UnitRegistry
from .. import accessors, conversion
from .utils import (
assert_equal,
assert_identical,
assert_units_equal,
raises_regex,
requires_bottleneck,
requires_dask_array,
requires_scipy,
)
# Escalate pint's UnitStrippedWarning to an error for every test in this module.
pytestmark = [
    pytest.mark.filterwarnings("error::pint.UnitStrippedWarning"),
]
# make sure scalars are converted to 0d arrays so quantities can
# always be treated like ndarrays
unit_registry = UnitRegistry(force_ndarray=True)
# Shorthands used throughout the tests below.
Quantity = unit_registry.Quantity
nan = np.nan
def assert_all_str_or_none(mapping):
    """Assert that every value in *mapping* is either a str or None."""
    __tracebackhide__ = True
    offending = {
        key: value
        for key, value in mapping.items()
        if not (value is None or isinstance(value, str))
    }
    assert not offending, f"Not all values are str or None: {offending}"
@pytest.fixture
def example_unitless_da():
    """DataArray carrying units only as attributes (not yet quantified)."""
    values = np.linspace(0, 10, 20)
    coord_x = np.arange(20)
    coord_u = np.linspace(0, 1, 20)
    return xr.DataArray(
        data=values,
        dims="x",
        coords={"x": ("x", coord_x), "u": ("x", coord_u, {"units": "hour"})},
        attrs={"units": "m"},
    )
@pytest.fixture()
def example_quantity_da():
    """DataArray whose data and 'u' coordinate are already pint quantities."""
    data = np.linspace(0, 10, 20) * unit_registry.m
    positions = np.arange(20)
    hours = np.linspace(0, 1, 20) * unit_registry.hour
    coords = {"x": ("x", positions), "u": ("x", hours)}
    return xr.DataArray(data=data, dims="x", coords=coords)
class TestQuantifyDataArray:
    """Tests for DataArray.pint.quantify."""

    def test_attach_units_from_str(self, example_unitless_da):
        source = example_unitless_da
        quantified = source.pint.quantify("m")
        assert_array_equal(quantified.data.magnitude, source.data)
        # TODO better comparisons for when you can't access the unit_registry?
        assert str(quantified.data.units) == "meter"

    def test_attach_units_given_registry(self, example_unitless_da):
        source = example_unitless_da
        registry = UnitRegistry(force_ndarray=True)
        quantified = source.pint.quantify("m", unit_registry=registry)
        assert_array_equal(quantified.data.magnitude, source.data)
        assert quantified.data.units == registry.Unit("m")

    def test_attach_units_from_attrs(self, example_unitless_da):
        source = example_unitless_da
        quantified = source.pint.quantify()
        assert_array_equal(quantified.data.magnitude, source.data)
        assert str(quantified.data.units) == "meter"
        leftover = conversion.extract_unit_attributes(quantified)
        assert {k: v for k, v in leftover.items() if v is not None} == {}

    def test_attach_units_given_unit_objs(self, example_unitless_da):
        source = example_unitless_da
        registry = UnitRegistry(force_ndarray=True)
        quantified = source.pint.quantify(registry.Unit("m"), unit_registry=registry)
        assert_array_equal(quantified.data.magnitude, source.data)
        assert quantified.data.units == registry.Unit("m")

    def test_error_when_already_units(self, example_quantity_da):
        with raises_regex(ValueError, "already has units"):
            example_quantity_da.pint.quantify()

    def test_error_on_nonsense_units(self, example_unitless_da):
        with pytest.raises(ValueError, match=str(example_unitless_da.name)):
            example_unitless_da.pint.quantify(units="aecjhbav")

    def test_error_on_nonsense_units_attrs(self, example_unitless_da):
        da = example_unitless_da
        da.attrs["units"] = "aecjhbav"
        expected_match = rf"{da.name}: {da.attrs['units']} \(attribute\)"
        with pytest.raises(ValueError, match=expected_match):
            da.pint.quantify()

    def test_parse_integer_inverse(self):
        # Regression test for issue #40
        da = xr.DataArray([10], attrs={"units": "m^-1"})
        assert da.pint.quantify().pint.units == Unit("1 / meter")
@pytest.mark.parametrize("formatter", ("", "P", "C"))
@pytest.mark.parametrize("flags", ("", "~", "#", "~#"))
def test_units_to_str_or_none(formatter, flags):
    """units_to_str_or_none formats units and passes None through untouched."""
    unit_format = f"{{:{flags}{formatter}}}"
    unit_attrs = {None: "m", "a": "s", "b": "degC", "c": "degF", "d": "degK"}
    units = {name: unit_registry.Unit(spec) for name, spec in unit_attrs.items()}
    actual = accessors.units_to_str_or_none(units, unit_format)
    assert {name: unit_format.format(unit) for name, unit in units.items()} == actual
    # round-trip: the formatted strings parse back to the original units
    assert units == {name: unit_registry.Unit(text) for name, text in actual.items()}
    # None values must survive unchanged
    assert {None: None} == accessors.units_to_str_or_none({None: None}, unit_format)
class TestDequantifyDataArray:
    """Tests for DataArray.pint.dequantify."""

    def test_strip_units(self, example_quantity_da):
        stripped = example_quantity_da.pint.dequantify()
        assert isinstance(stripped.data, np.ndarray)
        assert isinstance(stripped.coords["x"].data, np.ndarray)

    def test_attrs_reinstated(self, example_quantity_da):
        stripped = example_quantity_da.pint.dequantify()
        units = conversion.extract_units(example_quantity_da)
        attrs = conversion.extract_unit_attributes(stripped)
        assert units == attrs
        assert_all_str_or_none(attrs)

    def test_roundtrip_data(self, example_unitless_da):
        quantified = example_unitless_da.pint.quantify()
        assert_equal(quantified.pint.dequantify(), example_unitless_da)
class TestPropertiesDataArray:
    """Tests for the .pint.magnitude / .pint.units accessors."""

    def test_magnitude_getattr(self, example_quantity_da):
        assert not isinstance(example_quantity_da.pint.magnitude, Quantity)

    def test_magnitude_getattr_unitless(self, example_unitless_da):
        da = example_unitless_da
        xr.testing.assert_duckarray_equal(da.pint.magnitude, da.data)

    def test_units_getattr(self, example_quantity_da):
        units = example_quantity_da.pint.units
        assert isinstance(units, Unit)
        assert units == unit_registry.m

    def test_units_setattr(self, example_quantity_da):
        with pytest.raises(ValueError):
            example_quantity_da.pint.units = "s"

    def test_units_getattr_unitless(self, example_unitless_da):
        assert example_unitless_da.pint.units is None

    def test_units_setattr_unitless(self, example_unitless_da):
        da = example_unitless_da
        da.pint.units = unit_registry.s
        assert da.pint.units == unit_registry.s
@pytest.fixture()
def example_unitless_ds():
    """Dataset with plain ndarray variables and units stored in attrs."""
    time = np.arange(20)
    data_vars = {
        "users": (["t"], np.linspace(0, 10, 20)),
        "funds": (["t"], np.logspace(0, 10, 20)),
    }
    ds = xr.Dataset(data_vars=data_vars, coords={"t": time})
    ds["users"].attrs["units"] = ""
    ds["funds"].attrs["units"] = "pound"
    return ds
@pytest.fixture()
def example_quantity_ds():
    """Dataset whose variables are already pint quantities."""
    time = np.arange(20)
    data_vars = {
        "users": (["t"], np.linspace(0, 10, 20) * unit_registry.dimensionless),
        "funds": (["t"], np.logspace(0, 10, 20) * unit_registry.pound),
    }
    return xr.Dataset(data_vars=data_vars, coords={"t": time})
class TestQuantifyDataSet:
    """Tests for Dataset.pint.quantify."""

    def test_attach_units_from_str(self, example_unitless_ds):
        source = example_unitless_ds
        quantified = source.pint.quantify()
        assert_array_equal(quantified["users"].data.magnitude, source["users"].data)
        assert str(quantified["users"].data.units) == "dimensionless"

    def test_attach_units_given_registry(self, example_unitless_ds):
        source = example_unitless_ds
        source["users"].attrs.clear()
        quantified = source.pint.quantify(
            {"users": "dimensionless"}, unit_registry=unit_registry
        )
        assert_array_equal(quantified["users"].data.magnitude, source["users"].data)
        assert str(quantified["users"].data.units) == "dimensionless"

    def test_attach_units_from_attrs(self, example_unitless_ds):
        source = example_unitless_ds
        source["users"].attrs.clear()
        quantified = source.pint.quantify({"users": "dimensionless"})
        assert_array_equal(quantified["users"].data.magnitude, source["users"].data)
        assert str(quantified["users"].data.units) == "dimensionless"
        leftover = conversion.extract_unit_attributes(quantified)
        assert {k: v for k, v in leftover.items() if v is not None} == {}

    def test_attach_units_given_unit_objs(self, example_unitless_ds):
        source = example_unitless_ds
        source["users"].attrs.clear()
        dimensionless = unit_registry.Unit("dimensionless")
        quantified = source.pint.quantify({"users": dimensionless})
        assert_array_equal(quantified["users"].data.magnitude, source["users"].data)
        assert str(quantified["users"].data.units) == "dimensionless"

    def test_error_when_already_units(self, example_quantity_ds):
        with raises_regex(ValueError, "already has units"):
            example_quantity_ds.pint.quantify({"funds": "pounds"})

    def test_error_on_nonsense_units(self, example_unitless_ds):
        with pytest.raises(ValueError):
            example_unitless_ds.pint.quantify(units={"users": "aecjhbav"})

    def test_error_on_nonsense_units_attrs(self, example_unitless_ds):
        ds = example_unitless_ds
        ds.users.attrs["units"] = "aecjhbav"
        expected_match = rf"'users': {ds.users.attrs['units']} \(attribute\)"
        with pytest.raises(ValueError, match=expected_match):
            ds.pint.quantify()

    def test_error_indicates_problematic_variable(self, example_unitless_ds):
        with pytest.raises(ValueError, match="'users'"):
            example_unitless_ds.pint.quantify(units={"users": "aecjhbav"})
class TestDequantifyDataSet:
    """Tests for Dataset.pint.dequantify."""

    def test_strip_units(self, example_quantity_ds):
        stripped = example_quantity_ds.pint.dequantify()
        assert all(
            isinstance(var.data, np.ndarray) for var in stripped.variables.values()
        )

    def test_attrs_reinstated(self, example_quantity_ds):
        stripped = example_quantity_ds.pint.dequantify()
        units = conversion.extract_units(example_quantity_ds)
        # workaround for Unit("dimensionless") != str(Unit("dimensionless"))
        units = {
            key: str(value) if isinstance(value, Unit) else value
            for key, value in units.items()
        }
        attrs = conversion.extract_unit_attributes(stripped)
        assert units == attrs
        assert_all_str_or_none(attrs)

    def test_roundtrip_data(self, example_unitless_ds):
        source = example_unitless_ds
        quantified = source.pint.quantify()
        assert_equal(quantified.pint.dequantify(), source)
        assert_equal(quantified, quantified.pint.dequantify().pint.quantify())
# The parametrize table pairs an input object with target units and either the
# expected converted object or the exception type pint-xarray should raise.
@pytest.mark.parametrize(
    ["obj", "units", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {"a": ("x", Quantity([0, 1], "m")), "b": ("x", Quantity([2, 4], "s"))}
            ),
            {"a": "mm", "b": "ms"},
            xr.Dataset(
                {
                    "a": ("x", Quantity([0, 1000], "mm")),
                    "b": ("x", Quantity([2000, 4000], "ms")),
                }
            ),
            None,
            id="Dataset-compatible units-data",
        ),
        pytest.param(
            xr.Dataset(
                {"a": ("x", Quantity([0, 1], "km")), "b": ("x", Quantity([2, 4], "cm"))}
            ),
            "m",
            xr.Dataset(
                {
                    "a": ("x", Quantity([0, 1000], "m")),
                    "b": ("x", Quantity([0.02, 0.04], "m")),
                }
            ),
            None,
            id="Dataset-compatible units-data-str",
        ),
        pytest.param(
            xr.Dataset(
                {"a": ("x", Quantity([0, 1], "m")), "b": ("x", Quantity([2, 4], "s"))}
            ),
            {"a": "ms", "b": "mm"},
            None,
            ValueError,
            id="Dataset-incompatible units-data",
        ),
        pytest.param(
            xr.Dataset(coords={"x": ("x", [2, 4], {"units": Unit("s")})}),
            {"x": "ms"},
            xr.Dataset(coords={"x": ("x", [2000, 4000], {"units": Unit("ms")})}),
            None,
            id="Dataset-compatible units-dims",
        ),
        pytest.param(
            xr.Dataset(coords={"x": ("x", [2, 4], {"units": Unit("s")})}),
            {"x": "mm"},
            None,
            ValueError,
            id="Dataset-incompatible units-dims",
        ),
        pytest.param(
            xr.DataArray(Quantity([0, 1], "m"), dims="x"),
            {None: "mm"},
            xr.DataArray(Quantity([0, 1000], "mm"), dims="x"),
            None,
            id="DataArray-compatible units-data",
        ),
        pytest.param(
            xr.DataArray(Quantity([0, 1], "m"), dims="x"),
            "mm",
            xr.DataArray(Quantity([0, 1000], "mm"), dims="x"),
            None,
            id="DataArray-compatible units-data-str",
        ),
        pytest.param(
            xr.DataArray(Quantity([0, 1], "m"), dims="x", name="a"),
            {"a": "mm"},
            xr.DataArray(Quantity([0, 1000], "mm"), dims="x", name="a"),
            None,
            id="DataArray-compatible units-data-by name",
        ),
        pytest.param(
            xr.DataArray(Quantity([0, 1], "m"), dims="x"),
            {None: "ms"},
            None,
            ValueError,
            id="DataArray-incompatible units-data",
        ),
        pytest.param(
            xr.DataArray(
                [0, 1], dims="x", coords={"x": ("x", [2, 4], {"units": Unit("s")})}
            ),
            {"x": "ms"},
            xr.DataArray(
                [0, 1],
                dims="x",
                coords={"x": ("x", [2000, 4000], {"units": Unit("ms")})},
            ),
            None,
            id="DataArray-compatible units-dims",
        ),
        pytest.param(
            xr.DataArray(
                [0, 1], dims="x", coords={"x": ("x", [2, 4], {"units": Unit("s")})}
            ),
            {"x": "mm"},
            None,
            ValueError,
            id="DataArray-incompatible units-dims",
        ),
    ),
)
def test_to(obj, units, expected, error):
    """pint.to converts *obj* to *units*, or raises *error* when incompatible."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.to(units)
    else:
        actual = obj.pint.to(units)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.sel`: quantity indexers are converted to the coordinates' units before
# label lookup; dimensionally incompatible indexers raise KeyError.
@pytest.mark.parametrize(
    ["obj", "indexers", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            xr.Dataset(
                {
                    "x": ("x", [10, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            xr.Dataset(
                {
                    "x": ("x", [1, 3], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [1], {"units": unit_registry.Unit("min")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "s"), "y": Quantity([1], "m")},
            None,
            KeyError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            xr.DataArray(
                [[0], [4]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            xr.DataArray(
                [[0], [4]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [1, 3], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [1], {"units": unit_registry.Unit("min")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            KeyError,
            id="DataArray-incompatible units",
        ),
    ),
)
def test_sel(obj, indexers, expected, error):
    """Check ``.pint.sel``: unit-aware label selection, or raising ``error``."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.sel(indexers)
    else:
        actual = obj.pint.sel(indexers)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.loc[...]` (read access): same unit-aware label lookup semantics as
# `.pint.sel` — the parametrize data below mirrors the test_sel cases.
@pytest.mark.parametrize(
    ["obj", "indexers", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            xr.Dataset(
                {
                    "x": ("x", [10, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            xr.Dataset(
                {
                    "x": ("x", [1, 3], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [1], {"units": unit_registry.Unit("min")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "s"), "y": Quantity([1], "m")},
            None,
            KeyError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            xr.DataArray(
                [[0], [4]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            xr.DataArray(
                [[0], [4]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [1, 3], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [1], {"units": unit_registry.Unit("min")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            KeyError,
            id="DataArray-incompatible units",
        ),
    ),
)
def test_loc(obj, indexers, expected, error):
    """Check ``.pint.loc[...]`` read access with unit-aware labels."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.loc[indexers]
    else:
        actual = obj.pint.loc[indexers]
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.loc[...] = values` (write access): indexers are matched by unit-aware
# labels; assigned values are converted to the data's units (km -> m below).
# Incompatible indexer units raise KeyError; incompatible value units raise
# pint.DimensionalityError.
@pytest.mark.parametrize(
    ["obj", "indexers", "values", "expected", "error"],
    (
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            [[-1], [-2]],
            xr.DataArray(
                [[-1, 1], [2, 3], [-2, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="coords-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            [[-1], [-2]],
            xr.DataArray(
                [[-1, 1], [2, 3], [-2, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="coords-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3], "s"), "y": Quantity([1], "m")},
            [[-1], [-2]],
            None,
            KeyError,
            id="coords-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([[0, 1], [2, 3], [4, 5]], "m"),
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            Quantity([[-1], [-2]], "m"),
            xr.DataArray(
                Quantity([[-1, 1], [2, 3], [-2, 5]], "m"),
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="data-identical units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([[0, 1], [2, 3], [4, 5]], "m"),
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            Quantity([[-1], [-2]], "km"),
            xr.DataArray(
                Quantity([[-1000, 1], [2, 3], [-2000, 5]], "m"),
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="data-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([[0, 1], [2, 3], [4, 5]], "m"),
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            Quantity([[-1], [-2]], "s"),
            None,
            pint.DimensionalityError,
            id="data-incompatible units",
        ),
    ),
)
def test_loc_setitem(obj, indexers, values, expected, error):
    """Check ``.pint.loc[...] = values`` mutates ``obj`` in place (or raises)."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.loc[indexers] = values
    else:
        # assignment mutates `obj` itself, so the assertions run against `obj`
        obj.pint.loc[indexers] = values
        assert_units_equal(obj, expected)
        assert_identical(obj, expected)
# `.pint.drop_sel`: labels matching the (unit-converted) indexers are dropped.
# KeyError is expected both for dimensionally incompatible indexers and for
# compatible indexers whose converted values are not present ("not found").
@pytest.mark.parametrize(
    ["obj", "indexers", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            xr.Dataset(
                {
                    "x": ("x", [20], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [120], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            xr.Dataset(
                {
                    "x": ("x", [20], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [120], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "s"), "y": Quantity([1], "m")},
            None,
            KeyError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([10, 30], "m"), "y": Quantity([60], "min")},
            None,
            KeyError,
            id="Dataset-compatible units-not found",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
            xr.DataArray(
                [[3]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [20], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [120], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3], "m"), "y": Quantity([1], "min")},
            xr.DataArray(
                [[3]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [20], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [120], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            KeyError,
            id="DataArray-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "m"), "y": Quantity([60], "min")},
            None,
            KeyError,
            id="DataArray-compatible units-not found",
        ),
    ),
)
def test_drop_sel(obj, indexers, expected, error):
    """Check ``.pint.drop_sel`` drops unit-aware labels or raises ``error``."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.drop_sel(indexers)
    else:
        actual = obj.pint.drop_sel(indexers)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.chunk`: chunking a quantified object must be equivalent to
# dequantify -> chunk -> quantify (dask cannot wrap pint arrays directly).
@requires_dask_array
@pytest.mark.parametrize(
    "obj",
    (
        pytest.param(
            xr.Dataset(
                {"a": ("x", np.linspace(0, 1, 11))},
                coords={"u": ("x", np.arange(11))},
            ),
            id="Dataset-no units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": (
                        "x",
                        Quantity(np.linspace(0, 1, 11), "m"),
                    )
                },
                coords={
                    "u": (
                        "x",
                        Quantity(np.arange(11), "m"),
                    )
                },
            ),
            id="Dataset-units",
        ),
        pytest.param(
            xr.DataArray(
                np.linspace(0, 1, 11),
                coords={
                    "u": (
                        "x",
                        np.arange(11),
                    )
                },
                dims="x",
            ),
            id="DataArray-no units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity(np.linspace(0, 1, 11), "m"),
                coords={
                    "u": (
                        "x",
                        Quantity(np.arange(11), "m"),
                    )
                },
                dims="x",
            ),
            id="DataArray-units",
        ),
    ),
)
def test_chunk(obj):
    """Check ``.pint.chunk`` against the dequantify/chunk/quantify round trip."""
    actual = obj.pint.chunk({"x": 2})
    # reference result: strip units, chunk with plain xarray, re-attach units
    expected = (
        obj.pint.dequantify().chunk({"x": 2}).pint.quantify(unit_registry=unit_registry)
    )
    assert_units_equal(actual, expected)
    assert_identical(actual, expected)
# `.pint.reindex`: new labels are matched after unit conversion; positions
# without a match are filled with NaN. Incompatible units raise ValueError.
@pytest.mark.parametrize(
    ["obj", "indexers", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([10, 30, 50], "dm"), "y": Quantity([0, 120, 240], "s")},
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 120, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([0, 1, 3, 5], "m"), "y": Quantity([0, 2, 4], "min")},
            xr.Dataset(
                {
                    "x": ("x", [0, 1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2, 4], {"units": unit_registry.Unit("min")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "s"), "y": Quantity([1], "m")},
            None,
            ValueError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "b": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"a": Quantity([1, 3], "s"), "b": Quantity([1], "m")},
            None,
            ValueError,
            id="Dataset-incompatible units-invalid dims",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30, 50], "dm"), "y": Quantity([0, 240], "s")},
            xr.DataArray(
                [[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 240], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3, 5], "m"), "y": Quantity([0, 2], "min")},
            xr.DataArray(
                [[np.nan, 1], [np.nan, 5], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2], {"units": unit_registry.Unit("min")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            ValueError,
            id="DataArray-incompatible units",
        ),
        # NOTE(review): this param is byte-identical to the previous one;
        # judging by the Dataset "invalid dims" case above, it was probably
        # meant to use non-dimension indexer names — confirm and fix upstream.
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            ValueError,
            id="DataArray-incompatible units-invalid dims",
        ),
    ),
)
def test_reindex(obj, indexers, expected, error):
    """Check ``.pint.reindex`` with unit-aware target labels (or ``error``)."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.reindex(indexers)
    else:
        actual = obj.pint.reindex(indexers)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.reindex_like`: same semantics as reindex, but the target labels come
# from another object's indexes instead of an explicit indexer mapping.
@pytest.mark.parametrize(
    ["obj", "other", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 120, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 120, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [0, 1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2, 4], {"units": unit_registry.Unit("min")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [0, 1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2, 4], {"units": unit_registry.Unit("min")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [1, 3], {"units": unit_registry.Unit("s")}),
                    "y": ("y", [1], {"units": unit_registry.Unit("m")}),
                }
            ),
            None,
            ValueError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.DataArray(
                [[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 240], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            xr.Dataset(
                {
                    "x": ("x", [1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2], {"units": unit_registry.Unit("min")}),
                }
            ),
            xr.DataArray(
                [[np.nan, 1], [np.nan, 5], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2], {"units": unit_registry.Unit("min")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30], {"units": unit_registry.Unit("s")}),
                    "y": ("y", [60], {"units": unit_registry.Unit("m")}),
                }
            ),
            None,
            ValueError,
            id="DataArray-incompatible units",
        ),
    ),
)
def test_reindex_like(obj, other, expected, error):
    """Check ``.pint.reindex_like`` against another object's unit-aware indexes."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.reindex_like(other)
    else:
        actual = obj.pint.reindex_like(other)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.interp`: interpolation to new (unit-aware) coordinates; out-of-range
# targets become NaN, quantified data keeps its units, incompatible indexer
# units raise ValueError. Needs scipy for the interpolation backend.
@requires_scipy
@pytest.mark.parametrize(
    ["obj", "indexers", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([10, 30, 50], "dm"), "y": Quantity([0, 120, 240], "s")},
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 120, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([0, 1, 3, 5], "m"), "y": Quantity([0, 2, 4], "min")},
            xr.Dataset(
                {
                    "x": ("x", [0, 1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2, 4], {"units": unit_registry.Unit("min")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"x": Quantity([1, 3], "s"), "y": Quantity([1], "m")},
            None,
            ValueError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "b": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            {"a": Quantity([1, 3], "s"), "b": Quantity([1], "m")},
            None,
            ValueError,
            id="Dataset-incompatible units-invalid dims",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": (("x", "y"), Quantity([[0, 1], [2, 3], [4, 5]], "kg")),
                    "x": [10, 20, 30],
                    "y": [60, 120],
                }
            ),
            {
                "x": [15, 25],
                "y": [75, 105],
            },
            xr.Dataset(
                {
                    "a": (("x", "y"), Quantity([[1.25, 1.75], [3.25, 3.75]], "kg")),
                    "x": [15, 25],
                    "y": [75, 105],
                }
            ),
            None,
            id="Dataset-data units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30, 50], "dm"), "y": Quantity([0, 240], "s")},
            xr.DataArray(
                [[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 240], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([1, 3, 5], "m"), "y": Quantity([0, 2], "min")},
            xr.DataArray(
                [[np.nan, 1], [np.nan, 5], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2], {"units": unit_registry.Unit("min")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            ValueError,
            id="DataArray-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([[0, 1], [2, 3], [4, 5]], "kg"),
                dims=("x", "y"),
                coords={
                    "x": [10, 20, 30],
                    "y": [60, 120],
                },
            ),
            {
                "x": [15, 25],
                "y": [75, 105],
            },
            xr.DataArray(
                Quantity([[1.25, 1.75], [3.25, 3.75]], "kg"),
                dims=("x", "y"),
                coords={
                    "x": [15, 25],
                    "y": [75, 105],
                },
            ),
            None,
            id="DataArray-data units",
        ),
        # NOTE(review): these arguments are identical to the
        # "DataArray-incompatible units" param above; judging by the Dataset
        # "invalid dims" case, non-dimension indexer names were probably
        # intended here — confirm and fix upstream.
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            {"x": Quantity([10, 30], "s"), "y": Quantity([60], "m")},
            None,
            ValueError,
            id="DataArray-incompatible units-invalid dims",
        ),
    ),
)
def test_interp(obj, indexers, expected, error):
    """Check ``.pint.interp`` with unit-aware target coordinates (or ``error``)."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.interp(indexers)
    else:
        actual = obj.pint.interp(indexers)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.interp_like`: like interp, but the target coordinates are taken from
# another object's indexes. Needs scipy for the interpolation backend.
@requires_scipy
@pytest.mark.parametrize(
    ["obj", "other", "expected", "error"],
    (
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 120, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 120, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            None,
            id="Dataset-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [0, 1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2, 4], {"units": unit_registry.Unit("min")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [0, 1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2, 4], {"units": unit_registry.Unit("min")}),
                }
            ),
            None,
            id="Dataset-compatible units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.Dataset(
                {
                    "x": ("x", [1, 3], {"units": unit_registry.Unit("s")}),
                    "y": ("y", [1], {"units": unit_registry.Unit("m")}),
                }
            ),
            None,
            ValueError,
            id="Dataset-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 240], {"units": unit_registry.Unit("s")}),
                }
            ),
            xr.DataArray(
                [[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 30, 50], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [0, 240], {"units": unit_registry.Unit("s")}),
                },
            ),
            None,
            id="DataArray-identical units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": (("x", "y"), Quantity([[0, 1], [2, 3], [4, 5]], "kg")),
                    "x": [10, 20, 30],
                    "y": [60, 120],
                }
            ),
            xr.Dataset(
                {
                    "x": [15, 25],
                    "y": [75, 105],
                }
            ),
            xr.Dataset(
                {
                    "a": (("x", "y"), Quantity([[1.25, 1.75], [3.25, 3.75]], "kg")),
                    "x": [15, 25],
                    "y": [75, 105],
                }
            ),
            None,
            id="Dataset-data units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            xr.Dataset(
                {
                    "x": ("x", [1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2], {"units": unit_registry.Unit("min")}),
                }
            ),
            xr.DataArray(
                [[np.nan, 1], [np.nan, 5], [np.nan, np.nan]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [1, 3, 5], {"units": unit_registry.Unit("m")}),
                    "y": ("y", [0, 2], {"units": unit_registry.Unit("min")}),
                },
            ),
            None,
            id="DataArray-compatible units",
        ),
        pytest.param(
            xr.DataArray(
                [[0, 1], [2, 3], [4, 5]],
                dims=("x", "y"),
                coords={
                    "x": ("x", [10, 20, 30], {"units": unit_registry.Unit("dm")}),
                    "y": ("y", [60, 120], {"units": unit_registry.Unit("s")}),
                },
            ),
            xr.Dataset(
                {
                    "x": ("x", [10, 30], {"units": unit_registry.Unit("s")}),
                    "y": ("y", [60], {"units": unit_registry.Unit("m")}),
                }
            ),
            None,
            ValueError,
            id="DataArray-incompatible units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([[0, 1], [2, 3], [4, 5]], "kg"),
                dims=("x", "y"),
                coords={
                    "x": [10, 20, 30],
                    "y": [60, 120],
                },
            ),
            xr.Dataset(
                {
                    "x": [15, 25],
                    "y": [75, 105],
                }
            ),
            xr.DataArray(
                Quantity([[1.25, 1.75], [3.25, 3.75]], "kg"),
                dims=("x", "y"),
                coords={
                    "x": [15, 25],
                    "y": [75, 105],
                },
            ),
            None,
            id="DataArray-data units",
        ),
    ),
)
def test_interp_like(obj, other, expected, error):
    """Check ``.pint.interp_like`` against another object's unit-aware indexes."""
    if error is not None:
        with pytest.raises(error):
            obj.pint.interp_like(other)
    else:
        actual = obj.pint.interp_like(other)
        assert_units_equal(actual, expected)
        assert_identical(actual, expected)
# `.pint.ffill`: forward-fill NaNs along a dimension. Data variables are
# filled; non-dimension coordinates are left untouched. Units, when present,
# are preserved. Needs bottleneck for the fill implementation.
@requires_bottleneck
@pytest.mark.parametrize(
    ["obj", "expected"],
    (
        pytest.param(
            xr.Dataset(
                {"a": ("x", [nan, 0, nan, 1, nan, nan, 2, nan])},
                coords={"u": ("x", [nan, 0, nan, 1, nan, nan, 2, nan])},
            ),
            xr.Dataset(
                {"a": ("x", [nan, 0, 0, 1, 1, 1, 2, 2])},
                coords={"u": ("x", [nan, 0, nan, 1, nan, nan, 2, nan])},
            ),
            id="Dataset-no units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
            ),
            xr.Dataset(
                {"a": ("x", Quantity([nan, 0, 0, 1, 1, 1, 2, 2], "m"))},
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
            ),
            id="Dataset-units",
        ),
        pytest.param(
            xr.DataArray(
                [nan, 0, nan, 1, nan, nan, 2, nan],
                coords={
                    "u": (
                        "x",
                        [nan, 0, nan, 1, nan, nan, 2, nan],
                    )
                },
                dims="x",
            ),
            xr.DataArray(
                [nan, 0, 0, 1, 1, 1, 2, 2],
                coords={
                    "u": (
                        "x",
                        [nan, 0, nan, 1, nan, nan, 2, nan],
                    )
                },
                dims="x",
            ),
            id="DataArray-no units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
                dims="x",
            ),
            xr.DataArray(
                Quantity([nan, 0, 0, 1, 1, 1, 2, 2], "m"),
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
                dims="x",
            ),
            id="DataArray-units",
        ),
    ),
)
def test_ffill(obj, expected):
    """Check ``.pint.ffill`` forward-fills quantified data along ``x``."""
    actual = obj.pint.ffill(dim="x")
    assert_identical(actual, expected)
    assert_units_equal(actual, expected)
# `.pint.bfill`: backward-fill NaNs along a dimension — mirror of test_ffill
# (the trailing NaN has no later value, so it stays NaN). Needs bottleneck.
@requires_bottleneck
@pytest.mark.parametrize(
    ["obj", "expected"],
    (
        pytest.param(
            xr.Dataset(
                {"a": ("x", [nan, 0, nan, 1, nan, nan, 2, nan])},
                coords={"u": ("x", [nan, 0, nan, 1, nan, nan, 2, nan])},
            ),
            xr.Dataset(
                {"a": ("x", [0, 0, 1, 1, 2, 2, 2, nan])},
                coords={"u": ("x", [nan, 0, nan, 1, nan, nan, 2, nan])},
            ),
            id="Dataset-no units",
        ),
        pytest.param(
            xr.Dataset(
                {
                    "a": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
            ),
            xr.Dataset(
                {"a": ("x", Quantity([0, 0, 1, 1, 2, 2, 2, nan], "m"))},
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
            ),
            id="Dataset-units",
        ),
        pytest.param(
            xr.DataArray(
                [nan, 0, nan, 1, nan, nan, 2, nan],
                coords={
                    "u": (
                        "x",
                        [nan, 0, nan, 1, nan, nan, 2, nan],
                    )
                },
                dims="x",
            ),
            xr.DataArray(
                [0, 0, 1, 1, 2, 2, 2, nan],
                coords={
                    "u": (
                        "x",
                        [nan, 0, nan, 1, nan, nan, 2, nan],
                    )
                },
                dims="x",
            ),
            id="DataArray-no units",
        ),
        pytest.param(
            xr.DataArray(
                Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
                dims="x",
            ),
            xr.DataArray(
                Quantity([0, 0, 1, 1, 2, 2, 2, nan], "m"),
                coords={
                    "u": (
                        "x",
                        Quantity([nan, 0, nan, 1, nan, nan, 2, nan], "m"),
                    )
                },
                dims="x",
            ),
            id="DataArray-units",
        ),
    ),
)
def test_bfill(obj, expected):
    """Check ``.pint.bfill`` backward-fills quantified data along ``x``."""
    actual = obj.pint.bfill(dim="x")
    assert_identical(actual, expected)
    assert_units_equal(actual, expected)
@pytest.mark.parametrize(
["obj", "expected"],
(
pytest.param(
xr.Dataset(
{"a": ("x", [nan, 0, nan, 1, nan, nan, nan, 2, nan])},
coords={"u": ("x", [nan, 0, nan, 1, nan, nan, nan, 2, nan])},
),
xr.Dataset(
{"a": ("x", [nan, 0, 0.5, 1, 1.25, 1.5, 1.75, 2, nan])},
coords={"u": ("x", [nan, 0, nan, 1, nan, nan, nan, 2, nan])},
),
id="Dataset-no units",
),
pytest.param(
xr.Dataset(
{
"a": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
coords={
"u": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
),
xr.Dataset(
{"a": ("x", Quantity([nan, 0, 0.5, 1, 1.25, 1.5, 1.75, 2, nan], "m"))},
coords={
"u": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
),
id="Dataset-units",
),
pytest.param(
xr.DataArray(
[nan, 0, nan, 1, nan, nan, nan, 2, nan],
coords={
"u": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
dims="x",
),
xr.DataArray(
[nan, 0, 0.5, 1, 1.25, 1.5, 1.75, 2, nan],
coords={
"u": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
dims="x",
),
id="DataArray-units",
),
pytest.param(
xr.DataArray(
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
coords={
"u": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
dims="x",
),
xr.DataArray(
Quantity([nan, 0, 0.5, 1, 1.25, 1.5, 1.75, 2, nan], "m"),
coords={
"u": (
"x",
Quantity([nan, 0, nan, 1, nan, nan, nan, 2, nan], "m"),
)
},
dims="x",
),
id="DataArray-units",
),
),
)
def test_interpolate_na(obj, expected):
    """Interpolate interior NaNs along "x" through the pint accessor.

    Leading/trailing NaNs stay NaN in the expected fixtures; units
    (when present) must be preserved on the result.
    """
    actual = obj.pint.interpolate_na(dim="x")
    assert_identical(actual, expected)
    assert_units_equal(actual, expected)
| 34.123853
| 88
| 0.381966
| 6,595
| 66,951
| 3.788931
| 0.03533
| 0.10325
| 0.128702
| 0.164719
| 0.885505
| 0.859733
| 0.829638
| 0.808708
| 0.797503
| 0.77073
| 0
| 0.051212
| 0.427477
| 66,951
| 1,961
| 89
| 34.141254
| 0.600689
| 0.003869
| 0
| 0.730542
| 0
| 0
| 0.079628
| 0.004814
| 0
| 0
| 0
| 0.00051
| 0.037574
| 1
| 0.025228
| false
| 0.001074
| 0.004294
| 0
| 0.034353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c10bed209282eda1f1b6b7379cdbecacf4ebe0a
| 298
|
py
|
Python
|
frontend/Akina/Akina/captcha.py
|
Yveh/Akina
|
2bf66c749f31905a62d1028dcaf4908981f0e090
|
[
"MIT"
] | null | null | null |
frontend/Akina/Akina/captcha.py
|
Yveh/Akina
|
2bf66c749f31905a62d1028dcaf4908981f0e090
|
[
"MIT"
] | null | null | null |
frontend/Akina/Akina/captcha.py
|
Yveh/Akina
|
2bf66c749f31905a62d1028dcaf4908981f0e090
|
[
"MIT"
] | null | null | null |
import random
def generate_captcha():
    """Create a random definite-integral captcha.

    Returns ``(params, latex)`` where ``params = (upper, slope, offset)``
    (``upper`` is an even number in 2..10, the others 1..5) and ``latex``
    renders the integral of ``slope*x + offset`` from 0 to ``upper``.
    """
    # Draw in the same order as before so the random stream is unchanged.
    upper = random.randint(1, 5) * 2
    slope = random.randint(1, 5)
    offset = random.randint(1, 5)
    params = (upper, slope, offset)
    latex = '$\\int_0^{{ {0} }} {1} x + {2} \\, dx$'.format(upper, slope, offset)
    return (params, latex)
def get_captcha_ans(x):
    """Evaluate the captcha integral for ``x = (upper, slope, offset)``.

    Computes the definite integral of ``slope*t + offset`` over
    ``[0, upper]``, i.e. ``slope*upper**2/2 + offset*upper``, as a float.
    """
    upper, slope, offset = x[0], x[1], x[2]
    return float(upper * offset + 0.5 * upper * upper * slope)
| 29.8
| 96
| 0.533557
| 57
| 298
| 2.719298
| 0.333333
| 0.051613
| 0.270968
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086777
| 0.187919
| 298
| 9
| 97
| 33.111111
| 0.553719
| 0
| 0
| 0
| 1
| 0
| 0.127946
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1c52aadf4ba4bc290f3ff0f1a9f12d5f2e49b5ce
| 72
|
py
|
Python
|
venv/Lib/site-packages/pygame/__pyinstaller/__init__.py
|
unbun/snake.ai
|
0c017357608dc7c06af0ca3ca57d870641461207
|
[
"MIT"
] | 824
|
2016-01-07T19:27:57.000Z
|
2020-08-01T03:15:47.000Z
|
venv/Lib/site-packages/pygame/__pyinstaller/__init__.py
|
unbun/snake.ai
|
0c017357608dc7c06af0ca3ca57d870641461207
|
[
"MIT"
] | 646
|
2016-01-08T02:42:31.000Z
|
2020-08-03T14:13:27.000Z
|
venv/Lib/site-packages/pygame/__pyinstaller/__init__.py
|
unbun/snake.ai
|
0c017357608dc7c06af0ca3ca57d870641461207
|
[
"MIT"
] | 221
|
2016-01-07T22:36:33.000Z
|
2020-07-24T23:30:08.000Z
|
import os
def get_hook_dirs():
    """Return the list of directories containing PyInstaller hooks.

    PyInstaller calls this entry point to discover pygame's hook files,
    which live next to this module.
    """
    hook_dir = os.path.dirname(__file__)
    return [hook_dir]
| 12
| 38
| 0.708333
| 11
| 72
| 4.090909
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 5
| 39
| 14.4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
1c550d9d5f400f5db7d8b1b4909b71e4269e43de
| 42,008
|
py
|
Python
|
Framework/LanguageSupport/thrift/gen-py/MMIStandard/services/MMIServiceBase.py
|
FG-92/MOSIM_Core
|
abc32fd0d2213859b11b6d41193d5d7f760e4104
|
[
"MIT"
] | 19
|
2020-11-30T09:29:11.000Z
|
2021-12-10T06:10:11.000Z
|
Framework/LanguageSupport/thrift/gen-py/MMIStandard/services/MMIServiceBase.py
|
FG-92/MOSIM_Core
|
abc32fd0d2213859b11b6d41193d5d7f760e4104
|
[
"MIT"
] | null | null | null |
Framework/LanguageSupport/thrift/gen-py/MMIStandard/services/MMIServiceBase.py
|
FG-92/MOSIM_Core
|
abc32fd0d2213859b11b6d41193d5d7f760e4104
|
[
"MIT"
] | 6
|
2021-01-20T01:46:37.000Z
|
2021-09-28T10:22:14.000Z
|
#
# Autogenerated by Thrift Compiler (0.13.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
    """Abstract service interface generated by the Thrift compiler.

    Implement these methods in a handler and wrap it in ``Processor`` on
    the server side; invoke them remotely through ``Client``.
    """

    def GetStatus(self):
        """Return the service status (a str->str map per GetStatus_result)."""
        pass

    def GetDescription(self):
        """Return the service description (MServiceDescription per GetDescription_result)."""
        pass

    def Setup(self, avatar, properties):
        """Set up the service for an avatar.

        Parameters:
         - avatar: MAvatarDescription (see Setup_args.thrift_spec)
         - properties: str -> str configuration map
        """
        pass

    def Consume(self, properties):
        """
        Parameters:
         - properties: str -> str map
        """
        pass

    def Dispose(self, properties):
        """
        Parameters:
         - properties: str -> str map
        """
        pass

    def Restart(self, properties):
        """
        Parameters:
         - properties: str -> str map
        """
        pass
class Client(Iface):
    """Synchronous Thrift client stub for the service.

    Each RPC ``X`` is split into ``send_X`` (serialize the request) and
    ``recv_X`` (deserialize the reply); the plain ``X`` method does both.
    """

    def __init__(self, iprot, oprot=None):
        # When no separate output protocol is given, the input protocol
        # is used for both directions.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def GetStatus(self):
        self.send_GetStatus()
        return self.recv_GetStatus()

    def send_GetStatus(self):
        self._oprot.writeMessageBegin('GetStatus', TMessageType.CALL, self._seqid)
        args = GetStatus_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetStatus(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        # Server-side failures arrive as an EXCEPTION message; re-raise locally.
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetStatus_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetStatus failed: unknown result")

    def GetDescription(self):
        self.send_GetDescription()
        return self.recv_GetDescription()

    def send_GetDescription(self):
        self._oprot.writeMessageBegin('GetDescription', TMessageType.CALL, self._seqid)
        args = GetDescription_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetDescription(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetDescription_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetDescription failed: unknown result")

    def Setup(self, avatar, properties):
        """
        Parameters:
         - avatar
         - properties

        """
        self.send_Setup(avatar, properties)
        return self.recv_Setup()

    def send_Setup(self, avatar, properties):
        self._oprot.writeMessageBegin('Setup', TMessageType.CALL, self._seqid)
        args = Setup_args()
        args.avatar = avatar
        args.properties = properties
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_Setup(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = Setup_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "Setup failed: unknown result")

    def Consume(self, properties):
        """
        Parameters:
         - properties

        """
        self.send_Consume(properties)
        return self.recv_Consume()

    def send_Consume(self, properties):
        self._oprot.writeMessageBegin('Consume', TMessageType.CALL, self._seqid)
        args = Consume_args()
        args.properties = properties
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_Consume(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = Consume_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "Consume failed: unknown result")

    def Dispose(self, properties):
        """
        Parameters:
         - properties

        """
        self.send_Dispose(properties)
        return self.recv_Dispose()

    def send_Dispose(self, properties):
        self._oprot.writeMessageBegin('Dispose', TMessageType.CALL, self._seqid)
        args = Dispose_args()
        args.properties = properties
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_Dispose(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = Dispose_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "Dispose failed: unknown result")

    def Restart(self, properties):
        """
        Parameters:
         - properties

        """
        self.send_Restart(properties)
        return self.recv_Restart()

    def send_Restart(self, properties):
        self._oprot.writeMessageBegin('Restart', TMessageType.CALL, self._seqid)
        args = Restart_args()
        args.properties = properties
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_Restart(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = Restart_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "Restart failed: unknown result")
class Processor(Iface, TProcessor):
    """Server-side dispatcher: maps incoming message names to handler calls.

    ``process`` reads one message from ``iprot``, looks the name up in
    ``_processMap``, invokes the wrapped handler, and writes the reply
    (or a serialized exception) to ``oprot``.
    """

    def __init__(self, handler):
        # handler: an Iface implementation that does the actual work.
        self._handler = handler
        self._processMap = {}
        self._processMap["GetStatus"] = Processor.process_GetStatus
        self._processMap["GetDescription"] = Processor.process_GetDescription
        self._processMap["Setup"] = Processor.process_Setup
        self._processMap["Consume"] = Processor.process_Consume
        self._processMap["Dispose"] = Processor.process_Dispose
        self._processMap["Restart"] = Processor.process_Restart
        self._on_message_begin = None

    def on_message_begin(self, func):
        # Optional hook invoked with (name, type, seqid) for each message.
        self._on_message_begin = func

    def process(self, iprot, oprot):
        (name, type, seqid) = iprot.readMessageBegin()
        if self._on_message_begin:
            self._on_message_begin(name, type, seqid)
        if name not in self._processMap:
            # Unknown RPC: drain the payload and reply with UNKNOWN_METHOD.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_GetStatus(self, seqid, iprot, oprot):
        args = GetStatus_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = GetStatus_result()
        try:
            result.success = self._handler.GetStatus()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # Transport errors must propagate: the connection is unusable.
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("GetStatus", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_GetDescription(self, seqid, iprot, oprot):
        args = GetDescription_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = GetDescription_result()
        try:
            result.success = self._handler.GetDescription()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("GetDescription", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_Setup(self, seqid, iprot, oprot):
        args = Setup_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = Setup_result()
        try:
            result.success = self._handler.Setup(args.avatar, args.properties)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("Setup", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_Consume(self, seqid, iprot, oprot):
        args = Consume_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = Consume_result()
        try:
            result.success = self._handler.Consume(args.properties)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("Consume", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_Dispose(self, seqid, iprot, oprot):
        args = Dispose_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = Dispose_result()
        try:
            result.success = self._handler.Dispose(args.properties)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("Dispose", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_Restart(self, seqid, iprot, oprot):
        args = Restart_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = Restart_result()
        try:
            result.success = self._handler.Restart(args.properties)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("Restart", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class GetStatus_args(object):
    """Empty argument struct for GetStatus (the RPC takes no parameters)."""

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: skip every field until STOP (no fields are expected).
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetStatus_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(GetStatus_args)
GetStatus_args.thrift_spec = (
)
class GetStatus_result(object):
    """
    Attributes:
     - success: str -> str status map returned by GetStatus

    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype15, _vtype16, _size14) = iprot.readMapBegin()
                    for _i18 in range(_size14):
                        # Decode bytes -> str only on Python 2; Python 3 strings arrive decoded.
                        _key19 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val20 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.success[_key19] = _val20
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetStatus_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
            for kiter21, viter22 in self.success.items():
                oprot.writeString(kiter21.encode('utf-8') if sys.version_info[0] == 2 else kiter21)
                oprot.writeString(viter22.encode('utf-8') if sys.version_info[0] == 2 else viter22)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(GetStatus_result)
GetStatus_result.thrift_spec = (
    (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 0
)
class GetDescription_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetDescription_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(GetDescription_args)
GetDescription_args.thrift_spec = (
)
class GetDescription_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = MMIStandard.core.ttypes.MServiceDescription()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('GetDescription_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(GetDescription_result)
GetDescription_result.thrift_spec = (
(0, TType.STRUCT, 'success', [MMIStandard.core.ttypes.MServiceDescription, None], None, ), # 0
)
class Setup_args(object):
"""
Attributes:
- avatar
- properties
"""
def __init__(self, avatar=None, properties=None,):
self.avatar = avatar
self.properties = properties
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.avatar = MMIStandard.avatar.ttypes.MAvatarDescription()
self.avatar.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.properties = {}
(_ktype24, _vtype25, _size23) = iprot.readMapBegin()
for _i27 in range(_size23):
_key28 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val29 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.properties[_key28] = _val29
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Setup_args')
if self.avatar is not None:
oprot.writeFieldBegin('avatar', TType.STRUCT, 1)
self.avatar.write(oprot)
oprot.writeFieldEnd()
if self.properties is not None:
oprot.writeFieldBegin('properties', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
for kiter30, viter31 in self.properties.items():
oprot.writeString(kiter30.encode('utf-8') if sys.version_info[0] == 2 else kiter30)
oprot.writeString(viter31.encode('utf-8') if sys.version_info[0] == 2 else viter31)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Setup_args)
Setup_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'avatar', [MMIStandard.avatar.ttypes.MAvatarDescription, None], None, ), # 1
(2, TType.MAP, 'properties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 2
)
class Setup_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = MMIStandard.core.ttypes.MBoolResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Setup_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Setup_result)
Setup_result.thrift_spec = (
(0, TType.STRUCT, 'success', [MMIStandard.core.ttypes.MBoolResponse, None], None, ), # 0
)
class Consume_args(object):
"""
Attributes:
- properties
"""
def __init__(self, properties=None,):
self.properties = properties
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.properties = {}
(_ktype33, _vtype34, _size32) = iprot.readMapBegin()
for _i36 in range(_size32):
_key37 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val38 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.properties[_key37] = _val38
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Consume_args')
if self.properties is not None:
oprot.writeFieldBegin('properties', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
for kiter39, viter40 in self.properties.items():
oprot.writeString(kiter39.encode('utf-8') if sys.version_info[0] == 2 else kiter39)
oprot.writeString(viter40.encode('utf-8') if sys.version_info[0] == 2 else viter40)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Consume_args)
Consume_args.thrift_spec = (
None, # 0
(1, TType.MAP, 'properties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 1
)
class Consume_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.MAP:
self.success = {}
(_ktype42, _vtype43, _size41) = iprot.readMapBegin()
for _i45 in range(_size41):
_key46 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val47 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.success[_key46] = _val47
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Consume_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
for kiter48, viter49 in self.success.items():
oprot.writeString(kiter48.encode('utf-8') if sys.version_info[0] == 2 else kiter48)
oprot.writeString(viter49.encode('utf-8') if sys.version_info[0] == 2 else viter49)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Consume_result)
Consume_result.thrift_spec = (
(0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0
)
class Dispose_args(object):
"""
Attributes:
- properties
"""
def __init__(self, properties=None,):
self.properties = properties
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.properties = {}
(_ktype51, _vtype52, _size50) = iprot.readMapBegin()
for _i54 in range(_size50):
_key55 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val56 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.properties[_key55] = _val56
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Dispose_args')
if self.properties is not None:
oprot.writeFieldBegin('properties', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
for kiter57, viter58 in self.properties.items():
oprot.writeString(kiter57.encode('utf-8') if sys.version_info[0] == 2 else kiter57)
oprot.writeString(viter58.encode('utf-8') if sys.version_info[0] == 2 else viter58)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Dispose_args)
Dispose_args.thrift_spec = (
None, # 0
(1, TType.MAP, 'properties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 1
)
class Dispose_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = MMIStandard.core.ttypes.MBoolResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Dispose_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Dispose_result)
Dispose_result.thrift_spec = (
(0, TType.STRUCT, 'success', [MMIStandard.core.ttypes.MBoolResponse, None], None, ), # 0
)
class Restart_args(object):
"""
Attributes:
- properties
"""
def __init__(self, properties=None,):
self.properties = properties
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.properties = {}
(_ktype60, _vtype61, _size59) = iprot.readMapBegin()
for _i63 in range(_size59):
_key64 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val65 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.properties[_key64] = _val65
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Restart_args')
if self.properties is not None:
oprot.writeFieldBegin('properties', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
for kiter66, viter67 in self.properties.items():
oprot.writeString(kiter66.encode('utf-8') if sys.version_info[0] == 2 else kiter66)
oprot.writeString(viter67.encode('utf-8') if sys.version_info[0] == 2 else viter67)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Restart_args)
Restart_args.thrift_spec = (
None, # 0
(1, TType.MAP, 'properties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 1
)
class Restart_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = MMIStandard.core.ttypes.MBoolResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Restart_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Restart_result)
Restart_result.thrift_spec = (
(0, TType.STRUCT, 'success', [MMIStandard.core.ttypes.MBoolResponse, None], None, ), # 0
)
fix_spec(all_structs)
del all_structs
| 34.919368
| 134
| 0.597196
| 4,383
| 42,008
| 5.485969
| 0.051791
| 0.013724
| 0.024704
| 0.011978
| 0.840383
| 0.815138
| 0.783281
| 0.76989
| 0.765398
| 0.765398
| 0
| 0.010754
| 0.2983
| 42,008
| 1,202
| 135
| 34.948419
| 0.804967
| 0.015926
| 0
| 0.768577
| 1
| 0
| 0.036509
| 0.000514
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123142
| false
| 0.006369
| 0.008493
| 0.038217
| 0.238854
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c567fdb40b7740e932eef8654a83fea5aa1b51c
| 170
|
py
|
Python
|
GenerIter/app/clep_categorise.py
|
GridPresence/GenerIter
|
f0b74cd6c1d1bb9a23fccb28fa8b972f9eeccaf8
|
[
"MIT"
] | 7
|
2021-01-07T19:03:10.000Z
|
2021-03-05T08:05:17.000Z
|
GenerIter/app/clep_categorise.py
|
GridPresence/GenerIter
|
f0b74cd6c1d1bb9a23fccb28fa8b972f9eeccaf8
|
[
"MIT"
] | 2
|
2021-01-26T12:45:58.000Z
|
2021-02-15T11:06:14.000Z
|
GenerIter/app/clep_categorise.py
|
GridPresence/GenerIter
|
f0b74cd6c1d1bb9a23fccb28fa8b972f9eeccaf8
|
[
"MIT"
] | 1
|
2021-01-24T05:21:28.000Z
|
2021-01-24T05:21:28.000Z
|
from GenerIter.app.categorise import Categorise
def main():
"""The Command Line Entry Point for the packaged GenerIter.app.categorise app."""
app = Categorise()
| 28.333333
| 85
| 0.735294
| 22
| 170
| 5.681818
| 0.636364
| 0.312
| 0.352
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164706
| 170
| 5
| 86
| 34
| 0.880282
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
98c421a57dd4403d4627ebb753fd6fede207b49c
| 3,435
|
py
|
Python
|
tests/test_datas.py
|
MarcJavin/ODYNN
|
8187b8db71c8621d17bce94749aecc0a81f11e10
|
[
"MIT"
] | 1
|
2019-01-08T15:55:29.000Z
|
2019-01-08T15:55:29.000Z
|
tests/test_datas.py
|
MarcusJP/ODYNN
|
8187b8db71c8621d17bce94749aecc0a81f11e10
|
[
"MIT"
] | 7
|
2019-12-16T21:03:57.000Z
|
2022-02-10T00:01:22.000Z
|
tests/test_datas.py
|
MarcusJP/ODYNN
|
8187b8db71c8621d17bce94749aecc0a81f11e10
|
[
"MIT"
] | 2
|
2019-11-23T10:21:09.000Z
|
2019-12-13T16:02:46.000Z
|
from unittest import TestCase
from odynn import datas
class TestDatas(TestCase):
def test_check_alpha(self):
datas.check_alpha(show=False)
def test_get_real_data_norm(self):
train, test = datas.get_real_data_norm()
self.assertEqual(train[0][1] - train[0][0], test[0][1] - test[0][0])
def test_get_real_data(self):
dt=0.3
ft = 2000.
train, test = datas.get_real_data(delta=500, final_time=ft, dt=dt)
t, i, [v, ca] = train
self.assertEqual(len(t), round(ft/dt))
self.assertEqual(len(t), len(i))
self.assertEqual(v, None)
self.assertEqual(len(ca), len(t))
t, i, [v, ca] = test
self.assertEqual(len(t), len(i))
self.assertEqual(v, None)
self.assertEqual(len(ca), len(t))
dt = 0.5
ft = 3500.
train, test = datas.get_real_data(delta=500, final_time=ft, dt=dt)
t, i, [v, ca] = train
self.assertEqual(len(t), round(ft / dt))
self.assertEqual(len(t), len(i))
self.assertEqual(v, None)
self.assertEqual(len(ca), len(t))
t, i, [v, ca] = test
self.assertEqual(len(t), len(i))
self.assertEqual(v, None)
self.assertEqual(len(ca), len(t))
def test_give_train(self):
t,i = datas.give_train(dt=0.2, nb_neuron_zero=None, max_t=1200.)
self.assertEqual(t[-1], 1200.-0.2)
self.assertEqual(t[1] - t[0], 0.2)
self.assertEqual(i.shape[0], 1200./0.2)
self.assertEqual(i.ndim, 2)
t, i = datas.give_train(dt=0.5, nb_neuron_zero=3, max_t=800.)
self.assertEqual(t[-1], 800.-0.5)
self.assertEqual(t[1] - t[0], 0.5)
self.assertEqual(i.shape[0], 800. / 0.5)
self.assertEqual(i.shape[2], 4)
self.assertEqual(i.ndim, 3)
def test_give_test(self):
t,i = datas.give_test(dt=0.2, max_t=1200.)
self.assertEqual(t[-1], 1200.-0.2)
self.assertEqual(t[1] - t[0], 0.2)
self.assertEqual(i.shape[0], 1200./0.2)
self.assertEqual(i.ndim, 2)
t, i = datas.give_test(dt=0.5, max_t=800.)
self.assertEqual(t[-1], 800.-0.5)
self.assertEqual(t[1] - t[0], 0.5)
self.assertEqual(i.shape[0], 800. / 0.5)
def test_full4(self):
t, i = datas.full4(dt=0.2, nb_neuron_zero=None, max_t=1200.)
self.assertEqual(t[-1], 1200.-0.2)
self.assertEqual(t[1] - t[0], 0.2)
self.assertEqual(i.shape[0], 1200. / 0.2)
self.assertEqual(i.shape[2], 4)
self.assertEqual(i.ndim, 3)
t, i = datas.full4(dt=0.5, nb_neuron_zero=3, max_t=800.)
self.assertEqual(t[-1], 800.-0.5)
self.assertEqual(t[1] - t[0], 0.5)
self.assertEqual(i.shape[0], 800. / 0.5)
self.assertEqual(i.shape[2], 7)
self.assertEqual(i.ndim, 3)
def test_full4_test(self):
t, i = datas.full4_test(dt=0.2, nb_neuron_zero=None, max_t=1200.)
self.assertEqual(t[-1], 1200.-0.2)
self.assertEqual(t[1] - t[0], 0.2)
self.assertEqual(i.shape[0], 1200. / 0.2)
self.assertEqual(i.shape[2], 4)
self.assertEqual(i.ndim, 3)
t, i = datas.full4_test(dt=0.5, nb_neuron_zero=3, max_t=800.)
self.assertEqual(t[-1], 800.-0.5)
self.assertEqual(t[1] - t[0], 0.5)
self.assertEqual(i.shape[0], 800. / 0.5)
self.assertEqual(i.shape[2], 7)
self.assertEqual(i.ndim, 3)
| 35.78125
| 76
| 0.57147
| 567
| 3,435
| 3.37037
| 0.093474
| 0.400314
| 0.167452
| 0.142334
| 0.88854
| 0.84877
| 0.829409
| 0.787546
| 0.787546
| 0.787546
| 0
| 0.090272
| 0.25182
| 3,435
| 96
| 77
| 35.78125
| 0.653307
| 0
| 0
| 0.691358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.62963
| 1
| 0.08642
| false
| 0
| 0.024691
| 0
| 0.123457
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
98f22b914587bb085e5b9cdd2f2b702aee355a50
| 169
|
py
|
Python
|
tests/test_snek.py
|
lucis-fluxum/snek
|
3579edfb39f3e590a45e176e2181636f5e486ece
|
[
"MIT"
] | null | null | null |
tests/test_snek.py
|
lucis-fluxum/snek
|
3579edfb39f3e590a45e176e2181636f5e486ece
|
[
"MIT"
] | null | null | null |
tests/test_snek.py
|
lucis-fluxum/snek
|
3579edfb39f3e590a45e176e2181636f5e486ece
|
[
"MIT"
] | null | null | null |
from snek import __version__
def test_version():
assert __version__ == '0.1.0'
# fetches dependencies with valid markers
# fetches dependencies with valid extras
| 18.777778
| 41
| 0.763314
| 22
| 169
| 5.454545
| 0.681818
| 0.316667
| 0.383333
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021429
| 0.171598
| 169
| 8
| 42
| 21.125
| 0.835714
| 0.461538
| 0
| 0
| 0
| 0
| 0.056818
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
c72bfc15e6d93e0abba1c641d21bd805206ef353
| 3,205
|
py
|
Python
|
collision_handler.py
|
accelthreat/TankWarz
|
4b9b8a3783dfa24551c8e39b84000ec0a6d78d77
|
[
"Apache-2.0"
] | 1
|
2019-09-19T13:15:20.000Z
|
2019-09-19T13:15:20.000Z
|
collision_handler.py
|
accelthreat/TankWarz
|
4b9b8a3783dfa24551c8e39b84000ec0a6d78d77
|
[
"Apache-2.0"
] | null | null | null |
collision_handler.py
|
accelthreat/TankWarz
|
4b9b8a3783dfa24551c8e39b84000ec0a6d78d77
|
[
"Apache-2.0"
] | null | null | null |
import pymunk
import legume
from constants import Coll_Type
from shared import TankHit
from shared import ProjectileDestroy
class Collision_Handler:
def initialize_handler(space, tanks, projectiles):
projectile_tank_handler = space.add_collision_handler(Coll_Type.TANK, Coll_Type.PROJECTILE)
projectile_environment_handler = space.add_collision_handler(Coll_Type.ENVIRONMENT, Coll_Type.PROJECTILE)
def begin(arbiter, space, data):
tankShape = arbiter.shapes[0]
projectileShape = arbiter.shapes[1]
tank = tanks[tankShape.idn]
if projectiles.get(projectileShape.idn) is not None:
projectile = projectiles[projectileShape.idn]
if projectile.src_idn != tank.idn:
projectile.destroy()
projectiles.pop(projectile.idn)
tank.hit(projectile.damage)
return True
def beginP(arbiter, space, data):
projectileShape = arbiter.shapes[1]
if projectiles.get(projectileShape.idn) is not None:
projectile = projectiles[projectileShape.idn]
projectile.destroy()
projectiles.pop(projectile.idn)
return True
projectile_environment_handler.begin = beginP
projectile_tank_handler.begin = begin
class Server_Collision_Handler:
def initialize_handler(space, tanks, projectiles, server):
projectile_tank_handler = space.add_collision_handler(Coll_Type.TANK, Coll_Type.PROJECTILE)
projectile_environment_handler = space.add_collision_handler(Coll_Type.ENVIRONMENT, Coll_Type.PROJECTILE)
def begin(arbiter, space, data):
tankShape = arbiter.shapes[0]
projectileShape = arbiter.shapes[1]
tank = tanks[tankShape.idn]
if projectiles.get(projectileShape.idn) is not None:
projectile = projectiles[projectileShape.idn]
if projectile.src_idn != tank.idn:
server.game_clients_scores.setdefault(projectile.CLIENT_ID, 0)
if tank.alive:
server.game_clients_scores[projectile.CLIENT_ID] += 10
projectile.destroy()
projectiles.pop(projectile.idn)
tank.hit(projectile.damage)
msg = TankHit()
msg.id.value = tank.idn
msg.projectile_id.value = projectile.idn
server.server.send_reliable_message_to_all(msg)
return True
def beginP(arbiter, space, data):
projectileShape = arbiter.shapes[1]
if projectiles.get(projectileShape.idn) is not None:
projectile = projectiles[projectileShape.idn]
projectile.destroy()
projectiles.pop(projectile.idn)
msg = ProjectileDestroy()
msg.projectile_id.value = projectileShape.idn
server.server.send_reliable_message_to_all(msg)
return True
projectile_environment_handler.begin = beginP
projectile_tank_handler.begin = begin
| 44.513889
| 113
| 0.627457
| 318
| 3,205
| 6.147799
| 0.188679
| 0.036829
| 0.042967
| 0.049105
| 0.822506
| 0.822506
| 0.822506
| 0.822506
| 0.764194
| 0.764194
| 0
| 0.004029
| 0.302964
| 3,205
| 72
| 114
| 44.513889
| 0.871083
| 0
| 0
| 0.730159
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.079365
| 0
| 0.269841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c734eb2dc9d9dbec1e00b3164b20b5be36c5e872
| 66,659
|
py
|
Python
|
libs/PureCloudPlatformClientV2/apis/language_understanding_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 1
|
2021-10-08T20:46:45.000Z
|
2021-10-08T20:46:45.000Z
|
libs/PureCloudPlatformClientV2/apis/language_understanding_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | null | null | null |
libs/PureCloudPlatformClientV2/apis/language_understanding_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LanguageUnderstandingApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class LanguageUnderstandingApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def delete_languageunderstanding_domain(self, domain_id, **kwargs):
"""
Delete an NLU Domain.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_languageunderstanding_domain(domain_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_languageunderstanding_domain" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `delete_languageunderstanding_domain`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def delete_languageunderstanding_domain_feedback_feedback_id(self, domain_id, feedback_id, **kwargs):
"""
Delete the feedback on the NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_languageunderstanding_domain_feedback_feedback_id(domain_id, feedback_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str feedback_id: ID of the Feedback (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'feedback_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_languageunderstanding_domain_feedback_feedback_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `delete_languageunderstanding_domain_feedback_feedback_id`")
# verify the required parameter 'feedback_id' is set
if ('feedback_id' not in params) or (params['feedback_id'] is None):
raise ValueError("Missing the required parameter `feedback_id` when calling `delete_languageunderstanding_domain_feedback_feedback_id`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/feedback/{feedbackId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'feedback_id' in params:
path_params['feedbackId'] = params['feedback_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def delete_languageunderstanding_domain_version(self, domain_id, domain_version_id, **kwargs):
"""
Delete an NLU Domain Version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_languageunderstanding_domain_version(domain_id, domain_version_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_languageunderstanding_domain_version" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `delete_languageunderstanding_domain_version`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `delete_languageunderstanding_domain_version`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domain(self, domain_id, **kwargs):
"""
Find an NLU Domain.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domain(domain_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:return: NluDomain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domain" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `get_languageunderstanding_domain`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomain',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domain_feedback(self, domain_id, **kwargs):
"""
Get all feedback in the given NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domain_feedback(domain_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str intent_name: The top intent name to retrieve feedback for.
:param str assessment: The top assessment to retrieve feedback for.
:param date date_start: Begin of time window as ISO-8601 date.
:param date date_end: End of time window as ISO-8601 date.
:param bool include_deleted: Whether to include soft-deleted items in the result.
:param int page_number: Page number
:param int page_size: Page size
:param bool enable_cursor_pagination: Enable Cursor Pagination
:param str after: The cursor that points to the end of the set of entities that has been returned. This is considered only when enableCursorPagination=true
:param list[str] fields: Fields and properties to get, comma-separated
:return: NluFeedbackListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'intent_name', 'assessment', 'date_start', 'date_end', 'include_deleted', 'page_number', 'page_size', 'enable_cursor_pagination', 'after', 'fields']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domain_feedback" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `get_languageunderstanding_domain_feedback`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/feedback'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
if 'intent_name' in params:
query_params['intentName'] = params['intent_name']
if 'assessment' in params:
query_params['assessment'] = params['assessment']
if 'date_start' in params:
query_params['dateStart'] = params['date_start']
if 'date_end' in params:
query_params['dateEnd'] = params['date_end']
if 'include_deleted' in params:
query_params['includeDeleted'] = params['include_deleted']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'enable_cursor_pagination' in params:
query_params['enableCursorPagination'] = params['enable_cursor_pagination']
if 'after' in params:
query_params['after'] = params['after']
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluFeedbackListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domain_feedback_feedback_id(self, domain_id, feedback_id, **kwargs):
"""
Find a Feedback
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domain_feedback_feedback_id(domain_id, feedback_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str feedback_id: ID of the Feedback (required)
:param list[str] fields: Fields and properties to get, comma-separated
:return: NluFeedbackResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'feedback_id', 'fields']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domain_feedback_feedback_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `get_languageunderstanding_domain_feedback_feedback_id`")
# verify the required parameter 'feedback_id' is set
if ('feedback_id' not in params) or (params['feedback_id'] is None):
raise ValueError("Missing the required parameter `feedback_id` when calling `get_languageunderstanding_domain_feedback_feedback_id`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/feedback/{feedbackId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'feedback_id' in params:
path_params['feedbackId'] = params['feedback_id']
query_params = {}
if 'fields' in params:
query_params['fields'] = params['fields']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluFeedbackResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domain_version(self, domain_id, domain_version_id, **kwargs):
"""
Find an NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domain_version(domain_id, domain_version_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:param bool include_utterances: Whether utterances for intent definition should be included when marshalling response.
:return: NluDomainVersion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id', 'include_utterances']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domain_version" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `get_languageunderstanding_domain_version`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `get_languageunderstanding_domain_version`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
if 'include_utterances' in params:
query_params['includeUtterances'] = params['include_utterances']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersion',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domain_version_report(self, domain_id, domain_version_id, **kwargs):
"""
Retrieved quality report for the specified NLU Domain Version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domain_version_report(domain_id, domain_version_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:return: NluDomainVersionQualityReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domain_version_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `get_languageunderstanding_domain_version_report`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `get_languageunderstanding_domain_version_report`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}/report'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersionQualityReport',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domain_versions(self, domain_id, **kwargs):
"""
Get all NLU Domain Versions for a given Domain.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domain_versions(domain_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param bool include_utterances: Whether utterances for intent definition should be included when marshalling response.
:param int page_number: Page number
:param int page_size: Page size
:return: NluDomainVersionListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'include_utterances', 'page_number', 'page_size']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domain_versions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `get_languageunderstanding_domain_versions`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
if 'include_utterances' in params:
query_params['includeUtterances'] = params['include_utterances']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersionListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_languageunderstanding_domains(self, **kwargs):
"""
Get all NLU Domains.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_languageunderstanding_domains(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_number: Page number
:param int page_size: Page size
:return: NluDomainListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_number', 'page_size']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_languageunderstanding_domains" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/languageunderstanding/domains'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def patch_languageunderstanding_domain(self, domain_id, body, **kwargs):
"""
Update an NLU Domain.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_languageunderstanding_domain(domain_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param NluDomain body: The updated NLU Domain. (required)
:return: NluDomain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_languageunderstanding_domain" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `patch_languageunderstanding_domain`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_languageunderstanding_domain`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomain',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_languageunderstanding_domain_feedback(self, domain_id, body, **kwargs):
"""
Create feedback for the NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_languageunderstanding_domain_feedback(domain_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param NluFeedbackRequest body: The Feedback to create. (required)
:return: NluFeedbackResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_languageunderstanding_domain_feedback" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `post_languageunderstanding_domain_feedback`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_languageunderstanding_domain_feedback`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/feedback'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluFeedbackResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_languageunderstanding_domain_version_detect(self, domain_id, domain_version_id, body, **kwargs):
"""
Detect intent, entities, etc. in the submitted text using the specified NLU domain version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_languageunderstanding_domain_version_detect(domain_id, domain_version_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:param NluDetectionRequest body: The input data to perform detection on. (required)
:return: NluDetectionResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_languageunderstanding_domain_version_detect" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `post_languageunderstanding_domain_version_detect`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `post_languageunderstanding_domain_version_detect`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_languageunderstanding_domain_version_detect`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}/detect'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDetectionResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_languageunderstanding_domain_version_publish(self, domain_id, domain_version_id, **kwargs):
"""
Publish the draft NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_languageunderstanding_domain_version_publish(domain_id, domain_version_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:return: NluDomainVersion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_languageunderstanding_domain_version_publish" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `post_languageunderstanding_domain_version_publish`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `post_languageunderstanding_domain_version_publish`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}/publish'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersion',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_languageunderstanding_domain_version_train(self, domain_id, domain_version_id, **kwargs):
"""
Train the draft NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_languageunderstanding_domain_version_train(domain_id, domain_version_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:return: NluDomainVersionTrainingResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_languageunderstanding_domain_version_train" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `post_languageunderstanding_domain_version_train`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `post_languageunderstanding_domain_version_train`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}/train'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersionTrainingResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_languageunderstanding_domain_versions(self, domain_id, body, **kwargs):
"""
Create an NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_languageunderstanding_domain_versions(domain_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param NluDomainVersion body: The NLU Domain Version to create. (required)
:return: NluDomainVersion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_languageunderstanding_domain_versions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `post_languageunderstanding_domain_versions`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_languageunderstanding_domain_versions`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersion',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_languageunderstanding_domains(self, body, **kwargs):
"""
Create an NLU Domain.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_languageunderstanding_domains(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param NluDomain body: The NLU Domain to create. (required)
:return: NluDomain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_languageunderstanding_domains" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_languageunderstanding_domains`")
resource_path = '/api/v2/languageunderstanding/domains'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomain',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def put_languageunderstanding_domain_version(self, domain_id, domain_version_id, body, **kwargs):
"""
Update an NLU Domain Version.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.put_languageunderstanding_domain_version(domain_id, domain_version_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str domain_id: ID of the NLU domain. (required)
:param str domain_version_id: ID of the NLU domain version. (required)
:param NluDomainVersion body: The updated NLU Domain Version. (required)
:return: NluDomainVersion
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['domain_id', 'domain_version_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_languageunderstanding_domain_version" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'domain_id' is set
if ('domain_id' not in params) or (params['domain_id'] is None):
raise ValueError("Missing the required parameter `domain_id` when calling `put_languageunderstanding_domain_version`")
# verify the required parameter 'domain_version_id' is set
if ('domain_version_id' not in params) or (params['domain_version_id'] is None):
raise ValueError("Missing the required parameter `domain_version_id` when calling `put_languageunderstanding_domain_version`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `put_languageunderstanding_domain_version`")
resource_path = '/api/v2/languageunderstanding/domains/{domainId}/versions/{domainVersionId}'.replace('{format}', 'json')
path_params = {}
if 'domain_id' in params:
path_params['domainId'] = params['domain_id']
if 'domain_version_id' in params:
path_params['domainVersionId'] = params['domain_version_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NluDomainVersion',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
| 42.162555
| 183
| 0.573891
| 6,633
| 66,659
| 5.553294
| 0.042364
| 0.03475
| 0.028506
| 0.032469
| 0.929768
| 0.920619
| 0.913941
| 0.910086
| 0.90528
| 0.893932
| 0
| 0.000847
| 0.34462
| 66,659
| 1,580
| 184
| 42.189241
| 0.84231
| 0.25953
| 0
| 0.840476
| 0
| 0
| 0.235528
| 0.074916
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022619
| false
| 0
| 0.008333
| 0
| 0.053571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c792a32c630cde28805fd69c8edec7fbd4f6c86f
| 228
|
py
|
Python
|
scale_client/sensors/dummy/dummy_threaded_sensor.py
|
prav33nv/scale_client
|
dcbd6ed4c8f4a27606ebef5b5f9dabb2e4f3b806
|
[
"BSD-2-Clause-FreeBSD"
] | 3
|
2018-05-24T00:59:05.000Z
|
2020-01-03T08:03:33.000Z
|
scale_client/sensors/dummy/dummy_threaded_sensor.py
|
prav33nv/scale_client
|
dcbd6ed4c8f4a27606ebef5b5f9dabb2e4f3b806
|
[
"BSD-2-Clause-FreeBSD"
] | 26
|
2015-01-19T22:47:07.000Z
|
2017-05-03T01:43:10.000Z
|
scale_client/sensors/dummy/dummy_threaded_sensor.py
|
prav33nv/scale_client
|
dcbd6ed4c8f4a27606ebef5b5f9dabb2e4f3b806
|
[
"BSD-2-Clause-FreeBSD"
] | 6
|
2015-01-20T20:05:09.000Z
|
2017-06-01T02:19:01.000Z
|
from ..threaded_virtual_sensor import ThreadedVirtualSensor
class DummyThreadedSensor(ThreadedVirtualSensor):
    """Minimal threaded sensor stub: emits a fixed reading under a fixed type.

    Useful for exercising the ThreadedVirtualSensor machinery without any
    real hardware or I/O.
    """

    def read_raw(self):
        # Constant placeholder value; no device is actually read.
        return "dummy_thread_reading"

    def get_type(self):
        # Event-type label under which readings from this sensor are published.
        return "dummy_thread"
| 25.333333
| 59
| 0.758772
| 24
| 228
| 6.916667
| 0.75
| 0.120482
| 0.180723
| 0.253012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 228
| 9
| 60
| 25.333333
| 0.882979
| 0
| 0
| 0
| 0
| 0
| 0.139738
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
40411bed90a5a6b53730be361ff83687672786bc
| 3,653
|
py
|
Python
|
tests/fixers/test_signal_providing_args.py
|
matthiask/django-upgrade
|
f8e2a50e4830a00ef89beb4758bb90e7bb75ab0b
|
[
"MIT"
] | null | null | null |
tests/fixers/test_signal_providing_args.py
|
matthiask/django-upgrade
|
f8e2a50e4830a00ef89beb4758bb90e7bb75ab0b
|
[
"MIT"
] | null | null | null |
tests/fixers/test_signal_providing_args.py
|
matthiask/django-upgrade
|
f8e2a50e4830a00ef89beb4758bb90e7bb75ab0b
|
[
"MIT"
] | null | null | null |
from django_upgrade.data import Settings
from tests.fixers.tools import check_noop, check_transformed
settings = Settings(target_version=(3, 1))
def test_no_deprecated_arg():
    """A Signal() call without providing_args must be left untouched."""
    check_noop(
        """\
from django.dispatch import Signal
Signal(use_caching=True)
""",
        settings,
    )
def test_pos_arg_alone():
    """A lone positional providing_args list is stripped from the call."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(["documented", "arg"])
""",
        """\
from django.dispatch import Signal
Signal()
""",
        settings,
    )
def test_pos_arg_alone_module_imported():
    """The fixer also matches Signal accessed via the dispatch module."""
    check_transformed(
        """\
from django import dispatch
dispatch.Signal(["documented", "arg"])
""",
        """\
from django import dispatch
dispatch.Signal()
""",
        settings,
    )
def test_pos_arg_alone_multiline():
    """A positional providing_args list spanning multiple lines collapses to Signal()."""
    check_transformed(
        """\
from django.dispatch import Signal
my_signal = Signal(
    [
        "documented", "arg"
    ])
""",
        """\
from django.dispatch import Signal
my_signal = Signal()
""",
        settings,
    )
def test_pos_arg_with_caching():
    """With a second positional arg, providing_args becomes None to keep positions."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(["documented", "arg"], True)
""",
        """\
from django.dispatch import Signal
Signal(None, True)
""",
        settings,
    )
def test_kwarg_alone():
    """A sole providing_args keyword argument is removed entirely."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(providing_args=["documented", "arg"])
""",
        """\
from django.dispatch import Signal
Signal()
""",
        settings,
    )
def test_kwarg_with_caching():
    """providing_args is dropped while the use_caching kwarg survives."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(providing_args=["documented", "arg"], use_caching=True)
""",
        """\
from django.dispatch import Signal
Signal(use_caching=True)
""",
        settings,
    )
def test_kwarg_with_caching_no_space():
    """Removal still works when no space separates the two kwargs."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(providing_args=["documented", "arg"],use_caching=True)
""",
        """\
from django.dispatch import Signal
Signal(use_caching=True)
""",
        settings,
    )
def test_kwarg_with_caching_reordered():
    """providing_args is removed even when it appears after use_caching."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(use_caching=True, providing_args=["documented", "arg"])
""",
        """\
from django.dispatch import Signal
Signal(use_caching=True)
""",
        settings,
    )
def test_kwarg_with_caching_multiline():
    """In a multi-line call only the providing_args line is deleted."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(
    providing_args=["documented", "arg"],
    use_caching=True,
)
""",
        """\
from django.dispatch import Signal
Signal(
    use_caching=True,
)
""",
        settings,
    )
def test_kwarg_with_all_extras():
    """A multi-line providing_args list plus trailing comment is fully removed."""
    check_transformed(
        """\
from django.dispatch import Signal
Signal(
    providing_args=[
        "documented",
        "arg",
    ] , # documents the arguments
    use_caching=True,
)
""",
        """\
from django.dispatch import Signal
Signal(
    use_caching=True,
)
""",
        settings,
    )
| 21.362573
| 70
| 0.520668
| 318
| 3,653
| 5.745283
| 0.141509
| 0.120416
| 0.187192
| 0.249589
| 0.875205
| 0.854953
| 0.810619
| 0.753695
| 0.701149
| 0.638205
| 0
| 0.000865
| 0.367096
| 3,653
| 170
| 71
| 21.488235
| 0.78936
| 0
| 0
| 0.308824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161765
| false
| 0
| 0.044118
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4061ccbed0d7a3c16c41418858894002847e8bc1
| 2,614
|
py
|
Python
|
tests/rulelistmodel/gaussianmodel/test_gaussiantarget.py
|
HMProenca/RuleList
|
4e500c3a9813aadb149286458f11460904fd15d4
|
[
"MIT"
] | 8
|
2021-02-23T10:57:29.000Z
|
2022-03-15T10:29:08.000Z
|
tests/rulelistmodel/gaussianmodel/test_gaussiantarget.py
|
HMProenca/RuleList
|
4e500c3a9813aadb149286458f11460904fd15d4
|
[
"MIT"
] | 3
|
2021-02-26T21:54:24.000Z
|
2021-06-09T13:28:10.000Z
|
tests/rulelistmodel/gaussianmodel/test_gaussiantarget.py
|
HMProenca/RuleList
|
4e500c3a9813aadb149286458f11460904fd15d4
|
[
"MIT"
] | 2
|
2021-02-26T21:32:22.000Z
|
2021-11-01T09:32:38.000Z
|
import numpy as np
import pandas as pd
from gmpy2 import bit_mask
from rulelist.rulelistmodel.gaussianmodel.gaussiantarget import GaussianTargets
class TestGaussianTargets(object):
    """Tests for GaussianTargets: bit-array coverage plus per-target mean/variance.

    The original three tests repeated the same six assertion lines verbatim;
    that shared body now lives in the private ``_check_targets`` helper, so
    each test only states its input data and expected statistics.
    """

    @staticmethod
    def _check_targets(dictoutput, expected_number_targets,
                       expected_mean_vector, expected_variance_vector):
        """Build GaussianTargets from *dictoutput* and check its invariants.

        Asserts that the bit array covers every row, that mean/variance have
        one entry per target, and that both match the expected vectors.
        """
        input_target_data = pd.DataFrame(data=dictoutput)
        output_gaussiantargets = GaussianTargets(input_target_data)
        # bit_mask(n) is an all-ones mask over the n rows of the DataFrame.
        assert bit_mask(len(input_target_data)) == output_gaussiantargets.bit_array
        assert expected_number_targets == len(output_gaussiantargets.mean)
        assert expected_number_targets == len(output_gaussiantargets.variance)
        np.testing.assert_array_equal(expected_mean_vector,
                                      output_gaussiantargets.mean)
        np.testing.assert_array_equal(expected_variance_vector,
                                      output_gaussiantargets.variance)

    def test_onetarget(self):
        # Single column 0..99: mean 49.5, variance of range(100).
        self._check_targets(
            {"target1": np.arange(100)},
            1,
            np.array([49.5]),
            np.var([*range(100)]),
        )

    def test_twotargets(self):
        # Second, constant column adds mean 1 and variance 0.
        self._check_targets(
            {"target1": np.arange(100), "target2": np.ones(100)},
            2,
            np.array([49.5, 1]),
            [np.var([*range(100)]), 0],
        )

    def test_onlyzeros(self):
        # All-zero data: degenerate mean and variance of 0.
        self._check_targets(
            {"target1": np.zeros(100)},
            1,
            np.array([0]),
            np.array([0]),
        )
| 44.305085
| 96
| 0.745983
| 303
| 2,614
| 6.085809
| 0.178218
| 0.204989
| 0.102495
| 0.087852
| 0.856833
| 0.856833
| 0.823753
| 0.785792
| 0.785792
| 0.785792
| 0
| 0.020853
| 0.174445
| 2,614
| 59
| 97
| 44.305085
| 0.833642
| 0
| 0
| 0.590909
| 0
| 0
| 0.010707
| 0
| 0
| 0
| 0
| 0
| 0.340909
| 1
| 0.068182
| false
| 0
| 0.090909
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40992c9f60f13e277f2b37561151ac35798a53ea
| 336,807
|
py
|
Python
|
pysnmp/CPQHSV110V3-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/CPQHSV110V3-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/CPQHSV110V3-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module CPQHSV110V3-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CPQHSV110V3-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:11:47 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
iso, Unsigned32, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, IpAddress, MibIdentifier, ObjectIdentity, Bits, ModuleIdentity, Counter32, NotificationType, Integer32, Counter64, TimeTicks, enterprises = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "Unsigned32", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "IpAddress", "MibIdentifier", "ObjectIdentity", "Bits", "ModuleIdentity", "Counter32", "NotificationType", "Integer32", "Counter64", "TimeTicks", "enterprises")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
compaq = MibIdentifier((1, 3, 6, 1, 4, 1, 232))
cpqElementManager = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136))
cpqHSV = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1))
cpqHSVAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 1))
cpqHSVServer = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 2))
hsvObject = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 3))
maHSVMibRev = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 4))
scell = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1))
agent = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 2))
host = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3))
nsc = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4))
shelf = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8))
agManufacturer = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agManufacturer.setStatus('mandatory')
agMajVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agMajVersion.setStatus('mandatory')
agMinVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agMinVersion.setStatus('mandatory')
agHostName = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agHostName.setStatus('mandatory')
agEnterprise = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 5), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agEnterprise.setStatus('mandatory')
agDescription = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agDescription.setStatus('mandatory')
agStatusTable = MibTable((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7), )
if mibBuilder.loadTexts: agStatusTable.setStatus('mandatory')
agentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1), ).setIndexNames((0, "CPQHSV110V3-MIB", "agentEntryIndex"))
if mibBuilder.loadTexts: agentEntry.setStatus('mandatory')
agentEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentEntryIndex.setStatus('mandatory')
agentStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStatus.setStatus('mandatory')
agentEventCode = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentEventCode.setStatus('mandatory')
agentEventLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentEventLevel.setStatus('mandatory')
agentEventTimeDate = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentEventTimeDate.setStatus('mandatory')
agentEventDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 1, 7, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentEventDescription.setStatus('mandatory')
srvCPU = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvCPU.setStatus('mandatory')
srvComputerType = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvComputerType.setStatus('mandatory')
srvModel = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvModel.setStatus('mandatory')
srvSubModel = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvSubModel.setStatus('mandatory')
srvBiosVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvBiosVersion.setStatus('mandatory')
srvOS = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvOS.setStatus('mandatory')
srvOSMajVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvOSMajVersion.setStatus('mandatory')
srvOSMinVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 2, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srvOSMinVersion.setStatus('mandatory')
maHSVMibRevMajor = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: maHSVMibRevMajor.setStatus('mandatory')
maHSVMibRevMinor = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 4, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: maHSVMibRevMinor.setStatus('mandatory')
scellTotal = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellTotal.setStatus('mandatory')
scellStatusTable = MibTable((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2), )
if mibBuilder.loadTexts: scellStatusTable.setStatus('mandatory')
scellEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1), ).setIndexNames((0, "CPQHSV110V3-MIB", "scellEntryIndex"))
if mibBuilder.loadTexts: scellEntry.setStatus('mandatory')
scellEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellEntryIndex.setStatus('mandatory')
scellName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellName.setStatus('mandatory')
scellUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellUUID.setStatus('mandatory')
scellStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("informational", 1), ("minor", 2), ("major", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellStatus.setStatus('mandatory')
scellEventDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellEventDescription.setStatus('mandatory')
scellEventTimeDate = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellEventTimeDate.setStatus('mandatory')
scellEventCode = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellEventCode.setStatus('mandatory')
scellSWComponent = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellSWComponent.setStatus('mandatory')
scellECode = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellECode.setStatus('mandatory')
scellCAC = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellCAC.setStatus('mandatory')
scellEIP = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellEIP.setStatus('mandatory')
scellNameDateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 1, 2, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scellNameDateTime.setStatus('mandatory')
hostTotal = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hostTotal.setStatus('mandatory')
hostStatusTable = MibTable((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 2), )
if mibBuilder.loadTexts: hostStatusTable.setStatus('mandatory')
hostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 2, 1), ).setIndexNames((0, "CPQHSV110V3-MIB", "hostEntryIndex"))
if mibBuilder.loadTexts: hostEntry.setStatus('mandatory')
hostEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hostEntryIndex.setStatus('mandatory')
hostName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hostName.setStatus('mandatory')
hostUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hostUUID.setStatus('mandatory')
hostStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 3, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("informational", 0), ("minor", 1), ("major", 2), ("critical", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hostStatus.setStatus('mandatory')
nscTotal = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nscTotal.setStatus('mandatory')
nscStatusTable = MibTable((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 2), )
if mibBuilder.loadTexts: nscStatusTable.setStatus('mandatory')
nscEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 2, 1), ).setIndexNames((0, "CPQHSV110V3-MIB", "nscEntryIndex"))
if mibBuilder.loadTexts: nscEntry.setStatus('mandatory')
nscEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nscEntryIndex.setStatus('mandatory')
nscName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nscName.setStatus('mandatory')
nscUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nscUUID.setStatus('mandatory')
nscStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 4, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("informational", 0), ("minor", 1), ("major", 2), ("critical", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nscStatus.setStatus('mandatory')
shelfTotal = MibScalar((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfTotal.setStatus('mandatory')
shelfStatusTable = MibTable((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2), )
if mibBuilder.loadTexts: shelfStatusTable.setStatus('mandatory')
shelfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1), ).setIndexNames((0, "CPQHSV110V3-MIB", "shelfEntryIndex"))
if mibBuilder.loadTexts: shelfEntry.setStatus('mandatory')
shelfEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfEntryIndex.setStatus('mandatory')
shelfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfStatus.setStatus('mandatory')
shelfId = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfId.setStatus('mandatory')
shelfElementType = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfElementType.setStatus('mandatory')
shelfElementNum = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfElementNum.setStatus('mandatory')
shelfErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 136, 1, 3, 8, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfErrorCode.setStatus('mandatory')
emuEventTrapInformative = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "shelfId"), ("CPQHSV110V3-MIB", "shelfElementType"), ("CPQHSV110V3-MIB", "shelfElementNum"), ("CPQHSV110V3-MIB", "shelfErrorCode"))
emuEventTrapNoncritical = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "shelfId"), ("CPQHSV110V3-MIB", "shelfElementType"), ("CPQHSV110V3-MIB", "shelfElementNum"), ("CPQHSV110V3-MIB", "shelfErrorCode"))
emuEventTrapCritical = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "shelfId"), ("CPQHSV110V3-MIB", "shelfElementType"), ("CPQHSV110V3-MIB", "shelfElementNum"), ("CPQHSV110V3-MIB", "shelfErrorCode"))
emuEventTrapUnrecoverable = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136004)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "shelfId"), ("CPQHSV110V3-MIB", "shelfElementType"), ("CPQHSV110V3-MIB", "shelfElementNum"), ("CPQHSV110V3-MIB", "shelfErrorCode"))
sCellEventTrap_1_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600256)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_1_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600257)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600768)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600769)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_2 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600770)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600771)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600772)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600773)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_6 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600774)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_7 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600775)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_3_8 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13600776)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601024)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601025)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_2 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601026)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601027)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601028)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601029)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_6 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601030)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_7 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601031)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_8 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601032)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_9 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601033)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_a = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601034)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_b = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601035)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_c = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601036)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_4_d = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13601037)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
# Storage-cell event trap notifications (sCellEventTrap_<group>_<hexindex>).
# Every trap carries the same five varbinds; only the module-level name
# suffix and the trailing OID arc differ.  The trailing arc is
# 13600000 + group * 256 + index, and the name suffix is the group number
# followed by the index in lowercase hex — so the bindings are generated
# from a compact table of (group, indices) instead of one assignment per
# trap.  Index gaps in the tuples below mirror traps that the MIB does
# not define (e.g. no 6_6, 6_11, 6_17, 6_22, 6_2f, 9_0, 9_b, 9_10).
_SCELL_TRAP_OBJECTS = (
    ("CPQHSV110V3-MIB", "scellNameDateTime"),
    ("CPQHSV110V3-MIB", "scellSWComponent"),
    ("CPQHSV110V3-MIB", "scellECode"),
    ("CPQHSV110V3-MIB", "scellCAC"),
    ("CPQHSV110V3-MIB", "scellEIP"),
)

for _group, _indices in (
    (4, tuple(range(0x0E, 0x12))),
    (6, tuple(range(0x00, 0x06)) + tuple(range(0x07, 0x11))
        + tuple(range(0x12, 0x17)) + tuple(range(0x18, 0x22))
        + tuple(range(0x23, 0x2F)) + tuple(range(0x30, 0x3F))),
    (7, tuple(range(0x00, 0x09))),
    (9, tuple(range(0x01, 0x0B)) + tuple(range(0x0C, 0x10))
        + tuple(range(0x11, 0x41))),
):
    for _index in _indices:
        # e.g. group 4, index 0x0e -> sCellEventTrap_4_e with arc 13601038.
        globals()["sCellEventTrap_%d_%x" % (_group, _index)] = NotificationType(
            (1, 3, 6, 1, 4, 1, 232) + (0, 13600000 + _group * 256 + _index)
        ).setObjects(*_SCELL_TRAP_OBJECTS)

# Drop the generation scaffolding so the module namespace exposes only the
# trap names themselves.
del _group, _indices, _index, _SCELL_TRAP_OBJECTS
sCellEventTrap_9_41 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602369)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_43 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602371)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_44 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602372)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_45 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602373)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_46 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602374)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_47 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602375)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_48 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602376)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_49 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602377)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_65 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602405)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_66 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602406)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_67 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602407)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_68 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602408)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_69 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602409)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_6a = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602410)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_6b = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602411)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_6c = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602412)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_6d = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602413)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_6e = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602414)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_70 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602416)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_71 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602417)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_72 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602418)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_73 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602419)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_74 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602420)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_75 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602421)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_76 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602422)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_77 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602423)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_78 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602424)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_79 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602425)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_7a = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602426)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_c8 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602504)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_c9 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602505)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_ca = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602506)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_cb = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602507)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_cc = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602508)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_cd = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602509)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_ce = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602510)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_cf = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602511)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602512)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602513)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d2 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602514)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602515)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602516)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602517)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d6 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602518)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_9_d7 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602519)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_b_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13602816)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603072)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603073)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_2 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603074)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603075)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603076)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603077)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_6 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603078)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_7 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603079)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_8 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603080)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_9 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603081)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_a = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603082)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_c = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603084)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_f = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603087)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_10 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603088)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_11 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603089)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_12 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603090)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_c_15 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603093)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603328)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603329)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_2 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603330)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603331)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603332)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_33 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603379)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_34 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603380)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_35 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603381)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_47 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603399)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_4b = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603403)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_4c = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603404)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_5b = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603419)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_5f = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603423)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_6f = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603439)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_71 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603441)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_72 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603442)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_7e = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603454)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_7f = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603455)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_82 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603458)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_83 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603459)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_85 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603461)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_8d = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603469)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_a1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603489)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_b5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603509)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_d8 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603544)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_d9 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603545)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_dd = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603549)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_de = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603550)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_ec = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603564)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_d_f0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13603568)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_42_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13616896)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_42_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13616897)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_42_3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13616899)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_42_4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13616900)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_42_5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13616901)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_0 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633536)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_1 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633537)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_2 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633538)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_3 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633539)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_4 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633540)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_5 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633541)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
sCellEventTrap_83_6 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,13633542)).setObjects(("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "scellSWComponent"), ("CPQHSV110V3-MIB", "scellECode"), ("CPQHSV110V3-MIB", "scellCAC"), ("CPQHSV110V3-MIB", "scellEIP"))
# mngmtAgentTrap_1 .. mngmtAgentTrap_133: management-agent event traps under
# enterprises.232.0.  The trap-specific arc encodes the trap number as
# 136000000 + n (e.g. trap 1 -> 136000001, trap 133 -> 136000133).  Every
# trap carries the same four varbinds: host name, cell name/date-time, the
# agent event code, and its textual description.  The loop assigns through
# globals() so the module exposes exactly the same per-trap names as the
# generated per-line form.
_MNGMT_AGENT_TRAP_OBJECTS = (
    ("CPQHSV110V3-MIB", "hostName"),
    ("CPQHSV110V3-MIB", "scellNameDateTime"),
    ("CPQHSV110V3-MIB", "agentEventCode"),
    ("CPQHSV110V3-MIB", "agentEventDescription"),
)
for _trap_num in range(1, 134):
    globals()["mngmtAgentTrap_%d" % _trap_num] = NotificationType(
        (1, 3, 6, 1, 4, 1, 232) + (0, 136000000 + _trap_num)
    ).setObjects(*_MNGMT_AGENT_TRAP_OBJECTS)
# mngmtAgentTrap_1000 .. mngmtAgentTrap_1008: the 1000-series management-agent
# event traps.  Same OID scheme as the low-numbered agent traps — the
# trap-specific arc is 136000000 + n (1000 -> 136001000 ... 1008 -> 136001008)
# — and the same four varbinds.  Assignment goes through globals() so each
# mngmtAgentTrap_<n> module attribute is created exactly as in the generated
# per-line form.  (This block keeps its own varbind tuple so it stands alone.)
_MNGMT_AGENT_TRAP_1000_OBJECTS = (
    ("CPQHSV110V3-MIB", "hostName"),
    ("CPQHSV110V3-MIB", "scellNameDateTime"),
    ("CPQHSV110V3-MIB", "agentEventCode"),
    ("CPQHSV110V3-MIB", "agentEventDescription"),
)
for _trap_num in range(1000, 1009):
    globals()["mngmtAgentTrap_%d" % _trap_num] = NotificationType(
        (1, 3, 6, 1, 4, 1, 232) + (0, 136000000 + _trap_num)
    ).setObjects(*_MNGMT_AGENT_TRAP_1000_OBJECTS)
mngmtAgentTrap_1009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_1010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_1011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_1012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_1013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_1014 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136001014)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2006 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002006)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2008 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002008)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2021 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002021)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2022 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002022)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2023 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002023)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2025 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002025)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2026 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002026)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2030 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002030)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2031 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002031)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2032 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002032)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2033 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002033)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2034 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002034)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2035 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002035)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2036 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002036)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2038 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002038)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2040 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002040)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2041 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002041)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2042 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002042)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2047 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002047)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2048 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002048)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2049 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002049)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2050 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002050)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2051 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002051)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2052 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002052)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2057 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002057)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2058 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002058)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2059 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002059)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2060 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002060)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2061 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002061)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2062 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002062)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2063 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002063)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2064 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002064)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2065 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002065)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2066 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002066)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2067 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002067)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2068 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002068)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2069 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002069)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2070 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002070)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2071 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002071)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2072 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002072)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2073 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002073)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2074 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002074)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2075 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002075)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2076 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002076)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2077 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002077)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2078 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002078)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2079 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002079)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2080 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002080)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2081 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002081)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2082 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002082)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2083 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002083)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2084 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002084)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2085 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002085)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2086 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002086)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2087 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002087)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2088 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002088)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2089 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002089)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2090 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002090)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2091 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002091)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2092 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002092)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2093 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002093)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2095 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002095)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2096 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002096)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2097 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002097)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2098 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002098)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2099 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002099)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2100 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002100)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2102 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002102)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_2103 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136002103)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3017 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003017)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3020 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003020)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3021 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003021)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3022 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003022)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3024 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003024)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3025 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003025)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3028 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003028)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3029 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003029)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3035 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003035)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3036 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003036)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3037 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003037)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3038 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003038)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3039 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003039)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3044 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003044)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3045 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003045)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_3046 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136003046)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
# Management-agent trap notifications for CPQHSV110V3-MIB.
#
# Every trap in this family follows the same pattern: the notification OID is
# 1.3.6.1.4.1.232.0.<136000000 + trapNumber>, and each notification carries the
# same four var-binds (hostName, scellNameDateTime, agentEventCode,
# agentEventDescription).  Rather than spelling out 144 near-identical
# statements, the trap numbers are listed once and the module-level
# `mngmtAgentTrap_<N>` names are created in a loop via globals().
_MNGMT_AGENT_TRAP_NUMBERS = (
    list(range(3047, 3052))          # 3047-3051
    + list(range(3053, 3073))        # 3053-3072
    + list(range(3075, 3082))        # 3075-3081
    + [3083, 3084, 3086, 3090, 3091, 3092, 3094, 3095]
    + [4000, 4001, 4004, 4005, 4007]
    + list(range(4011, 4019))        # 4011-4018
    + [4020, 4021, 4023, 4024, 4025]
    + list(range(4027, 4038))        # 4027-4037
    + list(range(4040, 4044))        # 4040-4043
    + list(range(4047, 4055))        # 4047-4054
    + [4058, 4059]
    + list(range(5001, 5009))        # 5001-5008
    + list(range(5010, 5020))        # 5010-5019
    + list(range(6001, 6039))        # 6001-6038
    + list(range(8001, 8006))        # 8001-8005
)

# All of these notifications report the same var-binds.
_MNGMT_AGENT_TRAP_OBJECTS = (
    ("CPQHSV110V3-MIB", "hostName"),
    ("CPQHSV110V3-MIB", "scellNameDateTime"),
    ("CPQHSV110V3-MIB", "agentEventCode"),
    ("CPQHSV110V3-MIB", "agentEventDescription"),
)

for _trap_number in _MNGMT_AGENT_TRAP_NUMBERS:
    # e.g. trap 3047 -> mngmtAgentTrap_3047 at 1.3.6.1.4.1.232.0.136003047
    globals()["mngmtAgentTrap_%d" % _trap_number] = NotificationType(
        (1, 3, 6, 1, 4, 1, 232) + (0, 136000000 + _trap_number)
    ).setObjects(*_MNGMT_AGENT_TRAP_OBJECTS)

# Drop the loop scaffolding so the module namespace matches the fully
# spelled-out form this replaces.
del _trap_number, _MNGMT_AGENT_TRAP_NUMBERS, _MNGMT_AGENT_TRAP_OBJECTS
mngmtAgentTrap_8006 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008006)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8008 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008008)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8014 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008014)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8017 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008017)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8018 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008018)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8020 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008020)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8021 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008021)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8022 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008022)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8023 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008023)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8024 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008024)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8025 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008025)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8026 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008026)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8027 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008027)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8028 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008028)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8029 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008029)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8030 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008030)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8031 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008031)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8032 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008032)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8033 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008033)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8034 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008034)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8035 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008035)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8036 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008036)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8037 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008037)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8038 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008038)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8039 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008039)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8040 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008040)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8041 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008041)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8042 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008042)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8043 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008043)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8044 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008044)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8045 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008045)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8046 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008046)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8047 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008047)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8048 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008048)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8049 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008049)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8050 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008050)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8051 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008051)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8052 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008052)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8053 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008053)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8054 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008054)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8055 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008055)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8056 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008056)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8057 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008057)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8058 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008058)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8059 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008059)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8060 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008060)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8061 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008061)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8062 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008062)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8063 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008063)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8064 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008064)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8065 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008065)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8066 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008066)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8067 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008067)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8068 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008068)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8069 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008069)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8070 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008070)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8071 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008071)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8073 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008073)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8074 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008074)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8075 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008075)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8076 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008076)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8077 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008077)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8078 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008078)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8079 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008079)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8080 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008080)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8081 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008081)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8082 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008082)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8083 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008083)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8084 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008084)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8085 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008085)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8086 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008086)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8087 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008087)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8088 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008088)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8089 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008089)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_8090 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136008090)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9005 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009005)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9006 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009006)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9008 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009008)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9014 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009014)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9017 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009017)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9018 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009018)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9020 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009020)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9021 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009021)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9022 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009022)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9023 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009023)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9025 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009025)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9026 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009026)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9027 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009027)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9028 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009028)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_9029 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136009029)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
# Management-agent event traps for the HSV110 V3 storage cell.
#
# Every trap in this section carries the same four varbinds and lives under
# the enterprise arc (1,3,6,1,4,1,232) with trap number 136000000 + <event
# code> (e.g. event 9030 -> trap 136009030).  Rather than spelling out one
# near-identical NotificationType per line, the bindings are generated from
# the bare event-code table below.  Each generated object is bound at module
# level under its conventional name mngmtAgentTrap_<code>, so later
# exportSymbols() calls and external references keep working unchanged.

# Varbind list shared by every management-agent trap.
_mngmtAgentTrapObjects = (
    ("CPQHSV110V3-MIB", "hostName"),
    ("CPQHSV110V3-MIB", "scellNameDateTime"),
    ("CPQHSV110V3-MIB", "agentEventCode"),
    ("CPQHSV110V3-MIB", "agentEventDescription"),
)

# Agent event codes defined in this section.  The trap OID suffix is
# (0, 136000000 + code); gaps in the numbering mirror the source MIB.
_mngmtAgentTrapCodes = (
    9030, 9031, 9032, 9033, 9034, 9035, 9036,
    10001, 10004, 10006, 10010, 10011, 10012, 10013, 10014, 10015,
    10017, 10018, 10019, 10020, 10021, 10022, 10023, 10024, 10025,
    10026, 10027, 10028, 10029, 10030, 10031, 10035, 10036, 10037,
    10038, 10039, 10040, 10041, 10042, 10043, 10044,
    11001, 11002, 11003, 11004,
    12001, 12002, 12003, 12004, 12005, 12008,
    13002, 13003, 13004, 13007, 13009, 13012, 13015, 13017, 13018,
    13019, 13020,
    14001, 14002, 14003, 14004, 14005, 14006, 14007, 14008, 14009,
    14010, 14012, 14013, 14017,
    15001, 15002, 15003, 15004, 15005, 15006, 15007, 15008, 15009,
    16001, 16004, 16005, 16008, 16010, 16012, 16013, 16014, 16015,
    16016, 16017, 16018, 16019, 16020, 16021, 16022, 16023, 16024,
    16025, 16026, 16027, 16028, 16029, 16030, 16031, 16032, 16033,
    16034, 16035, 16036, 16037, 16038, 16039, 16040,
    17001, 17002, 17003, 17004, 17005, 17006, 17007, 17008, 17009,
    17012, 17013, 17014, 17015, 17016, 17017,
    18001, 18002, 18003, 18004, 18005, 18006, 18007, 18008, 18009,
    18010,
)

for _code in _mngmtAgentTrapCodes:
    # Bind under the conventional per-trap module-level name so the objects
    # remain individually addressable exactly as the expanded form was.
    globals()["mngmtAgentTrap_%d" % _code] = NotificationType(
        (1, 3, 6, 1, 4, 1, 232) + (0, 136000000 + _code)
    ).setObjects(*_mngmtAgentTrapObjects)
mngmtAgentTrap_18018 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018018)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18022 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018022)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18024 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018024)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18025 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018025)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18028 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018028)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18034 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018034)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18036 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018036)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18038 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018038)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18039 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018039)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18040 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018040)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18041 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018041)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18042 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018042)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18045 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018045)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18047 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018047)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18048 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018048)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18049 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018049)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18050 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018050)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18051 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018051)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18052 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018052)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18059 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018059)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18060 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018060)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18063 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018063)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18065 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018065)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18066 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018066)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18067 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018067)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18068 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018068)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18070 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018070)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18071 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018071)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18073 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018073)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18074 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018074)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18075 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018075)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18076 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018076)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18080 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018080)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_18081 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136018081)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20005 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020005)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20017 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020017)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20018 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020018)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20020 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020020)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20021 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020021)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20022 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020022)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_20023 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136020023)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21006 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021006)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21008 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021008)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21014 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021014)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21017 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021017)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21018 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021018)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_21019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136021019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_22001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136022001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_22002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136022002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_23002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136023002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_23003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136023003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_24001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136024001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_24002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136024002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_24003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136024003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_24004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136024004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25003 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025003)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25004 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025004)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25005 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025005)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25006 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025006)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25008 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025008)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25014 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025014)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25017 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025017)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25018 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025018)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_25019 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136025019)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26005 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026005)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26006 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026006)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26007 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026007)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26008 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026008)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26009 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026009)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26010 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026010)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26011 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026011)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26012 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026012)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26013 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026013)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26014 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026014)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26015 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026015)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_26016 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136026016)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_27001 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136027001)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
mngmtAgentTrap_27002 = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,136027002)).setObjects(("CPQHSV110V3-MIB", "hostName"), ("CPQHSV110V3-MIB", "scellNameDateTime"), ("CPQHSV110V3-MIB", "agentEventCode"), ("CPQHSV110V3-MIB", "agentEventDescription"))
# Management-agent event traps 27003..27049 (CPQHSV110V3-MIB).
#
# All 47 traps are structurally identical: the trap OID is the Compaq
# enterprise arc (1.3.6.1.4.1.232) extended with (0, 136000000 + code),
# and every trap carries the same four varbinds identifying the host,
# the storage-cell timestamp, and the agent event code/description.
# Generate them in a loop instead of 47 copy-pasted assignments; the
# module-level names (mngmtAgentTrap_<code>) are unchanged so the
# exportSymbols() calls below still resolve them.
for _code in range(27003, 27050):
    globals()['mngmtAgentTrap_%d' % _code] = NotificationType(
        (1, 3, 6, 1, 4, 1, 232) + (0, 136000000 + _code)
    ).setObjects(
        ("CPQHSV110V3-MIB", "hostName"),
        ("CPQHSV110V3-MIB", "scellNameDateTime"),
        ("CPQHSV110V3-MIB", "agentEventCode"),
        ("CPQHSV110V3-MIB", "agentEventDescription"),
    )
del _code  # keep the module namespace free of the loop temporary
# Register the first batch of this module's MIB symbols (scalar objects and
# NotificationType trap definitions) with the MIB builder under the module
# name "CPQHSV110V3-MIB", so other pysnmp components can import them by name.
# The registration is split across several exportSymbols() calls because
# CPython limits a single call to 255 keyword arguments.
mibBuilder.exportSymbols("CPQHSV110V3-MIB", mngmtAgentTrap_103=mngmtAgentTrap_103, mngmtAgentTrap_3021=mngmtAgentTrap_3021, mngmtAgentTrap_8001=mngmtAgentTrap_8001, sCellEventTrap_9_6=sCellEventTrap_9_6, scellEventDescription=scellEventDescription, mngmtAgentTrap_3072=mngmtAgentTrap_3072, sCellEventTrap_6_2c=sCellEventTrap_6_2c, mngmtAgentTrap_2072=mngmtAgentTrap_2072, mngmtAgentTrap_8078=mngmtAgentTrap_8078, mngmtAgentTrap_16001=mngmtAgentTrap_16001, mngmtAgentTrap_2008=mngmtAgentTrap_2008, mngmtAgentTrap_83=mngmtAgentTrap_83, sCellEventTrap_6_2d=sCellEventTrap_6_2d, sCellEventTrap_9_72=sCellEventTrap_9_72, mngmtAgentTrap_8044=mngmtAgentTrap_8044, mngmtAgentTrap_18038=mngmtAgentTrap_18038, sCellEventTrap_d_ec=sCellEventTrap_d_ec, mngmtAgentTrap_27020=mngmtAgentTrap_27020, mngmtAgentTrap_4000=mngmtAgentTrap_4000, mngmtAgentTrap_26009=mngmtAgentTrap_26009, mngmtAgentTrap_27042=mngmtAgentTrap_27042, mngmtAgentTrap_4050=mngmtAgentTrap_4050, mngmtAgentTrap_27046=mngmtAgentTrap_27046, mngmtAgentTrap_9027=mngmtAgentTrap_9027, mngmtAgentTrap_27048=mngmtAgentTrap_27048, mngmtAgentTrap_18025=mngmtAgentTrap_18025, mngmtAgentTrap_8013=mngmtAgentTrap_8013, mngmtAgentTrap_9035=mngmtAgentTrap_9035, mngmtAgentTrap_2007=mngmtAgentTrap_2007, mngmtAgentTrap_11003=mngmtAgentTrap_11003, mngmtAgentTrap_16037=mngmtAgentTrap_16037, mngmtAgentTrap_88=mngmtAgentTrap_88, mngmtAgentTrap_20019=mngmtAgentTrap_20019, mngmtAgentTrap_25005=mngmtAgentTrap_25005, sCellEventTrap_6_1e=sCellEventTrap_6_1e, sCellEventTrap_9_3f=sCellEventTrap_9_3f, maHSVMibRevMinor=maHSVMibRevMinor, mngmtAgentTrap_68=mngmtAgentTrap_68, sCellEventTrap_9_c=sCellEventTrap_9_c, mngmtAgentTrap_15=mngmtAgentTrap_15, sCellEventTrap_6_f=sCellEventTrap_6_f, mngmtAgentTrap_2079=mngmtAgentTrap_2079, mngmtAgentTrap_8030=mngmtAgentTrap_8030, mngmtAgentTrap_14017=mngmtAgentTrap_14017, mngmtAgentTrap_18048=mngmtAgentTrap_18048, sCellEventTrap_6_1=sCellEventTrap_6_1, mngmtAgentTrap_99=mngmtAgentTrap_99, 
mngmtAgentTrap_8054=mngmtAgentTrap_8054, mngmtAgentTrap_21009=mngmtAgentTrap_21009, mngmtAgentTrap_6016=mngmtAgentTrap_6016, mngmtAgentTrap_21014=mngmtAgentTrap_21014, mngmtAgentTrap_9025=mngmtAgentTrap_9025, mngmtAgentTrap_94=mngmtAgentTrap_94, mngmtAgentTrap_15007=mngmtAgentTrap_15007, sCellEventTrap_9_34=sCellEventTrap_9_34, mngmtAgentTrap_6023=mngmtAgentTrap_6023, sCellEventTrap_9_30=sCellEventTrap_9_30, mngmtAgentTrap_57=mngmtAgentTrap_57, sCellEventTrap_9_41=sCellEventTrap_9_41, mngmtAgentTrap_14005=mngmtAgentTrap_14005, sCellEventTrap_83_4=sCellEventTrap_83_4, mngmtAgentTrap_13017=mngmtAgentTrap_13017, sCellEventTrap_9_19=sCellEventTrap_9_19, mngmtAgentTrap_15002=mngmtAgentTrap_15002, nscName=nscName, mngmtAgentTrap_18005=mngmtAgentTrap_18005, mngmtAgentTrap_2033=mngmtAgentTrap_2033, agManufacturer=agManufacturer, mngmtAgentTrap_9013=mngmtAgentTrap_9013, mngmtAgentTrap_2010=mngmtAgentTrap_2010, sCellEventTrap_6_1a=sCellEventTrap_6_1a, mngmtAgentTrap_26007=mngmtAgentTrap_26007, mngmtAgentTrap_10031=mngmtAgentTrap_10031, mngmtAgentTrap_130=mngmtAgentTrap_130, mngmtAgentTrap_3044=mngmtAgentTrap_3044, mngmtAgentTrap_8064=mngmtAgentTrap_8064, mngmtAgentTrap_20011=mngmtAgentTrap_20011, sCellEventTrap_6_19=sCellEventTrap_6_19, mngmtAgentTrap_8071=mngmtAgentTrap_8071, agMajVersion=agMajVersion, sCellEventTrap_4_4=sCellEventTrap_4_4, mngmtAgentTrap_17012=mngmtAgentTrap_17012, mngmtAgentTrap_27018=mngmtAgentTrap_27018, emuEventTrapInformative=emuEventTrapInformative, scellEntry=scellEntry, sCellEventTrap_4_8=sCellEventTrap_4_8, mngmtAgentTrap_2092=mngmtAgentTrap_2092, mngmtAgentTrap_21007=mngmtAgentTrap_21007, mngmtAgentTrap_111=mngmtAgentTrap_111, sCellEventTrap_7_1=sCellEventTrap_7_1, sCellEventTrap_6_2a=sCellEventTrap_6_2a, sCellEventTrap_c_9=sCellEventTrap_c_9, srvModel=srvModel, mngmtAgentTrap_1010=mngmtAgentTrap_1010, mngmtAgentTrap_18=mngmtAgentTrap_18, mngmtAgentTrap_4042=mngmtAgentTrap_4042, mngmtAgentTrap_2081=mngmtAgentTrap_2081, hostName=hostName, 
mngmtAgentTrap_27006=mngmtAgentTrap_27006, mngmtAgentTrap_42=mngmtAgentTrap_42, mngmtAgentTrap_6036=mngmtAgentTrap_6036, mngmtAgentTrap_3067=mngmtAgentTrap_3067, mngmtAgentTrap_16012=mngmtAgentTrap_16012, sCellEventTrap_4_3=sCellEventTrap_4_3, mngmtAgentTrap_74=mngmtAgentTrap_74, mngmtAgentTrap_17002=mngmtAgentTrap_17002, mngmtAgentTrap_8052=mngmtAgentTrap_8052, mngmtAgentTrap_6028=mngmtAgentTrap_6028, mngmtAgentTrap_9021=mngmtAgentTrap_9021, sCellEventTrap_9_23=sCellEventTrap_9_23, mngmtAgentTrap_17=mngmtAgentTrap_17, mngmtAgentTrap_27028=mngmtAgentTrap_27028, mngmtAgentTrap_110=mngmtAgentTrap_110, mngmtAgentTrap_5011=mngmtAgentTrap_5011, sCellEventTrap_d_82=sCellEventTrap_d_82, sCellEventTrap_9_24=sCellEventTrap_9_24, mngmtAgentTrap_6018=mngmtAgentTrap_6018, mngmtAgentTrap_4034=mngmtAgentTrap_4034, sCellEventTrap_d_7e=sCellEventTrap_d_7e, sCellEventTrap_9_40=sCellEventTrap_9_40, mngmtAgentTrap_27=mngmtAgentTrap_27, mngmtAgentTrap_2021=mngmtAgentTrap_2021, sCellEventTrap_9_2f=sCellEventTrap_9_2f, mngmtAgentTrap_55=mngmtAgentTrap_55, sCellEventTrap_83_2=sCellEventTrap_83_2, mngmtAgentTrap_1008=mngmtAgentTrap_1008, mngmtAgentTrap_79=mngmtAgentTrap_79, mngmtAgentTrap_1011=mngmtAgentTrap_1011, mngmtAgentTrap_77=mngmtAgentTrap_77, sCellEventTrap_3_6=sCellEventTrap_3_6, mngmtAgentTrap_18002=mngmtAgentTrap_18002, mngmtAgentTrap_24004=mngmtAgentTrap_24004, sCellEventTrap_3_3=sCellEventTrap_3_3, mngmtAgentTrap_8050=mngmtAgentTrap_8050, agentEntry=agentEntry, mngmtAgentTrap_20005=mngmtAgentTrap_20005, mngmtAgentTrap_10010=mngmtAgentTrap_10010, sCellEventTrap_6_8=sCellEventTrap_6_8, mngmtAgentTrap_27047=mngmtAgentTrap_27047, sCellEventTrap_9_d7=sCellEventTrap_9_d7, mngmtAgentTrap_9002=mngmtAgentTrap_9002, mngmtAgentTrap_9020=mngmtAgentTrap_9020, mngmtAgentTrap_5018=mngmtAgentTrap_5018, sCellEventTrap_d_33=sCellEventTrap_d_33, mngmtAgentTrap_109=mngmtAgentTrap_109, mngmtAgentTrap_2042=mngmtAgentTrap_2042, mngmtAgentTrap_3062=mngmtAgentTrap_3062, 
mngmtAgentTrap_8021=mngmtAgentTrap_8021, mngmtAgentTrap_3012=mngmtAgentTrap_3012, mngmtAgentTrap_10035=mngmtAgentTrap_10035, mngmtAgentTrap_17005=mngmtAgentTrap_17005, mngmtAgentTrap_9032=mngmtAgentTrap_9032, mngmtAgentTrap_20023=mngmtAgentTrap_20023, mngmtAgentTrap_4005=mngmtAgentTrap_4005, mngmtAgentTrap_5019=mngmtAgentTrap_5019, mngmtAgentTrap_10038=mngmtAgentTrap_10038, sCellEventTrap_9_f=sCellEventTrap_9_f, mngmtAgentTrap_13020=mngmtAgentTrap_13020, mngmtAgentTrap_14007=mngmtAgentTrap_14007, mngmtAgentTrap_27031=mngmtAgentTrap_27031, mngmtAgentTrap_6=mngmtAgentTrap_6, mngmtAgentTrap_18070=mngmtAgentTrap_18070, agDescription=agDescription, sCellEventTrap_c_10=sCellEventTrap_c_10, mngmtAgentTrap_4036=mngmtAgentTrap_4036, mngmtAgentTrap_8058=mngmtAgentTrap_8058, mngmtAgentTrap_16008=mngmtAgentTrap_16008, mngmtAgentTrap_21016=mngmtAgentTrap_21016, mngmtAgentTrap_24002=mngmtAgentTrap_24002, sCellEventTrap_9_76=sCellEventTrap_9_76, mngmtAgentTrap_4014=mngmtAgentTrap_4014, mngmtAgentTrap_8084=mngmtAgentTrap_8084, sCellEventTrap_9_3c=sCellEventTrap_9_3c, mngmtAgentTrap_8046=mngmtAgentTrap_8046, mngmtAgentTrap_26016=mngmtAgentTrap_26016, mngmtAgentTrap_78=mngmtAgentTrap_78, sCellEventTrap_4_f=sCellEventTrap_4_f, sCellEventTrap_6_21=sCellEventTrap_6_21, mngmtAgentTrap_85=mngmtAgentTrap_85, mngmtAgentTrap_3079=mngmtAgentTrap_3079, mngmtAgentTrap_4017=mngmtAgentTrap_4017, mngmtAgentTrap_6013=mngmtAgentTrap_6013, mngmtAgentTrap_4021=mngmtAgentTrap_4021, mngmtAgentTrap_16023=mngmtAgentTrap_16023, mngmtAgentTrap_19=mngmtAgentTrap_19, mngmtAgentTrap_8014=mngmtAgentTrap_8014, mngmtAgentTrap_21001=mngmtAgentTrap_21001, mngmtAgentTrap_21019=mngmtAgentTrap_21019, sCellEventTrap_9_67=sCellEventTrap_9_67, mngmtAgentTrap_53=mngmtAgentTrap_53, mngmtAgentTrap_2096=mngmtAgentTrap_2096, mngmtAgentTrap_71=mngmtAgentTrap_71, mngmtAgentTrap_27036=mngmtAgentTrap_27036, mngmtAgentTrap_89=mngmtAgentTrap_89, mngmtAgentTrap_1012=mngmtAgentTrap_1012, mngmtAgentTrap_1013=mngmtAgentTrap_1013, 
mngmtAgentTrap_8081=mngmtAgentTrap_8081, mngmtAgentTrap_11002=mngmtAgentTrap_11002, shelf=shelf, sCellEventTrap_6_16=sCellEventTrap_6_16, mngmtAgentTrap_17001=mngmtAgentTrap_17001, mngmtAgentTrap_6037=mngmtAgentTrap_6037, mngmtAgentTrap_2086=mngmtAgentTrap_2086, mngmtAgentTrap_6035=mngmtAgentTrap_6035, sCellEventTrap_d_83=sCellEventTrap_d_83, mngmtAgentTrap_8074=mngmtAgentTrap_8074, mngmtAgentTrap_26002=mngmtAgentTrap_26002, sCellEventTrap_6_32=sCellEventTrap_6_32, mngmtAgentTrap_27023=mngmtAgentTrap_27023, srvCPU=srvCPU, agEnterprise=agEnterprise, mngmtAgentTrap_18066=mngmtAgentTrap_18066, mngmtAgentTrap_126=mngmtAgentTrap_126, mngmtAgentTrap_16022=mngmtAgentTrap_16022, sCellEventTrap_9_47=sCellEventTrap_9_47, sCellEventTrap_9_7a=sCellEventTrap_9_7a, sCellEventTrap_c_7=sCellEventTrap_c_7, mngmtAgentTrap_2002=mngmtAgentTrap_2002, mngmtAgentTrap_9006=mngmtAgentTrap_9006, sCellEventTrap_c_5=sCellEventTrap_c_5, sCellEventTrap_d_dd=sCellEventTrap_d_dd, mngmtAgentTrap_12=mngmtAgentTrap_12, mngmtAgentTrap_2089=mngmtAgentTrap_2089, mngmtAgentTrap_3057=mngmtAgentTrap_3057, mngmtAgentTrap_6019=mngmtAgentTrap_6019, mngmtAgentTrap_6021=mngmtAgentTrap_6021, sCellEventTrap_6_10=sCellEventTrap_6_10, mngmtAgentTrap_3050=mngmtAgentTrap_3050, sCellEventTrap_6_29=sCellEventTrap_6_29, mngmtAgentTrap_8018=mngmtAgentTrap_8018, shelfTotal=shelfTotal, sCellEventTrap_9_1d=sCellEventTrap_9_1d, mngmtAgentTrap_106=mngmtAgentTrap_106, mngmtAgentTrap_17008=mngmtAgentTrap_17008, mngmtAgentTrap_21006=mngmtAgentTrap_21006, sCellEventTrap_9_2=sCellEventTrap_9_2, mngmtAgentTrap_16030=mngmtAgentTrap_16030, mngmtAgentTrap_3028=mngmtAgentTrap_3028, mngmtAgentTrap_3077=mngmtAgentTrap_3077, mngmtAgentTrap_26=mngmtAgentTrap_26, mngmtAgentTrap_122=mngmtAgentTrap_122, mngmtAgentTrap_9011=mngmtAgentTrap_9011, mngmtAgentTrap_27011=mngmtAgentTrap_27011, mngmtAgentTrap_90=mngmtAgentTrap_90, mngmtAgentTrap_26014=mngmtAgentTrap_26014, sCellEventTrap_4_11=sCellEventTrap_4_11, 
mngmtAgentTrap_25010=mngmtAgentTrap_25010, mngmtAgentTrap_4015=mngmtAgentTrap_4015, sCellEventTrap_d_0=sCellEventTrap_d_0, mngmtAgentTrap_113=mngmtAgentTrap_113, sCellEventTrap_9_3b=sCellEventTrap_9_3b, mngmtAgentTrap_8088=mngmtAgentTrap_8088, mngmtAgentTrap_27012=mngmtAgentTrap_27012, sCellEventTrap_4_a=sCellEventTrap_4_a)
# Second batch of symbol registrations for "CPQHSV110V3-MIB" — continuation
# of the export above; split into a separate call to stay under CPython's
# 255-keyword-argument limit per function call.
mibBuilder.exportSymbols("CPQHSV110V3-MIB", mngmtAgentTrap_3080=mngmtAgentTrap_3080, mngmtAgentTrap_16026=mngmtAgentTrap_16026, sCellEventTrap_9_2b=sCellEventTrap_9_2b, mngmtAgentTrap_39=mngmtAgentTrap_39, scellEventTimeDate=scellEventTimeDate, srvOS=srvOS, mngmtAgentTrap_17014=mngmtAgentTrap_17014, mngmtAgentTrap_5001=mngmtAgentTrap_5001, mngmtAgentTrap_6029=mngmtAgentTrap_6029, mngmtAgentTrap_8047=mngmtAgentTrap_8047, mngmtAgentTrap_16021=mngmtAgentTrap_16021, mngmtAgentTrap_1014=mngmtAgentTrap_1014, mngmtAgentTrap_3071=mngmtAgentTrap_3071, mngmtAgentTrap_16027=mngmtAgentTrap_16027, mngmtAgentTrap_26013=mngmtAgentTrap_26013, mngmtAgentTrap_8004=mngmtAgentTrap_8004, mngmtAgentTrap_20015=mngmtAgentTrap_20015, sCellEventTrap_4_0=sCellEventTrap_4_0, sCellEventTrap_9_6e=sCellEventTrap_9_6e, mngmtAgentTrap_8089=mngmtAgentTrap_8089, mngmtAgentTrap_25014=mngmtAgentTrap_25014, mngmtAgentTrap_21008=mngmtAgentTrap_21008, sCellEventTrap_3_5=sCellEventTrap_3_5, mngmtAgentTrap_4025=mngmtAgentTrap_4025, mngmtAgentTrap_23=mngmtAgentTrap_23, mngmtAgentTrap_2083=mngmtAgentTrap_2083, mngmtAgentTrap_3002=mngmtAgentTrap_3002, sCellEventTrap_9_2d=sCellEventTrap_9_2d, sCellEventTrap_6_33=sCellEventTrap_6_33, mngmtAgentTrap_3092=mngmtAgentTrap_3092, mngmtAgentTrap_6038=mngmtAgentTrap_6038, sCellEventTrap_9_6b=sCellEventTrap_9_6b, sCellEventTrap_6_2b=sCellEventTrap_6_2b, sCellEventTrap_9_cd=sCellEventTrap_9_cd, sCellEventTrap_4_10=sCellEventTrap_4_10, nscEntryIndex=nscEntryIndex, scellEIP=scellEIP, mngmtAgentTrap_12008=mngmtAgentTrap_12008, mngmtAgentTrap_10042=mngmtAgentTrap_10042, mngmtAgentTrap_24003=mngmtAgentTrap_24003, mngmtAgentTrap_25003=mngmtAgentTrap_25003, sCellEventTrap_9_13=sCellEventTrap_9_13, mngmtAgentTrap_30=mngmtAgentTrap_30, sCellEventTrap_9_65=sCellEventTrap_9_65, mngmtAgentTrap_21004=mngmtAgentTrap_21004, mngmtAgentTrap_27027=mngmtAgentTrap_27027, srvBiosVersion=srvBiosVersion, mngmtAgentTrap_5012=mngmtAgentTrap_5012, cpqHSVServer=cpqHSVServer, 
mngmtAgentTrap_33=mngmtAgentTrap_33, sCellEventTrap_6_23=sCellEventTrap_6_23, mngmtAgentTrap_16033=mngmtAgentTrap_16033, cpqHSVAgent=cpqHSVAgent, mngmtAgentTrap_18076=mngmtAgentTrap_18076, mngmtAgentTrap_27035=mngmtAgentTrap_27035, mngmtAgentTrap_9026=mngmtAgentTrap_9026, mngmtAgentTrap_1009=mngmtAgentTrap_1009, sCellEventTrap_6_12=sCellEventTrap_6_12, mngmtAgentTrap_2022=mngmtAgentTrap_2022, mngmtAgentTrap_16013=mngmtAgentTrap_16013, mngmtAgentTrap_6007=mngmtAgentTrap_6007, mngmtAgentTrap_3076=mngmtAgentTrap_3076, mngmtAgentTrap_9029=mngmtAgentTrap_9029, agentEventCode=agentEventCode, mngmtAgentTrap_3009=mngmtAgentTrap_3009, mngmtAgentTrap_8020=mngmtAgentTrap_8020, mngmtAgentTrap_81=mngmtAgentTrap_81, mngmtAgentTrap_6025=mngmtAgentTrap_6025, mngmtAgentTrap_18019=mngmtAgentTrap_18019, mngmtAgentTrap_95=mngmtAgentTrap_95, mngmtAgentTrap_2090=mngmtAgentTrap_2090, mngmtAgentTrap_8061=mngmtAgentTrap_8061, mngmtAgentTrap_18047=mngmtAgentTrap_18047, sCellEventTrap_7_8=sCellEventTrap_7_8, mngmtAgentTrap_18067=mngmtAgentTrap_18067, sCellEventTrap_4_6=sCellEventTrap_4_6, srvSubModel=srvSubModel, mngmtAgentTrap_108=mngmtAgentTrap_108, mngmtAgentTrap_6015=mngmtAgentTrap_6015, mngmtAgentTrap_8035=mngmtAgentTrap_8035, scellStatusTable=scellStatusTable, sCellEventTrap_c_1=sCellEventTrap_c_1, sCellEventTrap_83_5=sCellEventTrap_83_5, mngmtAgentTrap_6009=mngmtAgentTrap_6009, mngmtAgentTrap_8026=mngmtAgentTrap_8026, mngmtAgentTrap_8045=mngmtAgentTrap_8045, mngmtAgentTrap_14004=mngmtAgentTrap_14004, mngmtAgentTrap_2016=mngmtAgentTrap_2016, mngmtAgentTrap_18080=mngmtAgentTrap_18080, mngmtAgentTrap_26008=mngmtAgentTrap_26008, sCellEventTrap_1_1=sCellEventTrap_1_1, sCellEventTrap_3_8=sCellEventTrap_3_8, mngmtAgentTrap_8065=mngmtAgentTrap_8065, mngmtAgentTrap_115=mngmtAgentTrap_115, mngmtAgentTrap_128=mngmtAgentTrap_128, sCellEventTrap_9_38=sCellEventTrap_9_38, sCellEventTrap_9_36=sCellEventTrap_9_36, mngmtAgentTrap_8032=mngmtAgentTrap_8032, sCellEventTrap_6_b=sCellEventTrap_6_b, 
mngmtAgentTrap_4027=mngmtAgentTrap_4027, mngmtAgentTrap_2082=mngmtAgentTrap_2082, mngmtAgentTrap_10024=mngmtAgentTrap_10024, sCellEventTrap_4_c=sCellEventTrap_4_c, mngmtAgentTrap_27007=mngmtAgentTrap_27007, mngmtAgentTrap_10004=mngmtAgentTrap_10004, mngmtAgentTrap_21011=mngmtAgentTrap_21011, sCellEventTrap_9_6a=sCellEventTrap_9_6a, mngmtAgentTrap_2025=mngmtAgentTrap_2025, mngmtAgentTrap_3036=mngmtAgentTrap_3036, mngmtAgentTrap_18042=mngmtAgentTrap_18042, sCellEventTrap_9_27=sCellEventTrap_9_27, mngmtAgentTrap_15004=mngmtAgentTrap_15004, mngmtAgentTrap_132=mngmtAgentTrap_132, sCellEventTrap_83_1=sCellEventTrap_83_1, mngmtAgentTrap_9=mngmtAgentTrap_9, mngmtAgentTrap_17009=mngmtAgentTrap_17009, mngmtAgentTrap_9014=mngmtAgentTrap_9014, mngmtAgentTrap_4011=mngmtAgentTrap_4011, mngmtAgentTrap_13015=mngmtAgentTrap_13015, mngmtAgentTrap_8016=mngmtAgentTrap_8016, mngmtAgentTrap_9010=mngmtAgentTrap_9010, mngmtAgentTrap_18010=mngmtAgentTrap_18010, mngmtAgentTrap_119=mngmtAgentTrap_119, mngmtAgentTrap_131=mngmtAgentTrap_131, scellEntryIndex=scellEntryIndex, sCellEventTrap_9_32=sCellEventTrap_9_32, mngmtAgentTrap_18068=mngmtAgentTrap_18068, scellECode=scellECode, sCellEventTrap_6_1f=sCellEventTrap_6_1f, sCellEventTrap_4_5=sCellEventTrap_4_5, mngmtAgentTrap_6026=mngmtAgentTrap_6026, sCellEventTrap_9_1e=sCellEventTrap_9_1e, sCellEventTrap_9_29=sCellEventTrap_9_29, mngmtAgentTrap_6027=mngmtAgentTrap_6027, mngmtAgentTrap_4001=mngmtAgentTrap_4001, mngmtAgentTrap_3024=mngmtAgentTrap_3024, mngmtAgentTrap_16025=mngmtAgentTrap_16025, mngmtAgentTrap_13009=mngmtAgentTrap_13009, sCellEventTrap_6_d=sCellEventTrap_6_d, sCellEventTrap_9_11=sCellEventTrap_9_11, mngmtAgentTrap_2075=mngmtAgentTrap_2075, mngmtAgentTrap_8076=mngmtAgentTrap_8076, mngmtAgentTrap_8082=mngmtAgentTrap_8082, mngmtAgentTrap_25=mngmtAgentTrap_25, sCellEventTrap_6_1c=sCellEventTrap_6_1c, mngmtAgentTrap_4051=mngmtAgentTrap_4051, sCellEventTrap_d_47=sCellEventTrap_d_47, sCellEventTrap_6_9=sCellEventTrap_6_9, 
mngmtAgentTrap_125=mngmtAgentTrap_125, mngmtAgentTrap_10011=mngmtAgentTrap_10011, mngmtAgentTrap_2074=mngmtAgentTrap_2074, mngmtAgentTrap_12004=mngmtAgentTrap_12004, mngmtAgentTrap_3091=mngmtAgentTrap_3091, mngmtAgentTrap_6017=mngmtAgentTrap_6017, nsc=nsc, mngmtAgentTrap_14006=mngmtAgentTrap_14006, sCellEventTrap_c_8=sCellEventTrap_c_8, mngmtAgentTrap_16029=mngmtAgentTrap_16029, mngmtAgentTrap_3=mngmtAgentTrap_3, mngmtAgentTrap_8003=mngmtAgentTrap_8003, host=host, mngmtAgentTrap_10013=mngmtAgentTrap_10013, sCellEventTrap_d_34=sCellEventTrap_d_34, mngmtAgentTrap_18081=mngmtAgentTrap_18081, mngmtAgentTrap_82=mngmtAgentTrap_82, mngmtAgentTrap_5006=mngmtAgentTrap_5006, mngmtAgentTrap_2048=mngmtAgentTrap_2048, mngmtAgentTrap_3065=mngmtAgentTrap_3065, sCellEventTrap_c_a=sCellEventTrap_c_a, mngmtAgentTrap_58=mngmtAgentTrap_58, mngmtAgentTrap_8017=mngmtAgentTrap_8017, mngmtAgentTrap_25017=mngmtAgentTrap_25017, mngmtAgentTrap_13004=mngmtAgentTrap_13004, mngmtAgentTrap_8059=mngmtAgentTrap_8059, sCellEventTrap_6_27=sCellEventTrap_6_27, sCellEventTrap_9_2c=sCellEventTrap_9_2c, mngmtAgentTrap_51=mngmtAgentTrap_51, hsvObject=hsvObject, sCellEventTrap_6_39=sCellEventTrap_6_39, mngmtAgentTrap_76=mngmtAgentTrap_76, sCellEventTrap_9_68=sCellEventTrap_9_68, mngmtAgentTrap_133=mngmtAgentTrap_133, mngmtAgentTrap_2034=mngmtAgentTrap_2034, mngmtAgentTrap_2069=mngmtAgentTrap_2069, mngmtAgentTrap_10025=mngmtAgentTrap_10025, mngmtAgentTrap_2026=mngmtAgentTrap_2026, mngmtAgentTrap_2084=mngmtAgentTrap_2084, sCellEventTrap_9_1b=sCellEventTrap_9_1b, sCellEventTrap_3_7=sCellEventTrap_3_7, sCellEventTrap_1_0=sCellEventTrap_1_0, sCellEventTrap_9_75=sCellEventTrap_9_75, mngmtAgentTrap_5=mngmtAgentTrap_5, mngmtAgentTrap_18036=mngmtAgentTrap_18036, mngmtAgentTrap_5008=mngmtAgentTrap_5008, shelfErrorCode=shelfErrorCode, sCellEventTrap_6_36=sCellEventTrap_6_36, mngmtAgentTrap_8068=mngmtAgentTrap_8068, mngmtAgentTrap_8079=mngmtAgentTrap_8079, mngmtAgentTrap_27039=mngmtAgentTrap_27039, 
mngmtAgentTrap_17013=mngmtAgentTrap_17013, sCellEventTrap_3_0=sCellEventTrap_3_0, sCellEventTrap_d_3=sCellEventTrap_d_3, mngmtAgentTrap_18063=mngmtAgentTrap_18063, mngmtAgentTrap_8066=mngmtAgentTrap_8066, mngmtAgentTrap_18073=mngmtAgentTrap_18073, cpqElementManager=cpqElementManager, sCellEventTrap_6_31=sCellEventTrap_6_31, mngmtAgentTrap_70=mngmtAgentTrap_70, sCellEventTrap_d_71=sCellEventTrap_d_71, mngmtAgentTrap_18040=mngmtAgentTrap_18040, sCellEventTrap_9_6c=sCellEventTrap_9_6c, mngmtAgentTrap_21012=mngmtAgentTrap_21012, mngmtAgentTrap_4035=mngmtAgentTrap_4035, mngmtAgentTrap_18008=mngmtAgentTrap_18008, mngmtAgentTrap_8069=mngmtAgentTrap_8069, sCellEventTrap_9_d6=sCellEventTrap_9_d6, mngmtAgentTrap_3055=mngmtAgentTrap_3055, mngmtAgentTrap_10030=mngmtAgentTrap_10030, mngmtAgentTrap_73=mngmtAgentTrap_73, mngmtAgentTrap_25006=mngmtAgentTrap_25006, sCellEventTrap_9_d3=sCellEventTrap_9_d3, mngmtAgentTrap_3045=mngmtAgentTrap_3045, sCellEventTrap_d_a1=sCellEventTrap_d_a1, mngmtAgentTrap_66=mngmtAgentTrap_66, mngmtAgentTrap_25011=mngmtAgentTrap_25011, mngmtAgentTrap_10041=mngmtAgentTrap_10041, mngmtAgentTrap_100=mngmtAgentTrap_100, mngmtAgentTrap_26015=mngmtAgentTrap_26015, mngmtAgentTrap_21013=mngmtAgentTrap_21013, agentEventLevel=agentEventLevel, hostEntry=hostEntry, mngmtAgentTrap_4053=mngmtAgentTrap_4053, mngmtAgentTrap_27041=mngmtAgentTrap_27041, sCellEventTrap_d_8d=sCellEventTrap_d_8d, mngmtAgentTrap_7=mngmtAgentTrap_7, emuEventTrapUnrecoverable=emuEventTrapUnrecoverable, mngmtAgentTrap_9005=mngmtAgentTrap_9005, sCellEventTrap_7_0=sCellEventTrap_7_0, mngmtAgentTrap_121=mngmtAgentTrap_121, scellSWComponent=scellSWComponent, sCellEventTrap_4_d=sCellEventTrap_4_d, sCellEventTrap_6_3a=sCellEventTrap_6_3a, mngmtAgentTrap_2093=mngmtAgentTrap_2093, mngmtAgentTrap_2036=mngmtAgentTrap_2036, mngmtAgentTrap_9012=mngmtAgentTrap_9012, mngmtAgentTrap_25019=mngmtAgentTrap_25019, sCellEventTrap_9_37=sCellEventTrap_9_37, mngmtAgentTrap_3081=mngmtAgentTrap_3081, 
mngmtAgentTrap_9007=mngmtAgentTrap_9007, mngmtAgentTrap_3049=mngmtAgentTrap_3049, mngmtAgentTrap_10012=mngmtAgentTrap_10012, sCellEventTrap_9_49=sCellEventTrap_9_49, mngmtAgentTrap_8028=mngmtAgentTrap_8028, mngmtAgentTrap_16024=mngmtAgentTrap_16024)
mibBuilder.exportSymbols("CPQHSV110V3-MIB", mngmtAgentTrap_17017=mngmtAgentTrap_17017, mngmtAgentTrap_6034=mngmtAgentTrap_6034, mngmtAgentTrap_4052=mngmtAgentTrap_4052, mngmtAgentTrap_20003=mngmtAgentTrap_20003, mngmtAgentTrap_3046=mngmtAgentTrap_3046, sCellEventTrap_7_4=sCellEventTrap_7_4, mngmtAgentTrap_5016=mngmtAgentTrap_5016, emuEventTrapNoncritical=emuEventTrapNoncritical, sCellEventTrap_9_1=sCellEventTrap_9_1, mngmtAgentTrap_8010=mngmtAgentTrap_8010, mngmtAgentTrap_9001=mngmtAgentTrap_9001, mngmtAgentTrap_18001=mngmtAgentTrap_18001, mngmtAgentTrap_27004=mngmtAgentTrap_27004, mngmtAgentTrap_2013=mngmtAgentTrap_2013, sCellEventTrap_9_7=sCellEventTrap_9_7, mngmtAgentTrap_3020=mngmtAgentTrap_3020, mngmtAgentTrap_2077=mngmtAgentTrap_2077, mngmtAgentTrap_120=mngmtAgentTrap_120, mngmtAgentTrap_14013=mngmtAgentTrap_14013, maHSVMibRev=maHSVMibRev, mngmtAgentTrap_23002=mngmtAgentTrap_23002, agStatusTable=agStatusTable, mngmtAgentTrap_8036=mngmtAgentTrap_8036, mngmtAgentTrap_6008=mngmtAgentTrap_6008, sCellEventTrap_d_4c=sCellEventTrap_d_4c, mngmtAgentTrap_17015=mngmtAgentTrap_17015, sCellEventTrap_d_b5=sCellEventTrap_d_b5, sCellEventTrap_c_11=sCellEventTrap_c_11, mngmtAgentTrap_2040=mngmtAgentTrap_2040, mngmtAgentTrap_8009=mngmtAgentTrap_8009, mngmtAgentTrap_27044=mngmtAgentTrap_27044, mngmtAgentTrap_5014=mngmtAgentTrap_5014, sCellEventTrap_9_44=sCellEventTrap_9_44, srvComputerType=srvComputerType, mngmtAgentTrap_2032=mngmtAgentTrap_2032, sCellEventTrap_9_22=sCellEventTrap_9_22, scellEventCode=scellEventCode, sCellEventTrap_9_2a=sCellEventTrap_9_2a, mngmtAgentTrap_6014=mngmtAgentTrap_6014, mngmtAgentTrap_8006=mngmtAgentTrap_8006, mngmtAgentTrap_15001=mngmtAgentTrap_15001, shelfId=shelfId, sCellEventTrap_3_4=sCellEventTrap_3_4, mngmtAgentTrap_48=mngmtAgentTrap_48, maHSVMibRevMajor=maHSVMibRevMajor, mngmtAgentTrap_16015=mngmtAgentTrap_16015, mngmtAgentTrap_25016=mngmtAgentTrap_25016, mngmtAgentTrap_2041=mngmtAgentTrap_2041, mngmtAgentTrap_6024=mngmtAgentTrap_6024, 
mngmtAgentTrap_8041=mngmtAgentTrap_8041, hostTotal=hostTotal, mngmtAgentTrap_15008=mngmtAgentTrap_15008, sCellEventTrap_9_20=sCellEventTrap_9_20, mngmtAgentTrap_34=mngmtAgentTrap_34, mngmtAgentTrap_6005=mngmtAgentTrap_6005, nscTotal=nscTotal, agentEventDescription=agentEventDescription, mngmtAgentTrap_9015=mngmtAgentTrap_9015, mngmtAgentTrap_3048=mngmtAgentTrap_3048, mngmtAgentTrap_40=mngmtAgentTrap_40, mngmtAgentTrap_25001=mngmtAgentTrap_25001, sCellEventTrap_9_ca=sCellEventTrap_9_ca, mngmtAgentTrap_8033=mngmtAgentTrap_8033, mngmtAgentTrap_2103=mngmtAgentTrap_2103, mngmtAgentTrap_18004=mngmtAgentTrap_18004, mngmtAgentTrap_25004=mngmtAgentTrap_25004, mngmtAgentTrap_2078=mngmtAgentTrap_2078, sCellEventTrap_6_14=sCellEventTrap_6_14, mngmtAgentTrap_9017=mngmtAgentTrap_9017, emuEventTrapCritical=emuEventTrapCritical, sCellEventTrap_42_3=sCellEventTrap_42_3, mngmtAgentTrap_6012=mngmtAgentTrap_6012, mngmtAgentTrap_8056=mngmtAgentTrap_8056, mngmtAgentTrap_9022=mngmtAgentTrap_9022, mngmtAgentTrap_24001=mngmtAgentTrap_24001, mngmtAgentTrap_27049=mngmtAgentTrap_27049, sCellEventTrap_7_2=sCellEventTrap_7_2, nscStatus=nscStatus, mngmtAgentTrap_3051=mngmtAgentTrap_3051, sCellEventTrap_9_14=sCellEventTrap_9_14, mngmtAgentTrap_16016=mngmtAgentTrap_16016, mngmtAgentTrap_105=mngmtAgentTrap_105, sCellEventTrap_9_3a=sCellEventTrap_9_3a, sCellEventTrap_d_7f=sCellEventTrap_d_7f, mngmtAgentTrap_15006=mngmtAgentTrap_15006, mngmtAgentTrap_3061=mngmtAgentTrap_3061, mngmtAgentTrap_25012=mngmtAgentTrap_25012, mngmtAgentTrap_13002=mngmtAgentTrap_13002, sCellEventTrap_6_5=sCellEventTrap_6_5, mngmtAgentTrap_8=mngmtAgentTrap_8, mngmtAgentTrap_18052=mngmtAgentTrap_18052, mngmtAgentTrap_27014=mngmtAgentTrap_27014, mngmtAgentTrap_3017=mngmtAgentTrap_3017, sCellEventTrap_9_d0=sCellEventTrap_9_d0, agentEntryIndex=agentEntryIndex, sCellEventTrap_d_35=sCellEventTrap_d_35, sCellEventTrap_9_43=sCellEventTrap_9_43, sCellEventTrap_7_3=sCellEventTrap_7_3, mngmtAgentTrap_1003=mngmtAgentTrap_1003, 
mngmtAgentTrap_10019=mngmtAgentTrap_10019, sCellEventTrap_9_69=sCellEventTrap_9_69, mngmtAgentTrap_2035=mngmtAgentTrap_2035, mngmtAgentTrap_4049=mngmtAgentTrap_4049, mngmtAgentTrap_10017=mngmtAgentTrap_10017, mngmtAgentTrap_8039=mngmtAgentTrap_8039, mngmtAgentTrap_16004=mngmtAgentTrap_16004, mngmtAgentTrap_10043=mngmtAgentTrap_10043, mngmtAgentTrap_10=mngmtAgentTrap_10, mngmtAgentTrap_8051=mngmtAgentTrap_8051, mngmtAgentTrap_8070=mngmtAgentTrap_8070, mngmtAgentTrap_18074=mngmtAgentTrap_18074, sCellEventTrap_c_4=sCellEventTrap_c_4, mngmtAgentTrap_14002=mngmtAgentTrap_14002, mngmtAgentTrap_5010=mngmtAgentTrap_5010, sCellEventTrap_9_cc=sCellEventTrap_9_cc, mngmtAgentTrap_10018=mngmtAgentTrap_10018, sCellEventTrap_b_0=sCellEventTrap_b_0, mngmtAgentTrap_27016=mngmtAgentTrap_27016, mngmtAgentTrap_10014=mngmtAgentTrap_10014, sCellEventTrap_9_28=sCellEventTrap_9_28, mngmtAgentTrap_52=mngmtAgentTrap_52, mngmtAgentTrap_13003=mngmtAgentTrap_13003, mngmtAgentTrap_11001=mngmtAgentTrap_11001, mngmtAgentTrap_4037=mngmtAgentTrap_4037, mngmtAgentTrap_27034=mngmtAgentTrap_27034, mngmtAgentTrap_35=mngmtAgentTrap_35, sCellEventTrap_d_85=sCellEventTrap_d_85, mngmtAgentTrap_2023=mngmtAgentTrap_2023, sCellEventTrap_9_c8=sCellEventTrap_9_c8, sCellEventTrap_c_3=sCellEventTrap_c_3, mngmtAgentTrap_20020=mngmtAgentTrap_20020, hostStatus=hostStatus, mngmtAgentTrap_27002=mngmtAgentTrap_27002, mngmtAgentTrap_10023=mngmtAgentTrap_10023, mngmtAgentTrap_18018=mngmtAgentTrap_18018, sCellEventTrap_6_34=sCellEventTrap_6_34, mngmtAgentTrap_104=mngmtAgentTrap_104, mngmtAgentTrap_12003=mngmtAgentTrap_12003, mngmtAgentTrap_117=mngmtAgentTrap_117, sCellEventTrap_6_24=sCellEventTrap_6_24, mngmtAgentTrap_2085=mngmtAgentTrap_2085, mngmtAgentTrap_8019=mngmtAgentTrap_8019, mngmtAgentTrap_5005=mngmtAgentTrap_5005, mngmtAgentTrap_16014=mngmtAgentTrap_16014, mngmtAgentTrap_2003=mngmtAgentTrap_2003, mngmtAgentTrap_8025=mngmtAgentTrap_8025, mngmtAgentTrap_8087=mngmtAgentTrap_8087, 
mngmtAgentTrap_14001=mngmtAgentTrap_14001, mngmtAgentTrap_2071=mngmtAgentTrap_2071, mngmtAgentTrap_6033=mngmtAgentTrap_6033, mngmtAgentTrap_2063=mngmtAgentTrap_2063, sCellEventTrap_c_2=sCellEventTrap_c_2, sCellEventTrap_6_1d=sCellEventTrap_6_1d, mngmtAgentTrap_18060=mngmtAgentTrap_18060, mngmtAgentTrap_20021=mngmtAgentTrap_20021, mngmtAgentTrap_27010=mngmtAgentTrap_27010, mngmtAgentTrap_61=mngmtAgentTrap_61, mngmtAgentTrap_3003=mngmtAgentTrap_3003, mngmtAgentTrap_31=mngmtAgentTrap_31, mngmtAgentTrap_17006=mngmtAgentTrap_17006, sCellEventTrap_d_2=sCellEventTrap_d_2, sCellEventTrap_6_30=sCellEventTrap_6_30, mngmtAgentTrap_2006=mngmtAgentTrap_2006, sCellEventTrap_9_2e=sCellEventTrap_9_2e, mngmtAgentTrap_6001=mngmtAgentTrap_6001, sCellEventTrap_9_35=sCellEventTrap_9_35, mngmtAgentTrap_6030=mngmtAgentTrap_6030, sCellEventTrap_9_16=sCellEventTrap_9_16, mngmtAgentTrap_56=mngmtAgentTrap_56, mngmtAgentTrap_118=mngmtAgentTrap_118, mngmtAgentTrap_4032=mngmtAgentTrap_4032, mngmtAgentTrap_3090=mngmtAgentTrap_3090, mngmtAgentTrap_16032=mngmtAgentTrap_16032, sCellEventTrap_9_79=sCellEventTrap_9_79, mngmtAgentTrap_1004=mngmtAgentTrap_1004, mngmtAgentTrap_25018=mngmtAgentTrap_25018, mngmtAgentTrap_8024=mngmtAgentTrap_8024, sCellEventTrap_9_73=sCellEventTrap_9_73, sCellEventTrap_7_6=sCellEventTrap_7_6, mngmtAgentTrap_2070=mngmtAgentTrap_2070, mngmtAgentTrap_9008=mngmtAgentTrap_9008, mngmtAgentTrap_123=mngmtAgentTrap_123, mngmtAgentTrap_28=mngmtAgentTrap_28, mngmtAgentTrap_21015=mngmtAgentTrap_21015, mngmtAgentTrap_15009=mngmtAgentTrap_15009, mngmtAgentTrap_43=mngmtAgentTrap_43, scellUUID=scellUUID, mngmtAgentTrap_93=mngmtAgentTrap_93, sCellEventTrap_d_f0=sCellEventTrap_d_f0, sCellEventTrap_83_6=sCellEventTrap_83_6, mngmtAgentTrap_4031=mngmtAgentTrap_4031, mngmtAgentTrap_2102=mngmtAgentTrap_2102, mngmtAgentTrap_16035=mngmtAgentTrap_16035, mngmtAgentTrap_8023=mngmtAgentTrap_8023, mngmtAgentTrap_38=mngmtAgentTrap_38, mngmtAgentTrap_14=mngmtAgentTrap_14, 
mngmtAgentTrap_3037=mngmtAgentTrap_3037, mngmtAgentTrap_3053=mngmtAgentTrap_3053, mngmtAgentTrap_4024=mngmtAgentTrap_4024, mngmtAgentTrap_1006=mngmtAgentTrap_1006, mngmtAgentTrap_9019=mngmtAgentTrap_9019, mngmtAgentTrap_62=mngmtAgentTrap_62, sCellEventTrap_6_3d=sCellEventTrap_6_3d, mngmtAgentTrap_13007=mngmtAgentTrap_13007, mngmtAgentTrap_21018=mngmtAgentTrap_21018, mngmtAgentTrap_27005=mngmtAgentTrap_27005, nscUUID=nscUUID, mngmtAgentTrap_6011=mngmtAgentTrap_6011, mngmtAgentTrap_2097=mngmtAgentTrap_2097, mngmtAgentTrap_2073=mngmtAgentTrap_2073, mngmtAgentTrap_60=mngmtAgentTrap_60, mngmtAgentTrap_3066=mngmtAgentTrap_3066, sCellEventTrap_9_45=sCellEventTrap_9_45, mngmtAgentTrap_44=mngmtAgentTrap_44, mngmtAgentTrap_63=mngmtAgentTrap_63, mngmtAgentTrap_3069=mngmtAgentTrap_3069, mngmtAgentTrap_8015=mngmtAgentTrap_8015, mngmtAgentTrap_3025=mngmtAgentTrap_3025, mngmtAgentTrap_4058=mngmtAgentTrap_4058, mngmtAgentTrap_8048=mngmtAgentTrap_8048, sCellEventTrap_6_1b=sCellEventTrap_6_1b, mngmtAgentTrap_6022=mngmtAgentTrap_6022, mngmtAgentTrap_8090=mngmtAgentTrap_8090, mngmtAgentTrap_9031=mngmtAgentTrap_9031, mngmtAgentTrap_14009=mngmtAgentTrap_14009, mngmtAgentTrap_16=mngmtAgentTrap_16, agentStatus=agentStatus, mngmtAgentTrap_10029=mngmtAgentTrap_10029, agent=agent, sCellEventTrap_9_25=sCellEventTrap_9_25, mngmtAgentTrap_4016=mngmtAgentTrap_4016, mngmtAgentTrap_2031=mngmtAgentTrap_2031, mngmtAgentTrap_8053=mngmtAgentTrap_8053, mngmtAgentTrap_10037=mngmtAgentTrap_10037, sCellEventTrap_9_48=sCellEventTrap_9_48, mngmtAgentTrap_3063=mngmtAgentTrap_3063, mngmtAgentTrap_4013=mngmtAgentTrap_4013, mngmtAgentTrap_8037=mngmtAgentTrap_8037, mngmtAgentTrap_2066=mngmtAgentTrap_2066, mngmtAgentTrap_9028=mngmtAgentTrap_9028, sCellEventTrap_9_a=sCellEventTrap_9_a, sCellEventTrap_6_26=sCellEventTrap_6_26, mngmtAgentTrap_1=mngmtAgentTrap_1, mngmtAgentTrap_2057=mngmtAgentTrap_2057, mngmtAgentTrap_26005=mngmtAgentTrap_26005, sCellEventTrap_4_b=sCellEventTrap_4_b, 
mngmtAgentTrap_8049=mngmtAgentTrap_8049, sCellEventTrap_6_2e=sCellEventTrap_6_2e, mngmtAgentTrap_3086=mngmtAgentTrap_3086, mngmtAgentTrap_4030=mngmtAgentTrap_4030, sCellEventTrap_9_18=sCellEventTrap_9_18, mngmtAgentTrap_6003=mngmtAgentTrap_6003, srvOSMajVersion=srvOSMajVersion, mngmtAgentTrap_6010=mngmtAgentTrap_6010)
mibBuilder.exportSymbols("CPQHSV110V3-MIB", mngmtAgentTrap_18071=mngmtAgentTrap_18071, mngmtAgentTrap_27022=mngmtAgentTrap_27022, mngmtAgentTrap_27045=mngmtAgentTrap_27045, sCellEventTrap_9_71=sCellEventTrap_9_71, agMinVersion=agMinVersion, mngmtAgentTrap_2012=mngmtAgentTrap_2012, mngmtAgentTrap_2065=mngmtAgentTrap_2065, mngmtAgentTrap_4041=mngmtAgentTrap_4041, sCellEventTrap_6_28=sCellEventTrap_6_28, mngmtAgentTrap_8062=mngmtAgentTrap_8062, sCellEventTrap_9_9=sCellEventTrap_9_9, mngmtAgentTrap_27024=mngmtAgentTrap_27024, mngmtAgentTrap_3016=mngmtAgentTrap_3016, mngmtAgentTrap_24=mngmtAgentTrap_24, mngmtAgentTrap_8022=mngmtAgentTrap_8022, mngmtAgentTrap_16017=mngmtAgentTrap_16017, mngmtAgentTrap_4020=mngmtAgentTrap_4020, mngmtAgentTrap_116=mngmtAgentTrap_116, mngmtAgentTrap_2095=mngmtAgentTrap_2095, mngmtAgentTrap_2100=mngmtAgentTrap_2100, mngmtAgentTrap_8038=mngmtAgentTrap_8038, mngmtAgentTrap_8043=mngmtAgentTrap_8043, mngmtAgentTrap_16038=mngmtAgentTrap_16038, mngmtAgentTrap_10028=mngmtAgentTrap_10028, sCellEventTrap_9_39=sCellEventTrap_9_39, mngmtAgentTrap_1007=mngmtAgentTrap_1007, mngmtAgentTrap_2050=mngmtAgentTrap_2050, srvOSMinVersion=srvOSMinVersion, mngmtAgentTrap_2051=mngmtAgentTrap_2051, sCellEventTrap_9_1a=sCellEventTrap_9_1a, mngmtAgentTrap_14008=mngmtAgentTrap_14008, mngmtAgentTrap_18049=mngmtAgentTrap_18049, mngmtAgentTrap_26011=mngmtAgentTrap_26011, sCellEventTrap_c_c=sCellEventTrap_c_c, mngmtAgentTrap_8057=mngmtAgentTrap_8057, mngmtAgentTrap_26006=mngmtAgentTrap_26006, mngmtAgentTrap_2068=mngmtAgentTrap_2068, mngmtAgentTrap_4004=mngmtAgentTrap_4004, sCellEventTrap_d_de=sCellEventTrap_d_de, mngmtAgentTrap_2087=mngmtAgentTrap_2087, mngmtAgentTrap_14010=mngmtAgentTrap_14010, mngmtAgentTrap_22002=mngmtAgentTrap_22002, mngmtAgentTrap_18022=mngmtAgentTrap_18022, agHostName=agHostName, mngmtAgentTrap_2038=mngmtAgentTrap_2038, mngmtAgentTrap_2091=mngmtAgentTrap_2091, mngmtAgentTrap_16028=mngmtAgentTrap_16028, mngmtAgentTrap_1002=mngmtAgentTrap_1002, 
mngmtAgentTrap_45=mngmtAgentTrap_45, mngmtAgentTrap_8080=mngmtAgentTrap_8080, mngmtAgentTrap_2011=mngmtAgentTrap_2011, mngmtAgentTrap_3095=mngmtAgentTrap_3095, scellName=scellName, mngmtAgentTrap_8034=mngmtAgentTrap_8034, sCellEventTrap_42_1=sCellEventTrap_42_1, mngmtAgentTrap_2030=mngmtAgentTrap_2030, mngmtAgentTrap_3013=mngmtAgentTrap_3013, mngmtAgentTrap_8067=mngmtAgentTrap_8067, mngmtAgentTrap_2080=mngmtAgentTrap_2080, mngmtAgentTrap_9009=mngmtAgentTrap_9009, mngmtAgentTrap_9030=mngmtAgentTrap_9030, mngmtAgentTrap_10015=mngmtAgentTrap_10015, mngmtAgentTrap_16005=mngmtAgentTrap_16005, mngmtAgentTrap_17004=mngmtAgentTrap_17004, mngmtAgentTrap_27013=mngmtAgentTrap_27013, sCellEventTrap_d_72=sCellEventTrap_d_72, mngmtAgentTrap_101=mngmtAgentTrap_101, mngmtAgentTrap_16020=mngmtAgentTrap_16020, sCellEventTrap_6_4=sCellEventTrap_6_4, shelfElementType=shelfElementType, shelfStatusTable=shelfStatusTable, sCellEventTrap_9_d=sCellEventTrap_9_d, sCellEventTrap_6_e=sCellEventTrap_6_e, mngmtAgentTrap_2067=mngmtAgentTrap_2067, mngmtAgentTrap_12002=mngmtAgentTrap_12002, mngmtAgentTrap_37=mngmtAgentTrap_37, mngmtAgentTrap_16019=mngmtAgentTrap_16019, mngmtAgentTrap_17007=mngmtAgentTrap_17007, mngmtAgentTrap_4040=mngmtAgentTrap_4040, mngmtAgentTrap_86=mngmtAgentTrap_86, sCellEventTrap_3_1=sCellEventTrap_3_1, sCellEventTrap_4_2=sCellEventTrap_4_2, mngmtAgentTrap_4=mngmtAgentTrap_4, mngmtAgentTrap_6004=mngmtAgentTrap_6004, mngmtAgentTrap_3083=mngmtAgentTrap_3083, mngmtAgentTrap_8011=mngmtAgentTrap_8011, mngmtAgentTrap_25009=mngmtAgentTrap_25009, mngmtAgentTrap_8040=mngmtAgentTrap_8040, mngmtAgentTrap_20=mngmtAgentTrap_20, mngmtAgentTrap_13018=mngmtAgentTrap_13018, mngmtAgentTrap_80=mngmtAgentTrap_80, mngmtAgentTrap_6006=mngmtAgentTrap_6006, sCellEventTrap_6_3b=sCellEventTrap_6_3b, mngmtAgentTrap_97=mngmtAgentTrap_97, mngmtAgentTrap_3007=mngmtAgentTrap_3007, mngmtAgentTrap_9003=mngmtAgentTrap_9003, mngmtAgentTrap_18041=mngmtAgentTrap_18041, mngmtAgentTrap_20001=mngmtAgentTrap_20001, 
mngmtAgentTrap_16010=mngmtAgentTrap_16010, sCellEventTrap_7_7=sCellEventTrap_7_7, mngmtAgentTrap_69=mngmtAgentTrap_69, mngmtAgentTrap_25007=mngmtAgentTrap_25007, sCellEventTrap_6_35=sCellEventTrap_6_35, mngmtAgentTrap_3059=mngmtAgentTrap_3059, sCellEventTrap_9_78=sCellEventTrap_9_78, mngmtAgentTrap_2099=mngmtAgentTrap_2099, sCellEventTrap_7_5=sCellEventTrap_7_5, mngmtAgentTrap_1005=mngmtAgentTrap_1005, mngmtAgentTrap_11=mngmtAgentTrap_11, mngmtAgentTrap_3022=mngmtAgentTrap_3022, mngmtAgentTrap_32=mngmtAgentTrap_32, mngmtAgentTrap_3004=mngmtAgentTrap_3004, mngmtAgentTrap_8027=mngmtAgentTrap_8027, mngmtAgentTrap_65=mngmtAgentTrap_65, mngmtAgentTrap_2061=mngmtAgentTrap_2061, mngmtAgentTrap_20004=mngmtAgentTrap_20004, sCellEventTrap_d_6f=sCellEventTrap_d_6f, mngmtAgentTrap_16034=mngmtAgentTrap_16034, mngmtAgentTrap_18059=mngmtAgentTrap_18059, mngmtAgentTrap_3064=mngmtAgentTrap_3064, mngmtAgentTrap_9034=mngmtAgentTrap_9034, mngmtAgentTrap_8086=mngmtAgentTrap_8086, sCellEventTrap_9_6d=sCellEventTrap_9_6d, mngmtAgentTrap_27037=mngmtAgentTrap_27037, mngmtAgentTrap_41=mngmtAgentTrap_41, mngmtAgentTrap_18007=mngmtAgentTrap_18007, sCellEventTrap_6_2=sCellEventTrap_6_2, sCellEventTrap_9_d5=sCellEventTrap_9_d5, sCellEventTrap_d_5b=sCellEventTrap_d_5b, mngmtAgentTrap_2058=mngmtAgentTrap_2058, mngmtAgentTrap_2088=mngmtAgentTrap_2088, mngmtAgentTrap_5002=mngmtAgentTrap_5002, mngmtAgentTrap_4012=mngmtAgentTrap_4012, mngmtAgentTrap_27025=mngmtAgentTrap_27025, mngmtAgentTrap_8085=mngmtAgentTrap_8085, mngmtAgentTrap_3068=mngmtAgentTrap_3068, mngmtAgentTrap_5003=mngmtAgentTrap_5003, mngmtAgentTrap_27038=mngmtAgentTrap_27038, mngmtAgentTrap_21=mngmtAgentTrap_21, mngmtAgentTrap_2047=mngmtAgentTrap_2047, sCellEventTrap_83_0=sCellEventTrap_83_0, agentEventTimeDate=agentEventTimeDate, sCellEventTrap_6_13=sCellEventTrap_6_13, mngmtAgentTrap_3019=mngmtAgentTrap_3019, mngmtAgentTrap_16031=mngmtAgentTrap_16031, mngmtAgentTrap_75=mngmtAgentTrap_75, mngmtAgentTrap_25013=mngmtAgentTrap_25013, 
mngmtAgentTrap_18003=mngmtAgentTrap_18003, mngmtAgentTrap_3060=mngmtAgentTrap_3060, mngmtAgentTrap_27032=mngmtAgentTrap_27032, mngmtAgentTrap_107=mngmtAgentTrap_107, sCellEventTrap_9_33=sCellEventTrap_9_33, sCellEventTrap_9_e=sCellEventTrap_9_e, mngmtAgentTrap_21010=mngmtAgentTrap_21010, mngmtAgentTrap_127=mngmtAgentTrap_127, mngmtAgentTrap_12001=mngmtAgentTrap_12001, mngmtAgentTrap_16036=mngmtAgentTrap_16036, mngmtAgentTrap_6031=mngmtAgentTrap_6031, mngmtAgentTrap_10022=mngmtAgentTrap_10022, sCellEventTrap_6_7=sCellEventTrap_6_7, mngmtAgentTrap_4043=mngmtAgentTrap_4043, sCellEventTrap_9_1f=sCellEventTrap_9_1f, mngmtAgentTrap_3070=mngmtAgentTrap_3070, mngmtAgentTrap_9033=mngmtAgentTrap_9033, mngmtAgentTrap_10020=mngmtAgentTrap_10020, mngmtAgentTrap_27029=mngmtAgentTrap_27029, sCellEventTrap_9_3d=sCellEventTrap_9_3d, sCellEventTrap_9_66=sCellEventTrap_9_66, hostStatusTable=hostStatusTable, mngmtAgentTrap_102=mngmtAgentTrap_102, mngmtAgentTrap_13019=mngmtAgentTrap_13019, mngmtAgentTrap_3047=mngmtAgentTrap_3047, mngmtAgentTrap_3094=mngmtAgentTrap_3094, sCellEventTrap_d_5f=sCellEventTrap_d_5f, mngmtAgentTrap_27021=mngmtAgentTrap_27021, mngmtAgentTrap_1001=mngmtAgentTrap_1001, compaq=compaq, mngmtAgentTrap_3029=mngmtAgentTrap_3029, mngmtAgentTrap_5007=mngmtAgentTrap_5007, sCellEventTrap_3_2=sCellEventTrap_3_2, sCellEventTrap_9_4=sCellEventTrap_9_4, mngmtAgentTrap_15003=mngmtAgentTrap_15003, mngmtAgentTrap_6032=mngmtAgentTrap_6032, mngmtAgentTrap_18034=mngmtAgentTrap_18034, mngmtAgentTrap_18028=mngmtAgentTrap_18028, mngmtAgentTrap_26010=mngmtAgentTrap_26010, mngmtAgentTrap_5004=mngmtAgentTrap_5004, sCellEventTrap_6_20=sCellEventTrap_6_20, mngmtAgentTrap_50=mngmtAgentTrap_50, mngmtAgentTrap_2098=mngmtAgentTrap_2098, mngmtAgentTrap_4007=mngmtAgentTrap_4007, mngmtAgentTrap_9023=mngmtAgentTrap_9023, hostUUID=hostUUID, mngmtAgentTrap_10040=mngmtAgentTrap_10040, mngmtAgentTrap_25002=mngmtAgentTrap_25002, mngmtAgentTrap_18065=mngmtAgentTrap_18065, 
sCellEventTrap_9_cf=sCellEventTrap_9_cf, sCellEventTrap_9_74=sCellEventTrap_9_74, mngmtAgentTrap_46=mngmtAgentTrap_46, mngmtAgentTrap_8063=mngmtAgentTrap_8063, sCellEventTrap_4_7=sCellEventTrap_4_7, sCellEventTrap_6_3=sCellEventTrap_6_3, sCellEventTrap_6_18=sCellEventTrap_6_18, mngmtAgentTrap_36=mngmtAgentTrap_36, mngmtAgentTrap_3015=mngmtAgentTrap_3015, mngmtAgentTrap_4048=mngmtAgentTrap_4048, sCellEventTrap_9_17=sCellEventTrap_9_17, mngmtAgentTrap_16039=mngmtAgentTrap_16039, mngmtAgentTrap_2062=mngmtAgentTrap_2062, mngmtAgentTrap_59=mngmtAgentTrap_59, mngmtAgentTrap_4023=mngmtAgentTrap_4023, mngmtAgentTrap_10027=mngmtAgentTrap_10027, sCellEventTrap_9_46=sCellEventTrap_9_46, sCellEventTrap_9_ce=sCellEventTrap_9_ce, mngmtAgentTrap_27019=mngmtAgentTrap_27019, mngmtAgentTrap_27026=mngmtAgentTrap_27026, mngmtAgentTrap_87=mngmtAgentTrap_87, sCellEventTrap_6_a=sCellEventTrap_6_a, sCellEventTrap_6_37=sCellEventTrap_6_37, sCellEventTrap_c_12=sCellEventTrap_c_12, hostEntryIndex=hostEntryIndex, sCellEventTrap_9_26=sCellEventTrap_9_26, mngmtAgentTrap_18045=mngmtAgentTrap_18045, mngmtAgentTrap_4059=mngmtAgentTrap_4059, mngmtAgentTrap_15005=mngmtAgentTrap_15005, sCellEventTrap_6_3c=sCellEventTrap_6_3c, nscStatusTable=nscStatusTable, mngmtAgentTrap_124=mngmtAgentTrap_124, sCellEventTrap_6_c=sCellEventTrap_6_c, sCellEventTrap_9_8=sCellEventTrap_9_8, mngmtAgentTrap_18039=mngmtAgentTrap_18039, sCellEventTrap_9_d1=sCellEventTrap_9_d1, mngmtAgentTrap_9018=mngmtAgentTrap_9018, sCellEventTrap_c_0=sCellEventTrap_c_0, mngmtAgentTrap_8007=mngmtAgentTrap_8007, mngmtAgentTrap_21017=mngmtAgentTrap_21017, sCellEventTrap_6_3e=sCellEventTrap_6_3e, sCellEventTrap_9_1c=sCellEventTrap_9_1c, mngmtAgentTrap_5015=mngmtAgentTrap_5015, mngmtAgentTrap_20016=mngmtAgentTrap_20016, mngmtAgentTrap_20017=mngmtAgentTrap_20017, scellTotal=scellTotal, mngmtAgentTrap_84=mngmtAgentTrap_84, mngmtAgentTrap_8075=mngmtAgentTrap_8075, mngmtAgentTrap_8031=mngmtAgentTrap_8031, mngmtAgentTrap_10036=mngmtAgentTrap_10036, 
sCellEventTrap_42_5=sCellEventTrap_42_5, mngmtAgentTrap_13012=mngmtAgentTrap_13012, mngmtAgentTrap_6002=mngmtAgentTrap_6002, sCellEventTrap_d_4b=sCellEventTrap_d_4b, mngmtAgentTrap_21002=mngmtAgentTrap_21002, mngmtAgentTrap_27017=mngmtAgentTrap_27017, mngmtAgentTrap_22001=mngmtAgentTrap_22001, mngmtAgentTrap_27040=mngmtAgentTrap_27040)
mibBuilder.exportSymbols("CPQHSV110V3-MIB", sCellEventTrap_9_3e=sCellEventTrap_9_3e, mngmtAgentTrap_18075=mngmtAgentTrap_18075, cpqHSV=cpqHSV, sCellEventTrap_6_38=sCellEventTrap_6_38, sCellEventTrap_42_4=sCellEventTrap_42_4, mngmtAgentTrap_16018=mngmtAgentTrap_16018, mngmtAgentTrap_13=mngmtAgentTrap_13, mngmtAgentTrap_20013=mngmtAgentTrap_20013, mngmtAgentTrap_2001=mngmtAgentTrap_2001, mngmtAgentTrap_8029=mngmtAgentTrap_8029, sCellEventTrap_9_d4=sCellEventTrap_9_d4, sCellEventTrap_6_25=sCellEventTrap_6_25, sCellEventTrap_6_15=sCellEventTrap_6_15, mngmtAgentTrap_4047=mngmtAgentTrap_4047, shelfEntry=shelfEntry, mngmtAgentTrap_4028=mngmtAgentTrap_4028, mngmtAgentTrap_9016=mngmtAgentTrap_9016, sCellEventTrap_d_4=sCellEventTrap_d_4, sCellEventTrap_83_3=sCellEventTrap_83_3, mngmtAgentTrap_8060=mngmtAgentTrap_8060, mngmtAgentTrap_47=mngmtAgentTrap_47, mngmtAgentTrap_2059=mngmtAgentTrap_2059, mngmtAgentTrap_21003=mngmtAgentTrap_21003, mngmtAgentTrap_27009=mngmtAgentTrap_27009, scellCAC=scellCAC, mngmtAgentTrap_9036=mngmtAgentTrap_9036, mngmtAgentTrap_8005=mngmtAgentTrap_8005, sCellEventTrap_9_21=sCellEventTrap_9_21, mngmtAgentTrap_3084=mngmtAgentTrap_3084, mngmtAgentTrap_3039=mngmtAgentTrap_3039, mngmtAgentTrap_2004=mngmtAgentTrap_2004, mngmtAgentTrap_6020=mngmtAgentTrap_6020, mngmtAgentTrap_8042=mngmtAgentTrap_8042, sCellEventTrap_d_d8=sCellEventTrap_d_d8, mngmtAgentTrap_20018=mngmtAgentTrap_20018, mngmtAgentTrap_27003=mngmtAgentTrap_27003, mngmtAgentTrap_27033=mngmtAgentTrap_27033, mngmtAgentTrap_5017=mngmtAgentTrap_5017, sCellEventTrap_c_f=sCellEventTrap_c_f, mngmtAgentTrap_20002=mngmtAgentTrap_20002, sCellEventTrap_4_e=sCellEventTrap_4_e, sCellEventTrap_9_15=sCellEventTrap_9_15, mngmtAgentTrap_3058=mngmtAgentTrap_3058, mngmtAgentTrap_2060=mngmtAgentTrap_2060, mngmtAgentTrap_3056=mngmtAgentTrap_3056, mngmtAgentTrap_8083=mngmtAgentTrap_8083, mngmtAgentTrap_17003=mngmtAgentTrap_17003, mngmtAgentTrap_92=mngmtAgentTrap_92, mngmtAgentTrap_98=mngmtAgentTrap_98, 
mngmtAgentTrap_23003=mngmtAgentTrap_23003, mngmtAgentTrap_25015=mngmtAgentTrap_25015, scellStatus=scellStatus, mngmtAgentTrap_3054=mngmtAgentTrap_3054, mngmtAgentTrap_4029=mngmtAgentTrap_4029, sCellEventTrap_9_3=sCellEventTrap_9_3, mngmtAgentTrap_11004=mngmtAgentTrap_11004, sCellEventTrap_6_0=sCellEventTrap_6_0, mngmtAgentTrap_2076=mngmtAgentTrap_2076, sCellEventTrap_9_c9=sCellEventTrap_9_c9, mngmtAgentTrap_1000=mngmtAgentTrap_1000, shelfElementNum=shelfElementNum, mngmtAgentTrap_29=mngmtAgentTrap_29, mngmtAgentTrap_8012=mngmtAgentTrap_8012, mngmtAgentTrap_27043=mngmtAgentTrap_27043, mngmtAgentTrap_114=mngmtAgentTrap_114, mngmtAgentTrap_2049=mngmtAgentTrap_2049, mngmtAgentTrap_54=mngmtAgentTrap_54, mngmtAgentTrap_27015=mngmtAgentTrap_27015, sCellEventTrap_c_15=sCellEventTrap_c_15, sCellEventTrap_9_31=sCellEventTrap_9_31, mngmtAgentTrap_22=mngmtAgentTrap_22, mngmtAgentTrap_27001=mngmtAgentTrap_27001, mngmtAgentTrap_10006=mngmtAgentTrap_10006, mngmtAgentTrap_14003=mngmtAgentTrap_14003, mngmtAgentTrap_18024=mngmtAgentTrap_18024, mngmtAgentTrap_49=mngmtAgentTrap_49, nscEntry=nscEntry, sCellEventTrap_9_5=sCellEventTrap_9_5, sCellEventTrap_42_0=sCellEventTrap_42_0, mngmtAgentTrap_72=mngmtAgentTrap_72, mngmtAgentTrap_10001=mngmtAgentTrap_10001, mngmtAgentTrap_18009=mngmtAgentTrap_18009, mngmtAgentTrap_2064=mngmtAgentTrap_2064, mngmtAgentTrap_4033=mngmtAgentTrap_4033, mngmtAgentTrap_10039=mngmtAgentTrap_10039, mngmtAgentTrap_3035=mngmtAgentTrap_3035, mngmtAgentTrap_8002=mngmtAgentTrap_8002, mngmtAgentTrap_17016=mngmtAgentTrap_17016, sCellEventTrap_d_d9=sCellEventTrap_d_d9, sCellEventTrap_d_1=sCellEventTrap_d_1, mngmtAgentTrap_9004=mngmtAgentTrap_9004, mngmtAgentTrap_27030=mngmtAgentTrap_27030, scellNameDateTime=scellNameDateTime, mngmtAgentTrap_3078=mngmtAgentTrap_3078, mngmtAgentTrap_18006=mngmtAgentTrap_18006, sCellEventTrap_9_12=sCellEventTrap_9_12, mngmtAgentTrap_8077=mngmtAgentTrap_8077, sCellEventTrap_9_cb=sCellEventTrap_9_cb, sCellEventTrap_4_9=sCellEventTrap_4_9, 
mngmtAgentTrap_129=mngmtAgentTrap_129, mngmtAgentTrap_2052=mngmtAgentTrap_2052, mngmtAgentTrap_4054=mngmtAgentTrap_4054, mngmtAgentTrap_10021=mngmtAgentTrap_10021, mngmtAgentTrap_16040=mngmtAgentTrap_16040, mngmtAgentTrap_3075=mngmtAgentTrap_3075, mngmtAgentTrap_96=mngmtAgentTrap_96, mngmtAgentTrap_18050=mngmtAgentTrap_18050, mngmtAgentTrap_25008=mngmtAgentTrap_25008, mngmtAgentTrap_3001=mngmtAgentTrap_3001, mngmtAgentTrap_18051=mngmtAgentTrap_18051, sCellEventTrap_9_d2=sCellEventTrap_9_d2, mngmtAgentTrap_8055=mngmtAgentTrap_8055, sCellEventTrap_9_77=sCellEventTrap_9_77, mngmtAgentTrap_8073=mngmtAgentTrap_8073, sCellEventTrap_c_6=sCellEventTrap_c_6, shelfEntryIndex=shelfEntryIndex, scell=scell, sCellEventTrap_9_70=sCellEventTrap_9_70, mngmtAgentTrap_91=mngmtAgentTrap_91, mngmtAgentTrap_5013=mngmtAgentTrap_5013, mngmtAgentTrap_10026=mngmtAgentTrap_10026, mngmtAgentTrap_10044=mngmtAgentTrap_10044, shelfStatus=shelfStatus, mngmtAgentTrap_14012=mngmtAgentTrap_14012, mngmtAgentTrap_64=mngmtAgentTrap_64, mngmtAgentTrap_67=mngmtAgentTrap_67, mngmtAgentTrap_3038=mngmtAgentTrap_3038, mngmtAgentTrap_4018=mngmtAgentTrap_4018, mngmtAgentTrap_20022=mngmtAgentTrap_20022, mngmtAgentTrap_26012=mngmtAgentTrap_26012, mngmtAgentTrap_27008=mngmtAgentTrap_27008, sCellEventTrap_4_1=sCellEventTrap_4_1, mngmtAgentTrap_12005=mngmtAgentTrap_12005, mngmtAgentTrap_2=mngmtAgentTrap_2, mngmtAgentTrap_112=mngmtAgentTrap_112, mngmtAgentTrap_8008=mngmtAgentTrap_8008)
| 273.160584
| 10,293
| 0.741742
| 34,862
| 336,807
| 7.05103
| 0.061012
| 0.261475
| 0.014072
| 0.018746
| 0.772465
| 0.768373
| 0.766038
| 0.763259
| 0.75998
| 0.674252
| 0
| 0.163935
| 0.067038
| 336,807
| 1,232
| 10,294
| 273.382305
| 0.618341
| 0.000962
| 0
| 0
| 0
| 0
| 0.401024
| 0.050747
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004898
| 0
| 0.004898
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.