hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
1a8490ef24ae0a7b9d0ea872f5bdb7234625f5d8
29
py
Python
py/lib/json/__init__.py
ivomarvan/ESP8266_RTC_memory_compression
7c3113533e08759fa40f2a5edfeed79a67367719
[ "MIT" ]
1
2021-04-15T04:25:16.000Z
2021-04-15T04:25:16.000Z
py/lib/json/__init__.py
ivomarvan/ESP8266_RTC_memory_compression
7c3113533e08759fa40f2a5edfeed79a67367719
[ "MIT" ]
null
null
null
py/lib/json/__init__.py
ivomarvan/ESP8266_RTC_memory_compression
7c3113533e08759fa40f2a5edfeed79a67367719
[ "MIT" ]
null
null
null
__author__ = 'ivo@marvan.cz'
14.5
28
0.724138
4
29
4.25
1
0
0
0
0
0
0
0
0
0
0
0
0.103448
29
1
29
29
0.653846
0
0
0
0
0
0.448276
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
1ace60bd1ab75e946abaed8375d05732cb0abd41
533
py
Python
attendence/faceRecognition/forms.py
baronrustamov/Face-Recognition
5b5c527a0be668f1e1132e95f31ab72a5d38437d
[ "Apache-2.0" ]
null
null
null
attendence/faceRecognition/forms.py
baronrustamov/Face-Recognition
5b5c527a0be668f1e1132e95f31ab72a5d38437d
[ "Apache-2.0" ]
null
null
null
attendence/faceRecognition/forms.py
baronrustamov/Face-Recognition
5b5c527a0be668f1e1132e95f31ab72a5d38437d
[ "Apache-2.0" ]
null
null
null
from django import forms from django.contrib.auth.forms import UserCreationForm from django.db import transaction class UserRegistrationForm(forms.Form): username = forms.CharField( required = True, label = 'Username', max_length = 32 ) email = forms.CharField( required = True, label = 'Email', max_length = 32 ) password = forms.CharField( required = True, label = 'Password', max_length = 32, widget = forms.PasswordInput() )
25.380952
54
0.613508
53
533
6.113208
0.45283
0.092593
0.203704
0.240741
0.287037
0
0
0
0
0
0
0.016086
0.300188
533
21
55
25.380952
0.852547
0
0
0.25
0
0
0.039326
0
0
0
0
0
0
1
0
false
0.15
0.15
0
0.35
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
46c74108472dbc8bb79bdb944797a7e3669c3a83
242
py
Python
jirabug/config.py
miaolujing/python_script
57ccf89f53ce0ce551804b5693515d8a8db4ce78
[ "Apache-2.0" ]
null
null
null
jirabug/config.py
miaolujing/python_script
57ccf89f53ce0ce551804b5693515d8a8db4ce78
[ "Apache-2.0" ]
null
null
null
jirabug/config.py
miaolujing/python_script
57ccf89f53ce0ce551804b5693515d8a8db4ce78
[ "Apache-2.0" ]
null
null
null
# config.py # -*- coding: utf-8 -*- __author__ = 'lilian' import gettime loginurl = "http://jira.uyunsoft.cn/login.jsp" username = "miaolj" passwd = "miaolj161208" searchurl = "http://jira.uyunsoft.cn/rest/api/2/search" maxResults = "10000"
22
55
0.698347
31
242
5.322581
0.870968
0.09697
0.193939
0.218182
0
0
0
0
0
0
0
0.060748
0.115702
242
11
56
22
0.71028
0.128099
0
0
0
0
0.492823
0
0
0
0
0
0
1
0
false
0.142857
0.142857
0
0.142857
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
46e9bf12bf1aaf80fa7427bada639f9c72dd6c07
61,990
py
Python
azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
JonathanGailliez/azure-sdk-for-python
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
[ "MIT" ]
1
2021-09-07T18:36:04.000Z
2021-09-07T18:36:04.000Z
azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
JonathanGailliez/azure-sdk-for-python
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
[ "MIT" ]
2
2019-10-02T23:37:38.000Z
2020-10-02T01:17:31.000Z
azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
JonathanGailliez/azure-sdk-for-python
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
[ "MIT" ]
1
2019-06-17T22:18:23.000Z
2019-06-17T22:18:23.000Z
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- try: from .resource_py3 import Resource from .sub_resource_py3 import SubResource from .expression_py3 import Expression from .secure_string_py3 import SecureString from .linked_service_reference_py3 import LinkedServiceReference from .azure_key_vault_secret_reference_py3 import AzureKeyVaultSecretReference from .secret_base_py3 import SecretBase from .factory_identity_py3 import FactoryIdentity from .factory_repo_configuration_py3 import FactoryRepoConfiguration from .factory_py3 import Factory from .integration_runtime_py3 import IntegrationRuntime from .integration_runtime_resource_py3 import IntegrationRuntimeResource from .integration_runtime_reference_py3 import IntegrationRuntimeReference from .integration_runtime_status_py3 import IntegrationRuntimeStatus from .integration_runtime_status_response_py3 import IntegrationRuntimeStatusResponse from .integration_runtime_status_list_response_py3 import IntegrationRuntimeStatusListResponse from .update_integration_runtime_request_py3 import UpdateIntegrationRuntimeRequest from .update_integration_runtime_node_request_py3 import UpdateIntegrationRuntimeNodeRequest from .linked_integration_runtime_request_py3 import LinkedIntegrationRuntimeRequest from .create_linked_integration_runtime_request_py3 import CreateLinkedIntegrationRuntimeRequest from .parameter_specification_py3 import ParameterSpecification from .linked_service_py3 import LinkedService from .linked_service_resource_py3 import LinkedServiceResource from .dataset_folder_py3 import 
DatasetFolder from .dataset_py3 import Dataset from .dataset_resource_py3 import DatasetResource from .activity_dependency_py3 import ActivityDependency from .user_property_py3 import UserProperty from .activity_py3 import Activity from .variable_specification_py3 import VariableSpecification from .pipeline_folder_py3 import PipelineFolder from .pipeline_resource_py3 import PipelineResource from .trigger_py3 import Trigger from .trigger_resource_py3 import TriggerResource from .create_run_response_py3 import CreateRunResponse from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration from .factory_repo_update_py3 import FactoryRepoUpdate from .git_hub_access_token_request_py3 import GitHubAccessTokenRequest from .git_hub_access_token_response_py3 import GitHubAccessTokenResponse from .user_access_policy_py3 import UserAccessPolicy from .access_policy_response_py3 import AccessPolicyResponse from .pipeline_reference_py3 import PipelineReference from .trigger_pipeline_reference_py3 import TriggerPipelineReference from .factory_update_parameters_py3 import FactoryUpdateParameters from .dataset_reference_py3 import DatasetReference from .run_query_filter_py3 import RunQueryFilter from .run_query_order_by_py3 import RunQueryOrderBy from .run_filter_parameters_py3 import RunFilterParameters from .pipeline_run_invoked_by_py3 import PipelineRunInvokedBy from .pipeline_run_py3 import PipelineRun from .pipeline_runs_query_response_py3 import PipelineRunsQueryResponse from .activity_run_py3 import ActivityRun from .activity_runs_query_response_py3 import ActivityRunsQueryResponse from .trigger_run_py3 import TriggerRun from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse from .rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger from 
.rerun_trigger_resource_py3 import RerunTriggerResource from .operation_display_py3 import OperationDisplay from .operation_log_specification_py3 import OperationLogSpecification from .operation_metric_availability_py3 import OperationMetricAvailability from .operation_metric_dimension_py3 import OperationMetricDimension from .operation_metric_specification_py3 import OperationMetricSpecification from .operation_service_specification_py3 import OperationServiceSpecification from .operation_py3 import Operation from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse from .exposure_control_request_py3 import ExposureControlRequest from .exposure_control_response_py3 import ExposureControlResponse from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference from .trigger_reference_py3 import TriggerReference from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference from .trigger_dependency_reference_py3 import TriggerDependencyReference from .dependency_reference_py3 import DependencyReference from .retry_policy_py3 import RetryPolicy from .tumbling_window_trigger_py3 import TumblingWindowTrigger from .blob_events_trigger_py3 import BlobEventsTrigger from .blob_trigger_py3 import BlobTrigger from .recurrence_schedule_occurrence_py3 import RecurrenceScheduleOccurrence from .recurrence_schedule_py3 import RecurrenceSchedule from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence from .schedule_trigger_py3 import ScheduleTrigger from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .azure_function_linked_service_py3 import AzureFunctionLinkedService from .responsys_linked_service_py3 import ResponsysLinkedService from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService from 
.azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService from .script_action_py3 import ScriptAction from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService from .netezza_linked_service_py3 import NetezzaLinkedService from .vertica_linked_service_py3 import VerticaLinkedService from .zoho_linked_service_py3 import ZohoLinkedService from .xero_linked_service_py3 import XeroLinkedService from .square_linked_service_py3 import SquareLinkedService from .spark_linked_service_py3 import SparkLinkedService from .shopify_linked_service_py3 import ShopifyLinkedService from .service_now_linked_service_py3 import ServiceNowLinkedService from .quick_books_linked_service_py3 import QuickBooksLinkedService from .presto_linked_service_py3 import PrestoLinkedService from .phoenix_linked_service_py3 import PhoenixLinkedService from .paypal_linked_service_py3 import PaypalLinkedService from .marketo_linked_service_py3 import MarketoLinkedService from .maria_db_linked_service_py3 import MariaDBLinkedService from .magento_linked_service_py3 import MagentoLinkedService from .jira_linked_service_py3 import JiraLinkedService from .impala_linked_service_py3 import ImpalaLinkedService from .hubspot_linked_service_py3 import HubspotLinkedService from .hive_linked_service_py3 import HiveLinkedService from .hbase_linked_service_py3 import HBaseLinkedService from .greenplum_linked_service_py3 import GreenplumLinkedService from .google_big_query_linked_service_py3 import GoogleBigQueryLinkedService from .eloqua_linked_service_py3 import EloquaLinkedService from .drill_linked_service_py3 import DrillLinkedService from .couchbase_linked_service_py3 import CouchbaseLinkedService from .concur_linked_service_py3 import ConcurLinkedService from .azure_postgre_sql_linked_service_py3 import AzurePostgreSqlLinkedService from 
.amazon_mws_linked_service_py3 import AmazonMWSLinkedService from .sap_hana_linked_service_py3 import SapHanaLinkedService from .sap_bw_linked_service_py3 import SapBWLinkedService from .sftp_server_linked_service_py3 import SftpServerLinkedService from .ftp_server_linked_service_py3 import FtpServerLinkedService from .http_linked_service_py3 import HttpLinkedService from .azure_search_linked_service_py3 import AzureSearchLinkedService from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService from .amazon_s3_linked_service_py3 import AmazonS3LinkedService from .sap_ecc_linked_service_py3 import SapEccLinkedService from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService from .salesforce_linked_service_py3 import SalesforceLinkedService from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService from .mongo_db_linked_service_py3 import MongoDbLinkedService from .cassandra_linked_service_py3 import CassandraLinkedService from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication from .web_basic_authentication_py3 import WebBasicAuthentication from .web_anonymous_authentication_py3 import WebAnonymousAuthentication from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties from .web_linked_service_py3 import WebLinkedService from .odata_linked_service_py3 import ODataLinkedService from .hdfs_linked_service_py3 import HdfsLinkedService from .odbc_linked_service_py3 import OdbcLinkedService from .azure_ml_linked_service_py3 import AzureMLLinkedService from .teradata_linked_service_py3 import TeradataLinkedService from .db2_linked_service_py3 import Db2LinkedService from .sybase_linked_service_py3 import SybaseLinkedService from .postgre_sql_linked_service_py3 import PostgreSqlLinkedService from .my_sql_linked_service_py3 import MySqlLinkedService from 
.azure_my_sql_linked_service_py3 import AzureMySqlLinkedService from .oracle_linked_service_py3 import OracleLinkedService from .file_server_linked_service_py3 import FileServerLinkedService from .hd_insight_linked_service_py3 import HDInsightLinkedService from .dynamics_linked_service_py3 import DynamicsLinkedService from .cosmos_db_linked_service_py3 import CosmosDbLinkedService from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService from .azure_batch_linked_service_py3 import AzureBatchLinkedService from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService from .sql_server_linked_service_py3 import SqlServerLinkedService from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService from .azure_storage_linked_service_py3 import AzureStorageLinkedService from .responsys_object_dataset_py3 import ResponsysObjectDataset from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset from .vertica_table_dataset_py3 import VerticaTableDataset from .netezza_table_dataset_py3 import NetezzaTableDataset from .zoho_object_dataset_py3 import ZohoObjectDataset from .xero_object_dataset_py3 import XeroObjectDataset from .square_object_dataset_py3 import SquareObjectDataset from .spark_object_dataset_py3 import SparkObjectDataset from .shopify_object_dataset_py3 import ShopifyObjectDataset from .service_now_object_dataset_py3 import ServiceNowObjectDataset from .quick_books_object_dataset_py3 import QuickBooksObjectDataset from .presto_object_dataset_py3 import PrestoObjectDataset from .phoenix_object_dataset_py3 import PhoenixObjectDataset from .paypal_object_dataset_py3 import PaypalObjectDataset from .marketo_object_dataset_py3 import MarketoObjectDataset from .maria_db_table_dataset_py3 import MariaDBTableDataset from 
.magento_object_dataset_py3 import MagentoObjectDataset from .jira_object_dataset_py3 import JiraObjectDataset from .impala_object_dataset_py3 import ImpalaObjectDataset from .hubspot_object_dataset_py3 import HubspotObjectDataset from .hive_object_dataset_py3 import HiveObjectDataset from .hbase_object_dataset_py3 import HBaseObjectDataset from .greenplum_table_dataset_py3 import GreenplumTableDataset from .google_big_query_object_dataset_py3 import GoogleBigQueryObjectDataset from .eloqua_object_dataset_py3 import EloquaObjectDataset from .drill_table_dataset_py3 import DrillTableDataset from .couchbase_table_dataset_py3 import CouchbaseTableDataset from .concur_object_dataset_py3 import ConcurObjectDataset from .azure_postgre_sql_table_dataset_py3 import AzurePostgreSqlTableDataset from .amazon_mws_object_dataset_py3 import AmazonMWSObjectDataset from .dataset_zip_deflate_compression_py3 import DatasetZipDeflateCompression from .dataset_deflate_compression_py3 import DatasetDeflateCompression from .dataset_gzip_compression_py3 import DatasetGZipCompression from .dataset_bzip2_compression_py3 import DatasetBZip2Compression from .dataset_compression_py3 import DatasetCompression from .parquet_format_py3 import ParquetFormat from .orc_format_py3 import OrcFormat from .avro_format_py3 import AvroFormat from .json_format_py3 import JsonFormat from .text_format_py3 import TextFormat from .dataset_storage_format_py3 import DatasetStorageFormat from .http_dataset_py3 import HttpDataset from .azure_search_index_dataset_py3 import AzureSearchIndexDataset from .web_table_dataset_py3 import WebTableDataset from .sql_server_table_dataset_py3 import SqlServerTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset from .relational_table_dataset_py3 import RelationalTableDataset from 
.azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset from .oracle_table_dataset_py3 import OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset from .file_share_dataset_py3 import FileShareDataset from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset from .dynamics_entity_dataset_py3 import DynamicsEntityDataset from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset from .custom_dataset_py3 import CustomDataset from .cassandra_table_dataset_py3 import CassandraTableDataset from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset from .azure_sql_table_dataset_py3 import AzureSqlTableDataset from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset from .activity_policy_py3 import ActivityPolicy from .azure_function_activity_py3 import AzureFunctionActivity from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity from .databricks_notebook_activity_py3 import DatabricksNotebookActivity from .data_lake_analytics_usql_activity_py3 import DataLakeAnalyticsUSQLActivity from .azure_ml_update_resource_activity_py3 import AzureMLUpdateResourceActivity from .azure_ml_web_service_file_py3 import AzureMLWebServiceFile from .azure_ml_batch_execution_activity_py3 import AzureMLBatchExecutionActivity from .get_metadata_activity_py3 import GetMetadataActivity from .web_activity_authentication_py3 import WebActivityAuthentication from .web_activity_py3 import WebActivity from .redshift_unload_settings_py3 import RedshiftUnloadSettings from .amazon_redshift_source_py3 import AmazonRedshiftSource from .responsys_source_py3 import ResponsysSource from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource from 
.vertica_source_py3 import VerticaSource from .netezza_source_py3 import NetezzaSource from .zoho_source_py3 import ZohoSource from .xero_source_py3 import XeroSource from .square_source_py3 import SquareSource from .spark_source_py3 import SparkSource from .shopify_source_py3 import ShopifySource from .service_now_source_py3 import ServiceNowSource from .quick_books_source_py3 import QuickBooksSource from .presto_source_py3 import PrestoSource from .phoenix_source_py3 import PhoenixSource from .paypal_source_py3 import PaypalSource from .marketo_source_py3 import MarketoSource from .maria_db_source_py3 import MariaDBSource from .magento_source_py3 import MagentoSource from .jira_source_py3 import JiraSource from .impala_source_py3 import ImpalaSource from .hubspot_source_py3 import HubspotSource from .hive_source_py3 import HiveSource from .hbase_source_py3 import HBaseSource from .greenplum_source_py3 import GreenplumSource from .google_big_query_source_py3 import GoogleBigQuerySource from .eloqua_source_py3 import EloquaSource from .drill_source_py3 import DrillSource from .couchbase_source_py3 import CouchbaseSource from .concur_source_py3 import ConcurSource from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource from .amazon_mws_source_py3 import AmazonMWSSource from .http_source_py3 import HttpSource from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource from .oracle_source_py3 import OracleSource from .azure_my_sql_source_py3 import AzureMySqlSource from .distcp_settings_py3 import DistcpSettings from .hdfs_source_py3 import HdfsSource from .file_system_source_py3 import FileSystemSource from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter from .sql_source_py3 import SqlSource from .sap_ecc_source_py3 import SapEccSource from 
.sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource from .salesforce_source_py3 import SalesforceSource from .relational_source_py3 import RelationalSource from .dynamics_source_py3 import DynamicsSource from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource from .azure_table_source_py3 import AzureTableSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity from .log_storage_settings_py3 import LogStorageSettings from .delete_activity_py3 import DeleteActivity from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity from .custom_activity_reference_object_py3 import CustomActivityReferenceObject from .custom_activity_py3 import CustomActivity from .ssis_property_override_py3 import SSISPropertyOverride from .ssis_execution_parameter_py3 import SSISExecutionParameter from .ssis_execution_credential_py3 import SSISExecutionCredential from .ssis_package_location_py3 import SSISPackageLocation from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity from .hd_insight_spark_activity_py3 import HDInsightSparkActivity from .hd_insight_streaming_activity_py3 import HDInsightStreamingActivity from .hd_insight_map_reduce_activity_py3 import HDInsightMapReduceActivity from .hd_insight_pig_activity_py3 import HDInsightPigActivity from .hd_insight_hive_activity_py3 import HDInsightHiveActivity from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings from .staging_settings_py3 import StagingSettings from .tabular_translator_py3 import TabularTranslator from .copy_translator_py3 import CopyTranslator from .salesforce_sink_py3 import SalesforceSink from .dynamics_sink_py3 import DynamicsSink from .odbc_sink_py3 import OdbcSink from .azure_search_index_sink_py3 import AzureSearchIndexSink from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink from .oracle_sink_py3 import OracleSink from 
.polybase_settings_py3 import PolybaseSettings from .sql_dw_sink_py3 import SqlDWSink from .sql_sink_py3 import SqlSink from .document_db_collection_sink_py3 import DocumentDbCollectionSink from .file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity from .execution_activity_py3 import ExecutionActivity from .append_variable_activity_py3 import AppendVariableActivity from .set_variable_activity_py3 import SetVariableActivity from .filter_activity_py3 import FilterActivity from .until_activity_py3 import UntilActivity from .wait_activity_py3 import WaitActivity from .for_each_activity_py3 import ForEachActivity from .if_condition_activity_py3 import IfConditionActivity from .execute_pipeline_activity_py3 import ExecutePipelineActivity from .control_activity_py3 import ControlActivity from .linked_integration_runtime_py3 import LinkedIntegrationRuntime from .self_hosted_integration_runtime_node_py3 import SelfHostedIntegrationRuntimeNode from .self_hosted_integration_runtime_status_py3 import SelfHostedIntegrationRuntimeStatus from .managed_integration_runtime_operation_result_py3 import ManagedIntegrationRuntimeOperationResult from .managed_integration_runtime_error_py3 import ManagedIntegrationRuntimeError from .managed_integration_runtime_node_py3 import ManagedIntegrationRuntimeNode from .managed_integration_runtime_status_py3 import ManagedIntegrationRuntimeStatus from .linked_integration_runtime_rbac_authorization_py3 import LinkedIntegrationRuntimeRbacAuthorization from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime_py3 import 
SelfHostedIntegrationRuntime from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties from .integration_runtime_vnet_properties_py3 import IntegrationRuntimeVNetProperties from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties from .managed_integration_runtime_py3 import ManagedIntegrationRuntime from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress from .ssis_object_metadata_py3 import SsisObjectMetadata from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData from .integration_runtime_monitoring_data_py3 import IntegrationRuntimeMonitoringData from .integration_runtime_auth_keys_py3 import IntegrationRuntimeAuthKeys from .integration_runtime_regenerate_key_parameters_py3 import IntegrationRuntimeRegenerateKeyParameters from .integration_runtime_connection_info_py3 import IntegrationRuntimeConnectionInfo except (SyntaxError, ImportError): from .resource import Resource from .sub_resource import SubResource from .expression import Expression from .secure_string import SecureString from .linked_service_reference import LinkedServiceReference from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference from .secret_base import SecretBase from .factory_identity import FactoryIdentity from .factory_repo_configuration import FactoryRepoConfiguration from .factory import Factory from .integration_runtime import IntegrationRuntime from .integration_runtime_resource import IntegrationRuntimeResource from .integration_runtime_reference import IntegrationRuntimeReference from .integration_runtime_status import IntegrationRuntimeStatus from 
.integration_runtime_status_response import IntegrationRuntimeStatusResponse from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest from .parameter_specification import ParameterSpecification from .linked_service import LinkedService from .linked_service_resource import LinkedServiceResource from .dataset_folder import DatasetFolder from .dataset import Dataset from .dataset_resource import DatasetResource from .activity_dependency import ActivityDependency from .user_property import UserProperty from .activity import Activity from .variable_specification import VariableSpecification from .pipeline_folder import PipelineFolder from .pipeline_resource import PipelineResource from .trigger import Trigger from .trigger_resource import TriggerResource from .create_run_response import CreateRunResponse from .factory_vsts_configuration import FactoryVSTSConfiguration from .factory_git_hub_configuration import FactoryGitHubConfiguration from .factory_repo_update import FactoryRepoUpdate from .git_hub_access_token_request import GitHubAccessTokenRequest from .git_hub_access_token_response import GitHubAccessTokenResponse from .user_access_policy import UserAccessPolicy from .access_policy_response import AccessPolicyResponse from .pipeline_reference import PipelineReference from .trigger_pipeline_reference import TriggerPipelineReference from .factory_update_parameters import FactoryUpdateParameters from .dataset_reference import DatasetReference from .run_query_filter import RunQueryFilter from .run_query_order_by import RunQueryOrderBy from .run_filter_parameters import RunFilterParameters from 
.pipeline_run_invoked_by import PipelineRunInvokedBy from .pipeline_run import PipelineRun from .pipeline_runs_query_response import PipelineRunsQueryResponse from .activity_run import ActivityRun from .activity_runs_query_response import ActivityRunsQueryResponse from .trigger_run import TriggerRun from .trigger_runs_query_response import TriggerRunsQueryResponse from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger from .rerun_trigger_resource import RerunTriggerResource from .operation_display import OperationDisplay from .operation_log_specification import OperationLogSpecification from .operation_metric_availability import OperationMetricAvailability from .operation_metric_dimension import OperationMetricDimension from .operation_metric_specification import OperationMetricSpecification from .operation_service_specification import OperationServiceSpecification from .operation import Operation from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse from .exposure_control_request import ExposureControlRequest from .exposure_control_response import ExposureControlResponse from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference from .trigger_reference import TriggerReference from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference from .trigger_dependency_reference import TriggerDependencyReference from .dependency_reference import DependencyReference from .retry_policy import RetryPolicy from .tumbling_window_trigger import TumblingWindowTrigger from .blob_events_trigger import BlobEventsTrigger from .blob_trigger import BlobTrigger from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence from .recurrence_schedule import 
RecurrenceSchedule from .schedule_trigger_recurrence import ScheduleTriggerRecurrence from .schedule_trigger import ScheduleTrigger from .multiple_pipeline_trigger import MultiplePipelineTrigger from .azure_function_linked_service import AzureFunctionLinkedService from .responsys_linked_service import ResponsysLinkedService from .azure_databricks_linked_service import AzureDatabricksLinkedService from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService from .script_action import ScriptAction from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService from .netezza_linked_service import NetezzaLinkedService from .vertica_linked_service import VerticaLinkedService from .zoho_linked_service import ZohoLinkedService from .xero_linked_service import XeroLinkedService from .square_linked_service import SquareLinkedService from .spark_linked_service import SparkLinkedService from .shopify_linked_service import ShopifyLinkedService from .service_now_linked_service import ServiceNowLinkedService from .quick_books_linked_service import QuickBooksLinkedService from .presto_linked_service import PrestoLinkedService from .phoenix_linked_service import PhoenixLinkedService from .paypal_linked_service import PaypalLinkedService from .marketo_linked_service import MarketoLinkedService from .maria_db_linked_service import MariaDBLinkedService from .magento_linked_service import MagentoLinkedService from .jira_linked_service import JiraLinkedService from .impala_linked_service import ImpalaLinkedService from .hubspot_linked_service import HubspotLinkedService from .hive_linked_service import HiveLinkedService from .hbase_linked_service import HBaseLinkedService from .greenplum_linked_service import GreenplumLinkedService from .google_big_query_linked_service import GoogleBigQueryLinkedService from .eloqua_linked_service import 
EloquaLinkedService from .drill_linked_service import DrillLinkedService from .couchbase_linked_service import CouchbaseLinkedService from .concur_linked_service import ConcurLinkedService from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService from .amazon_mws_linked_service import AmazonMWSLinkedService from .sap_hana_linked_service import SapHanaLinkedService from .sap_bw_linked_service import SapBWLinkedService from .sftp_server_linked_service import SftpServerLinkedService from .ftp_server_linked_service import FtpServerLinkedService from .http_linked_service import HttpLinkedService from .azure_search_linked_service import AzureSearchLinkedService from .custom_data_source_linked_service import CustomDataSourceLinkedService from .amazon_redshift_linked_service import AmazonRedshiftLinkedService from .amazon_s3_linked_service import AmazonS3LinkedService from .sap_ecc_linked_service import SapEccLinkedService from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService from .salesforce_linked_service import SalesforceLinkedService from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService from .mongo_db_linked_service import MongoDbLinkedService from .cassandra_linked_service import CassandraLinkedService from .web_client_certificate_authentication import WebClientCertificateAuthentication from .web_basic_authentication import WebBasicAuthentication from .web_anonymous_authentication import WebAnonymousAuthentication from .web_linked_service_type_properties import WebLinkedServiceTypeProperties from .web_linked_service import WebLinkedService from .odata_linked_service import ODataLinkedService from .hdfs_linked_service import HdfsLinkedService from .odbc_linked_service import OdbcLinkedService from .azure_ml_linked_service import AzureMLLinkedService from .teradata_linked_service import TeradataLinkedService from .db2_linked_service import Db2LinkedService from .sybase_linked_service import 
SybaseLinkedService from .postgre_sql_linked_service import PostgreSqlLinkedService from .my_sql_linked_service import MySqlLinkedService from .azure_my_sql_linked_service import AzureMySqlLinkedService from .oracle_linked_service import OracleLinkedService from .file_server_linked_service import FileServerLinkedService from .hd_insight_linked_service import HDInsightLinkedService from .dynamics_linked_service import DynamicsLinkedService from .cosmos_db_linked_service import CosmosDbLinkedService from .azure_key_vault_linked_service import AzureKeyVaultLinkedService from .azure_batch_linked_service import AzureBatchLinkedService from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService from .sql_server_linked_service import SqlServerLinkedService from .azure_sql_dw_linked_service import AzureSqlDWLinkedService from .azure_table_storage_linked_service import AzureTableStorageLinkedService from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService from .azure_storage_linked_service import AzureStorageLinkedService from .responsys_object_dataset import ResponsysObjectDataset from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset from .vertica_table_dataset import VerticaTableDataset from .netezza_table_dataset import NetezzaTableDataset from .zoho_object_dataset import ZohoObjectDataset from .xero_object_dataset import XeroObjectDataset from .square_object_dataset import SquareObjectDataset from .spark_object_dataset import SparkObjectDataset from .shopify_object_dataset import ShopifyObjectDataset from .service_now_object_dataset import ServiceNowObjectDataset from .quick_books_object_dataset import QuickBooksObjectDataset from .presto_object_dataset import PrestoObjectDataset from .phoenix_object_dataset import PhoenixObjectDataset from .paypal_object_dataset import PaypalObjectDataset from .marketo_object_dataset import MarketoObjectDataset from .maria_db_table_dataset import 
MariaDBTableDataset from .magento_object_dataset import MagentoObjectDataset from .jira_object_dataset import JiraObjectDataset from .impala_object_dataset import ImpalaObjectDataset from .hubspot_object_dataset import HubspotObjectDataset from .hive_object_dataset import HiveObjectDataset from .hbase_object_dataset import HBaseObjectDataset from .greenplum_table_dataset import GreenplumTableDataset from .google_big_query_object_dataset import GoogleBigQueryObjectDataset from .eloqua_object_dataset import EloquaObjectDataset from .drill_table_dataset import DrillTableDataset from .couchbase_table_dataset import CouchbaseTableDataset from .concur_object_dataset import ConcurObjectDataset from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset from .amazon_mws_object_dataset import AmazonMWSObjectDataset from .dataset_zip_deflate_compression import DatasetZipDeflateCompression from .dataset_deflate_compression import DatasetDeflateCompression from .dataset_gzip_compression import DatasetGZipCompression from .dataset_bzip2_compression import DatasetBZip2Compression from .dataset_compression import DatasetCompression from .parquet_format import ParquetFormat from .orc_format import OrcFormat from .avro_format import AvroFormat from .json_format import JsonFormat from .text_format import TextFormat from .dataset_storage_format import DatasetStorageFormat from .http_dataset import HttpDataset from .azure_search_index_dataset import AzureSearchIndexDataset from .web_table_dataset import WebTableDataset from .sql_server_table_dataset import SqlServerTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset from .salesforce_object_dataset import SalesforceObjectDataset from .relational_table_dataset import RelationalTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset from .oracle_table_dataset import OracleTableDataset from 
.odata_resource_dataset import ODataResourceDataset from .mongo_db_collection_dataset import MongoDbCollectionDataset from .file_share_dataset import FileShareDataset from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset from .dynamics_entity_dataset import DynamicsEntityDataset from .document_db_collection_dataset import DocumentDbCollectionDataset from .custom_dataset import CustomDataset from .cassandra_table_dataset import CassandraTableDataset from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset from .azure_sql_table_dataset import AzureSqlTableDataset from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset import AzureBlobDataset from .amazon_s3_dataset import AmazonS3Dataset from .activity_policy import ActivityPolicy from .azure_function_activity import AzureFunctionActivity from .databricks_spark_python_activity import DatabricksSparkPythonActivity from .databricks_spark_jar_activity import DatabricksSparkJarActivity from .databricks_notebook_activity import DatabricksNotebookActivity from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity from .azure_ml_web_service_file import AzureMLWebServiceFile from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity from .get_metadata_activity import GetMetadataActivity from .web_activity_authentication import WebActivityAuthentication from .web_activity import WebActivity from .redshift_unload_settings import RedshiftUnloadSettings from .amazon_redshift_source import AmazonRedshiftSource from .responsys_source import ResponsysSource from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource from .vertica_source import VerticaSource from .netezza_source import NetezzaSource from .zoho_source import ZohoSource from .xero_source import XeroSource from .square_source import SquareSource from .spark_source import SparkSource from 
.shopify_source import ShopifySource from .service_now_source import ServiceNowSource from .quick_books_source import QuickBooksSource from .presto_source import PrestoSource from .phoenix_source import PhoenixSource from .paypal_source import PaypalSource from .marketo_source import MarketoSource from .maria_db_source import MariaDBSource from .magento_source import MagentoSource from .jira_source import JiraSource from .impala_source import ImpalaSource from .hubspot_source import HubspotSource from .hive_source import HiveSource from .hbase_source import HBaseSource from .greenplum_source import GreenplumSource from .google_big_query_source import GoogleBigQuerySource from .eloqua_source import EloquaSource from .drill_source import DrillSource from .couchbase_source import CouchbaseSource from .concur_source import ConcurSource from .azure_postgre_sql_source import AzurePostgreSqlSource from .amazon_mws_source import AmazonMWSSource from .http_source import HttpSource from .azure_data_lake_store_source import AzureDataLakeStoreSource from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource from .oracle_source import OracleSource from .azure_my_sql_source import AzureMySqlSource from .distcp_settings import DistcpSettings from .hdfs_source import HdfsSource from .file_system_source import FileSystemSource from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter from .sql_source import SqlSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource from .salesforce_source import SalesforceSource from .relational_source import RelationalSource from .dynamics_source import DynamicsSource from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource from .azure_table_source import AzureTableSource from .copy_source import CopySource from .lookup_activity 
import LookupActivity from .log_storage_settings import LogStorageSettings from .delete_activity import DeleteActivity from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity from .custom_activity_reference_object import CustomActivityReferenceObject from .custom_activity import CustomActivity from .ssis_property_override import SSISPropertyOverride from .ssis_execution_parameter import SSISExecutionParameter from .ssis_execution_credential import SSISExecutionCredential from .ssis_package_location import SSISPackageLocation from .execute_ssis_package_activity import ExecuteSSISPackageActivity from .hd_insight_spark_activity import HDInsightSparkActivity from .hd_insight_streaming_activity import HDInsightStreamingActivity from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity from .hd_insight_pig_activity import HDInsightPigActivity from .hd_insight_hive_activity import HDInsightHiveActivity from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings from .staging_settings import StagingSettings from .tabular_translator import TabularTranslator from .copy_translator import CopyTranslator from .salesforce_sink import SalesforceSink from .dynamics_sink import DynamicsSink from .odbc_sink import OdbcSink from .azure_search_index_sink import AzureSearchIndexSink from .azure_data_lake_store_sink import AzureDataLakeStoreSink from .oracle_sink import OracleSink from .polybase_settings import PolybaseSettings from .sql_dw_sink import SqlDWSink from .sql_sink import SqlSink from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink from .copy_sink import CopySink from .copy_activity import CopyActivity from .execution_activity import ExecutionActivity from 
.append_variable_activity import AppendVariableActivity from .set_variable_activity import SetVariableActivity from .filter_activity import FilterActivity from .until_activity import UntilActivity from .wait_activity import WaitActivity from .for_each_activity import ForEachActivity from .if_condition_activity import IfConditionActivity from .execute_pipeline_activity import ExecutePipelineActivity from .control_activity import ControlActivity from .linked_integration_runtime import LinkedIntegrationRuntime from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult from .managed_integration_runtime_error import ManagedIntegrationRuntimeError from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties from .managed_integration_runtime import ManagedIntegrationRuntime from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress from .ssis_object_metadata import 
SsisObjectMetadata from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo from .operation_paged import OperationPaged from .factory_paged import FactoryPaged from .integration_runtime_resource_paged import IntegrationRuntimeResourcePaged from .linked_service_resource_paged import LinkedServiceResourcePaged from .dataset_resource_paged import DatasetResourcePaged from .pipeline_resource_paged import PipelineResourcePaged from .trigger_resource_paged import TriggerResourcePaged from .rerun_trigger_resource_paged import RerunTriggerResourcePaged from .data_factory_management_client_enums import ( IntegrationRuntimeState, IntegrationRuntimeAutoUpdate, ParameterType, DependencyCondition, VariableType, TriggerRuntimeState, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrderByField, RunQueryOrder, TriggerRunStatus, TumblingWindowFrequency, BlobEventTypes, DayOfWeek, DaysOfWeek, RecurrenceFrequency, SparkServerType, SparkThriftTransportProtocol, SparkAuthenticationType, ServiceNowAuthenticationType, PrestoAuthenticationType, PhoenixAuthenticationType, ImpalaAuthenticationType, HiveServerType, HiveThriftTransportProtocol, HiveAuthenticationType, HBaseAuthenticationType, GoogleBigQueryAuthenticationType, SapHanaAuthenticationType, SftpAuthenticationType, FtpAuthenticationType, HttpAuthenticationType, MongoDbAuthenticationType, ODataAuthenticationType, TeradataAuthenticationType, Db2AuthenticationType, SybaseAuthenticationType, DatasetCompressionLevel, JsonFormatFilePattern, AzureFunctionActivityMethod, WebActivityMethod, 
CassandraSourceReadConsistencyLevels, StoredProcedureParameterType, SalesforceSourceReadBehavior, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, CopyBehaviorType, PolybaseSettingsRejectType, SapCloudForCustomerSinkWriteBehavior, IntegrationRuntimeType, SelfHostedIntegrationRuntimeNodeStatus, IntegrationRuntimeUpdateResult, IntegrationRuntimeInternalChannelEncryptionMode, ManagedIntegrationRuntimeNodeStatus, IntegrationRuntimeSsisCatalogPricingTier, IntegrationRuntimeLicenseType, IntegrationRuntimeEdition, SsisObjectMetadataType, IntegrationRuntimeAuthKeyName, ) __all__ = [ 'Resource', 'SubResource', 'Expression', 'SecureString', 'LinkedServiceReference', 'AzureKeyVaultSecretReference', 'SecretBase', 'FactoryIdentity', 'FactoryRepoConfiguration', 'Factory', 'IntegrationRuntime', 'IntegrationRuntimeResource', 'IntegrationRuntimeReference', 'IntegrationRuntimeStatus', 'IntegrationRuntimeStatusResponse', 'IntegrationRuntimeStatusListResponse', 'UpdateIntegrationRuntimeRequest', 'UpdateIntegrationRuntimeNodeRequest', 'LinkedIntegrationRuntimeRequest', 'CreateLinkedIntegrationRuntimeRequest', 'ParameterSpecification', 'LinkedService', 'LinkedServiceResource', 'DatasetFolder', 'Dataset', 'DatasetResource', 'ActivityDependency', 'UserProperty', 'Activity', 'VariableSpecification', 'PipelineFolder', 'PipelineResource', 'Trigger', 'TriggerResource', 'CreateRunResponse', 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration', 'FactoryRepoUpdate', 'GitHubAccessTokenRequest', 'GitHubAccessTokenResponse', 'UserAccessPolicy', 'AccessPolicyResponse', 'PipelineReference', 'TriggerPipelineReference', 'FactoryUpdateParameters', 'DatasetReference', 'RunQueryFilter', 'RunQueryOrderBy', 'RunFilterParameters', 'PipelineRunInvokedBy', 'PipelineRun', 'PipelineRunsQueryResponse', 'ActivityRun', 'ActivityRunsQueryResponse', 'TriggerRun', 'TriggerRunsQueryResponse', 'RerunTumblingWindowTriggerActionParameters', 'RerunTumblingWindowTrigger', 
'RerunTriggerResource', 'OperationDisplay', 'OperationLogSpecification', 'OperationMetricAvailability', 'OperationMetricDimension', 'OperationMetricSpecification', 'OperationServiceSpecification', 'Operation', 'GetSsisObjectMetadataRequest', 'SsisObjectMetadataStatusResponse', 'ExposureControlRequest', 'ExposureControlResponse', 'SelfDependencyTumblingWindowTriggerReference', 'TriggerReference', 'TumblingWindowTriggerDependencyReference', 'TriggerDependencyReference', 'DependencyReference', 'RetryPolicy', 'TumblingWindowTrigger', 'BlobEventsTrigger', 'BlobTrigger', 'RecurrenceScheduleOccurrence', 'RecurrenceSchedule', 'ScheduleTriggerRecurrence', 'ScheduleTrigger', 'MultiplePipelineTrigger', 'AzureFunctionLinkedService', 'ResponsysLinkedService', 'AzureDatabricksLinkedService', 'AzureDataLakeAnalyticsLinkedService', 'ScriptAction', 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloudLinkedService', 'NetezzaLinkedService', 'VerticaLinkedService', 'ZohoLinkedService', 'XeroLinkedService', 'SquareLinkedService', 'SparkLinkedService', 'ShopifyLinkedService', 'ServiceNowLinkedService', 'QuickBooksLinkedService', 'PrestoLinkedService', 'PhoenixLinkedService', 'PaypalLinkedService', 'MarketoLinkedService', 'MariaDBLinkedService', 'MagentoLinkedService', 'JiraLinkedService', 'ImpalaLinkedService', 'HubspotLinkedService', 'HiveLinkedService', 'HBaseLinkedService', 'GreenplumLinkedService', 'GoogleBigQueryLinkedService', 'EloquaLinkedService', 'DrillLinkedService', 'CouchbaseLinkedService', 'ConcurLinkedService', 'AzurePostgreSqlLinkedService', 'AmazonMWSLinkedService', 'SapHanaLinkedService', 'SapBWLinkedService', 'SftpServerLinkedService', 'FtpServerLinkedService', 'HttpLinkedService', 'AzureSearchLinkedService', 'CustomDataSourceLinkedService', 'AmazonRedshiftLinkedService', 'AmazonS3LinkedService', 'SapEccLinkedService', 'SapCloudForCustomerLinkedService', 'SalesforceLinkedService', 'AzureDataLakeStoreLinkedService', 'MongoDbLinkedService', 
'CassandraLinkedService', 'WebClientCertificateAuthentication', 'WebBasicAuthentication', 'WebAnonymousAuthentication', 'WebLinkedServiceTypeProperties', 'WebLinkedService', 'ODataLinkedService', 'HdfsLinkedService', 'OdbcLinkedService', 'AzureMLLinkedService', 'TeradataLinkedService', 'Db2LinkedService', 'SybaseLinkedService', 'PostgreSqlLinkedService', 'MySqlLinkedService', 'AzureMySqlLinkedService', 'OracleLinkedService', 'FileServerLinkedService', 'HDInsightLinkedService', 'DynamicsLinkedService', 'CosmosDbLinkedService', 'AzureKeyVaultLinkedService', 'AzureBatchLinkedService', 'AzureSqlDatabaseLinkedService', 'SqlServerLinkedService', 'AzureSqlDWLinkedService', 'AzureTableStorageLinkedService', 'AzureBlobStorageLinkedService', 'AzureStorageLinkedService', 'ResponsysObjectDataset', 'SalesforceMarketingCloudObjectDataset', 'VerticaTableDataset', 'NetezzaTableDataset', 'ZohoObjectDataset', 'XeroObjectDataset', 'SquareObjectDataset', 'SparkObjectDataset', 'ShopifyObjectDataset', 'ServiceNowObjectDataset', 'QuickBooksObjectDataset', 'PrestoObjectDataset', 'PhoenixObjectDataset', 'PaypalObjectDataset', 'MarketoObjectDataset', 'MariaDBTableDataset', 'MagentoObjectDataset', 'JiraObjectDataset', 'ImpalaObjectDataset', 'HubspotObjectDataset', 'HiveObjectDataset', 'HBaseObjectDataset', 'GreenplumTableDataset', 'GoogleBigQueryObjectDataset', 'EloquaObjectDataset', 'DrillTableDataset', 'CouchbaseTableDataset', 'ConcurObjectDataset', 'AzurePostgreSqlTableDataset', 'AmazonMWSObjectDataset', 'DatasetZipDeflateCompression', 'DatasetDeflateCompression', 'DatasetGZipCompression', 'DatasetBZip2Compression', 'DatasetCompression', 'ParquetFormat', 'OrcFormat', 'AvroFormat', 'JsonFormat', 'TextFormat', 'DatasetStorageFormat', 'HttpDataset', 'AzureSearchIndexDataset', 'WebTableDataset', 'SqlServerTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', 'SalesforceObjectDataset', 'RelationalTableDataset', 'AzureMySqlTableDataset', 'OracleTableDataset', 
'ODataResourceDataset', 'MongoDbCollectionDataset', 'FileShareDataset', 'AzureDataLakeStoreDataset', 'DynamicsEntityDataset', 'DocumentDbCollectionDataset', 'CustomDataset', 'CassandraTableDataset', 'AzureSqlDWTableDataset', 'AzureSqlTableDataset', 'AzureTableDataset', 'AzureBlobDataset', 'AmazonS3Dataset', 'ActivityPolicy', 'AzureFunctionActivity', 'DatabricksSparkPythonActivity', 'DatabricksSparkJarActivity', 'DatabricksNotebookActivity', 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResourceActivity', 'AzureMLWebServiceFile', 'AzureMLBatchExecutionActivity', 'GetMetadataActivity', 'WebActivityAuthentication', 'WebActivity', 'RedshiftUnloadSettings', 'AmazonRedshiftSource', 'ResponsysSource', 'SalesforceMarketingCloudSource', 'VerticaSource', 'NetezzaSource', 'ZohoSource', 'XeroSource', 'SquareSource', 'SparkSource', 'ShopifySource', 'ServiceNowSource', 'QuickBooksSource', 'PrestoSource', 'PhoenixSource', 'PaypalSource', 'MarketoSource', 'MariaDBSource', 'MagentoSource', 'JiraSource', 'ImpalaSource', 'HubspotSource', 'HiveSource', 'HBaseSource', 'GreenplumSource', 'GoogleBigQuerySource', 'EloquaSource', 'DrillSource', 'CouchbaseSource', 'ConcurSource', 'AzurePostgreSqlSource', 'AmazonMWSSource', 'HttpSource', 'AzureDataLakeStoreSource', 'MongoDbSource', 'CassandraSource', 'WebSource', 'OracleSource', 'AzureMySqlSource', 'DistcpSettings', 'HdfsSource', 'FileSystemSource', 'SqlDWSource', 'StoredProcedureParameter', 'SqlSource', 'SapEccSource', 'SapCloudForCustomerSource', 'SalesforceSource', 'RelationalSource', 'DynamicsSource', 'DocumentDbCollectionSource', 'BlobSource', 'AzureTableSource', 'CopySource', 'LookupActivity', 'LogStorageSettings', 'DeleteActivity', 'SqlServerStoredProcedureActivity', 'CustomActivityReferenceObject', 'CustomActivity', 'SSISPropertyOverride', 'SSISExecutionParameter', 'SSISExecutionCredential', 'SSISPackageLocation', 'ExecuteSSISPackageActivity', 'HDInsightSparkActivity', 'HDInsightStreamingActivity', 'HDInsightMapReduceActivity', 
'HDInsightPigActivity', 'HDInsightHiveActivity', 'RedirectIncompatibleRowSettings', 'StagingSettings', 'TabularTranslator', 'CopyTranslator', 'SalesforceSink', 'DynamicsSink', 'OdbcSink', 'AzureSearchIndexSink', 'AzureDataLakeStoreSink', 'OracleSink', 'PolybaseSettings', 'SqlDWSink', 'SqlSink', 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', 'CopySink', 'CopyActivity', 'ExecutionActivity', 'AppendVariableActivity', 'SetVariableActivity', 'FilterActivity', 'UntilActivity', 'WaitActivity', 'ForEachActivity', 'IfConditionActivity', 'ExecutePipelineActivity', 'ControlActivity', 'LinkedIntegrationRuntime', 'SelfHostedIntegrationRuntimeNode', 'SelfHostedIntegrationRuntimeStatus', 'ManagedIntegrationRuntimeOperationResult', 'ManagedIntegrationRuntimeError', 'ManagedIntegrationRuntimeNode', 'ManagedIntegrationRuntimeStatus', 'LinkedIntegrationRuntimeRbacAuthorization', 'LinkedIntegrationRuntimeKeyAuthorization', 'LinkedIntegrationRuntimeType', 'SelfHostedIntegrationRuntime', 'IntegrationRuntimeCustomSetupScriptProperties', 'IntegrationRuntimeSsisCatalogInfo', 'IntegrationRuntimeSsisProperties', 'IntegrationRuntimeVNetProperties', 'IntegrationRuntimeComputeProperties', 'ManagedIntegrationRuntime', 'IntegrationRuntimeNodeIpAddress', 'SsisObjectMetadata', 'SsisObjectMetadataListResponse', 'IntegrationRuntimeNodeMonitoringData', 'IntegrationRuntimeMonitoringData', 'IntegrationRuntimeAuthKeys', 'IntegrationRuntimeRegenerateKeyParameters', 'IntegrationRuntimeConnectionInfo', 'OperationPaged', 'FactoryPaged', 'IntegrationRuntimeResourcePaged', 'LinkedServiceResourcePaged', 'DatasetResourcePaged', 'PipelineResourcePaged', 'TriggerResourcePaged', 'RerunTriggerResourcePaged', 'IntegrationRuntimeState', 'IntegrationRuntimeAutoUpdate', 'ParameterType', 'DependencyCondition', 'VariableType', 'TriggerRuntimeState', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrderByField', 'RunQueryOrder', 
'TriggerRunStatus', 'TumblingWindowFrequency', 'BlobEventTypes', 'DayOfWeek', 'DaysOfWeek', 'RecurrenceFrequency', 'SparkServerType', 'SparkThriftTransportProtocol', 'SparkAuthenticationType', 'ServiceNowAuthenticationType', 'PrestoAuthenticationType', 'PhoenixAuthenticationType', 'ImpalaAuthenticationType', 'HiveServerType', 'HiveThriftTransportProtocol', 'HiveAuthenticationType', 'HBaseAuthenticationType', 'GoogleBigQueryAuthenticationType', 'SapHanaAuthenticationType', 'SftpAuthenticationType', 'FtpAuthenticationType', 'HttpAuthenticationType', 'MongoDbAuthenticationType', 'ODataAuthenticationType', 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', 'DatasetCompressionLevel', 'JsonFormatFilePattern', 'AzureFunctionActivityMethod', 'WebActivityMethod', 'CassandraSourceReadConsistencyLevels', 'StoredProcedureParameterType', 'SalesforceSourceReadBehavior', 'HDInsightActivityDebugInfoOption', 'SalesforceSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', 'CopyBehaviorType', 'PolybaseSettingsRejectType', 'SapCloudForCustomerSinkWriteBehavior', 'IntegrationRuntimeType', 'SelfHostedIntegrationRuntimeNodeStatus', 'IntegrationRuntimeUpdateResult', 'IntegrationRuntimeInternalChannelEncryptionMode', 'ManagedIntegrationRuntimeNodeStatus', 'IntegrationRuntimeSsisCatalogPricingTier', 'IntegrationRuntimeLicenseType', 'IntegrationRuntimeEdition', 'SsisObjectMetadataType', 'IntegrationRuntimeAuthKeyName', ]
49.711307
117
0.828021
5,496
61,990
8.973799
0.129367
0.066423
0.024006
0.033009
0.800162
0.648094
0.370641
0.199635
0.080819
0.058475
0
0.00722
0.135296
61,990
1,246
118
49.751204
0.912878
0.007292
0
0
0
0
0.14696
0.093195
0
0
0
0
0
1
0
false
0
0.598055
0
0.598055
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
46fbb8768fa190dbf7a7cfd3ae11ca17815de654
112
py
Python
PYTHON/pythonDesafios/desafio008.py
Santos1000/Curso-Python
549223a1633f6f619c87554dd8078cf7841bb1df
[ "MIT" ]
null
null
null
PYTHON/pythonDesafios/desafio008.py
Santos1000/Curso-Python
549223a1633f6f619c87554dd8078cf7841bb1df
[ "MIT" ]
null
null
null
PYTHON/pythonDesafios/desafio008.py
Santos1000/Curso-Python
549223a1633f6f619c87554dd8078cf7841bb1df
[ "MIT" ]
null
null
null
n = int(input('Digite a metragem:')) print('Centimetros: {}cm. \n Milimetros: {}mm.'.format(n * 100, n * 1000))
37.333333
74
0.625
17
112
4.117647
0.823529
0
0
0
0
0
0
0
0
0
0
0.072165
0.133929
112
2
75
56
0.649485
0
0
0
0
0
0.508929
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
200bec0a0cf77cca4a38e51e6e429fa78f409ebc
767
py
Python
code/preprocessing/check_logs.py
lukassnoek/MorbidCuriosityFMRI
2f6e7794b722276a5e660fa27cfe9741565cf28b
[ "BSD-3-Clause" ]
null
null
null
code/preprocessing/check_logs.py
lukassnoek/MorbidCuriosityFMRI
2f6e7794b722276a5e660fa27cfe9741565cf28b
[ "BSD-3-Clause" ]
null
null
null
code/preprocessing/check_logs.py
lukassnoek/MorbidCuriosityFMRI
2f6e7794b722276a5e660fa27cfe9741565cf28b
[ "BSD-3-Clause" ]
null
null
null
import os import os.path as op from glob import glob import pandas as pd import numpy as np files = sorted(glob('../data/raw/*/*events.tsv')) for f in files: df = pd.read_csv(f, sep='\t') df['onset'] = df['rel-onset-pulse'] / 1000 df['trial_type'] = df['trial-type'][df['trial-type'] != 'resp'] df['trial_type'] = ['dec' if 'dec' in t else t for t in df['trial_type']] df['trial_type'] = ['_'.join(t.split('_')[:-1]) if t != 'dec' else t for t in df['trial_type']] #df['trial_type'] = ['_'.join(t.split('_')[:-1]) if len(t.split('_')) > 3 else t for t in df['trial_type']] df = df[df.trial_type.str.contains("resp") == False] print("%s: %r" % (f.split(os.sep)[2], ['_'.join(f.split('_')[2:]) for f in np.unique(df['trial_type'])]))s
45.117647
111
0.589309
138
767
3.152174
0.347826
0.177011
0.278161
0.149425
0.342529
0.342529
0.266667
0.266667
0.266667
0.211494
0
0.014041
0.164276
767
16
112
47.9375
0.664587
0.138201
0
0
0
0
0.233333
0.037879
0
0
0
0
0
0
null
null
0
0.357143
null
null
0.071429
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
3
2012d2ba8010a47e3ae51d64631e3f60eaa877d3
146
py
Python
tests/ngrex/test_ngrex.py
bionlplab/radtext
f011749184e585839d599a94d7867b87e02fa23d
[ "BSD-3-Clause" ]
null
null
null
tests/ngrex/test_ngrex.py
bionlplab/radtext
f011749184e585839d599a94d7867b87e02fa23d
[ "BSD-3-Clause" ]
null
null
null
tests/ngrex/test_ngrex.py
bionlplab/radtext
f011749184e585839d599a94d7867b87e02fa23d
[ "BSD-3-Clause" ]
null
null
null
import pytest from radtext.models.neg import ngrex def test_compile(): p = '@' with pytest.raises(TypeError): ngrex.compile(p)
14.6
36
0.664384
19
146
5.052632
0.736842
0.166667
0
0
0
0
0
0
0
0
0
0
0.226027
146
9
37
16.222222
0.849558
0
0
0
0
0
0.006849
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
2016fca1a3dd9ad25c4ddbfbd9c571f61697e926
193
py
Python
docs/examples/tutorial/2_split/perf_main3.py
ynikitenko/lena
d0fbae47f21007685edbd4e77bc91413421bebd1
[ "Apache-2.0" ]
4
2020-03-01T14:01:48.000Z
2021-02-23T19:33:36.000Z
docs/examples/tutorial/2_split/perf_main3.py
ynikitenko/lena
d0fbae47f21007685edbd4e77bc91413421bebd1
[ "Apache-2.0" ]
1
2021-05-09T15:47:17.000Z
2021-05-09T16:12:03.000Z
docs/examples/tutorial/2_split/perf_main3.py
ynikitenko/lena
d0fbae47f21007685edbd4e77bc91413421bebd1
[ "Apache-2.0" ]
null
null
null
import cProfile from main3 import main if __name__ == "__main__": cProfile.run("main()") #, "perf.txt") # 1.015 s of total 1.115 s # is spent on latex_to_pdf, 10 events analysed
19.3
50
0.658031
31
193
3.774194
0.806452
0
0
0
0
0
0
0
0
0
0
0.073333
0.222798
193
9
51
21.444444
0.706667
0.430052
0
0
0
0
0.130841
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
2017b12ebec2f9b9b5ecb72e9c3da17ef4d79271
4,337
py
Python
skbandit/environments/base.py
dourouc05/scikit-bandit
aa0a0a58304844f593682ff1fe7f0960977bd0d2
[ "BSD-3-Clause" ]
null
null
null
skbandit/environments/base.py
dourouc05/scikit-bandit
aa0a0a58304844f593682ff1fe7f0960977bd0d2
[ "BSD-3-Clause" ]
null
null
null
skbandit/environments/base.py
dourouc05/scikit-bandit
aa0a0a58304844f593682ff1fe7f0960977bd0d2
[ "BSD-3-Clause" ]
null
null
null
from abc import ABC, abstractmethod from typing import List, Union, Dict class Environment(ABC): """An environment on which the bandit acts. This class is made for subclassing. Its subclasses are supposed to be used in experiments. An environment mainly implements a method `reward`, which returns the reward(s) obtained when the bandit plays a (set of) arms. Depending on the exact setting, this environment performs stochastically or adversarially, giving one reward (full-bandit feedback) or one reward per arm (semi-bandit feedback/full information), when these terms make sense (linear bandits, adversarial settings, mostly). A round corresponds to one call of the `reward` or the `rewards` methods. If calling this method becomes impossible (because resources are exhausted, for instance), you must implement both `may_stop_accepting_inputs` and `will_accept_input`. `reward` must be implemented when only full-bandit information is available, i.e. one reward per round. `rewards` must be implemented when the environment provides full information, i.e. one reward per arm and per round (the function returns a list, indexed by the arms), or semi-bandit feedback, i.e. one reward per played arm and per round (the function returns a dictionary, indexed by the played arms); in all cases, this function also returns the (scalar!) reward associated with the arm combination. Neither `reward` nor `rewards` should consider that their input is well-formed, in the set that, if constraints on the set of arms to play should be enforced (like in combinatorial bandits), these constraints are not necessarily satisfied by the input value. Indeed, the bandit algorithm is not always ensured to be able to take these constraints into account when choosing an arm combination to play. 
""" def reward(self, arm: Union[int, List[int]]) -> float: """Record the interaction and return a scalar reward.""" raise NotImplementedError def rewards(self, arm: Union[int, List[int]]) -> (Union[List[float], Dict[int, float]], float): """Record the interaction and return a vector reward.""" raise NotImplementedError @abstractmethod def regret(self, reward: float) -> float: """Compute the exact regret when getting a given reward at the last round. The regret is defined as the difference in reward between the best action and the one that was taken. For stationary stochastic bandits, the best action does not change in time, and thus calling this function with the same reward should return the same value between round. This is not necessarily the case for nonstationary bandits or adversarial bandits. """ pass @property def may_stop_accepting_inputs(self) -> bool: """Indicates whether the environment may stop accepting inputs or always accepts bandit actions. For instance, the environment has the notion of resource: when it is exhausted, the bandit can no more play. In this case, the property should be overwritten to return `True`. """ return False # noinspection PyMethodMayBeStatic def will_accept_input(self) -> bool: """Indicates whether the environment will react correctly at the next round.""" return True class EnvironmentNoMoreAcceptingInputsException(Exception): """The environment no more accepts interactions.""" pass class FullInformationEnvironment(Environment, ABC): @abstractmethod def rewards(self, arm: Union[int, List[int]]) -> (List[float], float): """Record the interaction and return a reward for each arm (be it played or not). As a consequence, the `arm` argument is ignored: the reward is automatically returned for all arms. 
""" pass class SemiBanditFeedbackEnvironment(Environment, ABC): @abstractmethod def rewards(self, arm: Union[int, List[int]]) -> (Dict[int, float], float): """Record the interaction and return a reward for each of the played arms.""" pass class BanditFeedbackEnvironment(Environment, ABC): @abstractmethod def reward(self, arm: Union[int, List[int]]) -> float: """Record the interaction and return a scalar reward.""" pass
48.188889
119
0.71893
591
4,337
5.258883
0.343486
0.013514
0.019305
0.024131
0.209781
0.196268
0.171815
0.171815
0.140283
0.140283
0
0
0.212589
4,337
89
120
48.730337
0.910102
0.67881
0
0.448276
0
0
0
0
0
0
0
0
0
1
0.275862
false
0.172414
0.068966
0
0.586207
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
201c3be5411acece2f7b29ad5cd2821e94e4b591
84,154
py
Python
experimental/fitTest/templates_electron.py
jjacob/DailyPythonScripts
cd6c515c6242d1f3b44e97c8ad05946721b6a36a
[ "Apache-2.0" ]
null
null
null
experimental/fitTest/templates_electron.py
jjacob/DailyPythonScripts
cd6c515c6242d1f3b44e97c8ad05946721b6a36a
[ "Apache-2.0" ]
null
null
null
experimental/fitTest/templates_electron.py
jjacob/DailyPythonScripts
cd6c515c6242d1f3b44e97c8ad05946721b6a36a
[ "Apache-2.0" ]
null
null
null
inputTemplates = { "M3": { "QCD": [ [ 0.0, 0.0004418671450315185, 0.01605229202018541, 0.05825334528354453, 0.08815209477081883, 0.1030093733105401, 0.10513254530282935, 0.09543913272896494, 0.08512520799056993, 0.07489480051367764, 0.06022183314704696, 0.05275434234820922, 0.04426859109280948, 0.03513151550399711, 0.029967498933300033, 0.024464081551639382, 0.020998482981290444, 0.015085519053806937, 0.013212552619472365, 0.010921640496635408, 0.010786019690654257, 0.0079331542797628, 0.007046335967650891, 0.0066796992713355876, 0.00484778962979943, 0.0043565342466071995, 0.0040794364492200835, 0.0027885993970205324, 0.002756423064350152, 0.002331355056268065, 0.0021416660945894994, 0.0017759566793966012, 0.0011629584663072486, 0.001009451124859111, 0.0012831188698206983, 0.0007967669328031654, 0.004698017985185181, 0.0, 0.0, 0.0 ], [ 0.0, 0.0004418671450315186, 0.016052292020185415, 0.05825334528354453, 0.08815209477081884, 0.10300937331054011, 0.10513254530282935, 0.09543913272896497, 0.08512520799056993, 0.07489480051367763, 0.06022183314704697, 0.05275434234820922, 0.04426859109280948, 0.03513151550399711, 0.029967498933300037, 0.024464081551639386, 0.020998482981290444, 0.015085519053806937, 0.013212552619472366, 0.010921640496635408, 0.010786019690654257, 0.0079331542797628, 0.007046335967650892, 0.006679699271335588, 0.00484778962979943, 0.0043565342466071995, 0.004079436449220084, 0.002788599397020533, 0.002756423064350152, 0.002331355056268065, 0.0021416660945894994, 0.0017759566793966015, 0.0011629584663072486, 0.0010094511248591113, 0.0012831188698206985, 0.0007967669328031655, 0.004698017985185181, 0.0, 0.0, 0.0 ], [ 0.0, 0.00044186714503151845, 0.016052292020185405, 0.058253345283544505, 0.0881520947708188, 0.10300937331054005, 0.10513254530282928, 0.09543913272896491, 0.08512520799056988, 0.0748948005136776, 0.06022183314704694, 0.0527543423482092, 0.044268591092809464, 0.035131515503997096, 0.029967498933300023, 0.02446408155163937, 
0.020998482981290433, 0.015085519053806931, 0.013212552619472361, 0.010921640496635406, 0.010786019690654252, 0.007933154279762796, 0.007046335967650889, 0.006679699271335586, 0.004847789629799428, 0.004356534246607198, 0.004079436449220083, 0.0027885993970205316, 0.002756423064350151, 0.0023313550562680647, 0.0021416660945894985, 0.0017759566793966006, 0.0011629584663072482, 0.0010094511248591109, 0.0012831188698206976, 0.0007967669328031652, 0.0046980179851851805, 0.0, 0.0, 0.0 ], [ 0.0, 0.00044186714503151856, 0.016052292020185415, 0.058253345283544526, 0.08815209477081883, 0.1030093733105401, 0.10513254530282934, 0.09543913272896495, 0.08512520799056993, 0.07489480051367763, 0.06022183314704697, 0.05275434234820922, 0.04426859109280948, 0.03513151550399711, 0.029967498933300033, 0.024464081551639382, 0.020998482981290444, 0.01508551905380694, 0.013212552619472365, 0.010921640496635408, 0.010786019690654257, 0.0079331542797628, 0.0070463359676508925, 0.006679699271335588, 0.0048477896297994295, 0.0043565342466071995, 0.004079436449220084, 0.002788599397020533, 0.002756423064350152, 0.0023313550562680655, 0.0021416660945894994, 0.0017759566793966015, 0.0011629584663072486, 0.0010094511248591113, 0.0012831188698206983, 0.0007967669328031656, 0.004698017985185182, 0.0, 0.0, 0.0 ], [ 0.0, 0.00044186714503151845, 0.016052292020185408, 0.05825334528354451, 0.08815209477081881, 0.10300937331054007, 0.10513254530282931, 0.09543913272896493, 0.0851252079905699, 0.07489480051367763, 0.060221833147046946, 0.05275434234820921, 0.044268591092809464, 0.0351315155039971, 0.02996749893330003, 0.02446408155163937, 0.02099848298129044, 0.015085519053806935, 0.013212552619472363, 0.010921640496635408, 0.010786019690654255, 0.007933154279762798, 0.00704633596765089, 0.006679699271335587, 0.004847789629799429, 0.004356534246607199, 0.0040794364492200835, 0.0027885993970205324, 0.002756423064350151, 0.0023313550562680647, 0.002141666094589499, 0.001775956679396601, 
0.0011629584663072484, 0.001009451124859111, 0.0012831188698206976, 0.0007967669328031653, 0.0046980179851851805, 0.0, 0.0, 0.0 ], [ 0.0, 0.00044186714503151845, 0.016052292020185408, 0.05825334528354452, 0.08815209477081881, 0.1030093733105401, 0.10513254530282932, 0.09543913272896495, 0.08512520799056991, 0.07489480051367763, 0.060221833147046946, 0.052754342348209214, 0.044268591092809464, 0.0351315155039971, 0.02996749893330003, 0.02446408155163937, 0.02099848298129044, 0.015085519053806935, 0.013212552619472363, 0.010921640496635408, 0.010786019690654253, 0.007933154279762798, 0.007046335967650891, 0.006679699271335587, 0.0048477896297994295, 0.004356534246607198, 0.0040794364492200835, 0.0027885993970205324, 0.0027564230643501515, 0.002331355056268065, 0.002141666094589499, 0.001775956679396601, 0.0011629584663072484, 0.001009451124859111, 0.0012831188698206979, 0.0007967669328031655, 0.0046980179851851805, 0.0, 0.0, 0.0 ] ], "SingleTop": [ [ 0.0, 0.0, 0.005832177218207422, 0.03178392883540604, 0.07169459952763149, 0.11287210836676197, 0.12176478715865051, 0.12613012243319208, 0.09279832309977645, 0.0794145472218926, 0.06006907882516122, 0.05255651613575753, 0.040665134899883736, 0.03624912116256268, 0.019772311111878084, 0.021835717802889197, 0.01593884467082636, 0.019743744021810564, 0.008520333396281287, 0.008972922567677826, 0.00866376260113745, 0.010388777243208618, 0.007465864279390942, 0.008541920826701573, 0.0069814633847972846, 0.004641107502972211, 0.005051657691380684, 0.003139862804695983, 0.0064010107328637145, 0.0018922562881085226, 0.000984504344510512, 0.0006270305039092417, 0.0, 0.0004977320603021565, 0.0004854616714298998, 0.0007746782412963946, 0.006848591367048002, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.006464253844897605, 0.026118917653010134, 0.060112787514841975, 0.09584106586560479, 0.12925563138779625, 0.11715408034300653, 0.09440622815766804, 0.0716779573912037, 0.06916348777011985, 0.05498302637465252, 0.042697413569360194, 
0.03739191469243603, 0.030265823351673254, 0.02017805782885195, 0.020967376788849502, 0.018012820429670742, 0.01701231602942316, 0.011241488781432862, 0.013054428387246958, 0.011576167379020312, 0.006085157039789595, 0.008138644498565663, 0.003003338789354045, 0.0031858415089799684, 0.005792637555725393, 0.004107311320351441, 0.0009511534181744243, 0.00275249480381622, 0.004384640907243928, 0.0018999453119997332, 0.0023273554030316707, 0.0016001069427164683, 0.0010718452947245862, 0.0012111180899201381, 0.005913165574839969, 0.0, 0.0, 0.0 ], [ 0.0, 0.000730898105715262, 0.004345582215870544, 0.02798607884595784, 0.058042809009575966, 0.08283302044377722, 0.12365219544544459, 0.11925280674249097, 0.07900946342989748, 0.0729889060967615, 0.06555652145311269, 0.05500702243367231, 0.03727507362592039, 0.03497935430893025, 0.038758173666694595, 0.022026642610227798, 0.020289974876133793, 0.02390005152672489, 0.015996129394221792, 0.015502623414619431, 0.013383170539170194, 0.008222802588575384, 0.008923541477108209, 0.009493835160659504, 0.007416365558061059, 0.006450296693177907, 0.006153831272067389, 0.004196024662363883, 0.003400842304431063, 0.005252293744180164, 0.0039361210244639845, 0.004358739163202468, 0.00287066741824079, 0.002285628915640503, 0.002764779086954986, 0.0014452620379491693, 0.011312470708004386, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.01600155485970672, 0.03639273911473297, 0.05968874574025049, 0.10598450262625334, 0.11388841443601717, 0.08307443249770091, 0.08639375480501353, 0.05741122320100601, 0.06314537056043235, 0.053346999957043284, 0.043790356322535307, 0.03638268872477183, 0.028617051623100895, 0.032593396108858065, 0.013798591164209608, 0.02230033822729821, 0.01580926917445075, 0.006236987296217145, 0.017497312659310434, 0.011933038792119726, 0.011202353000571373, 0.005562222992955815, 0.006945493185204689, 0.009975553245066583, 0.005022960250692474, 0.007047783899380394, 0.006433393500369129, 0.006724854313022172, 0.004743829720451964, 
0.004778845074539503, 0.005801395394700851, 0.003081124868615164, 0.0023767450175057143, 0.016016677645895455, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0034855705626097616, 0.010025916336663884, 0.027364239400232984, 0.037694745308698235, 0.09663206840536868, 0.09970075477836253, 0.06106233761893696, 0.056412910319662585, 0.057912542580936144, 0.041328923907318675, 0.06704352646140234, 0.047223924190355336, 0.031021114902507748, 0.03758027943204309, 0.022834400563328236, 0.024808105880289092, 0.036654808787627724, 0.031893202006165335, 0.02739260061438384, 0.012395283525136105, 0.01054237296279351, 0.020567685747427896, 0.012583514912694046, 0.009008845505670094, 0.012527762077976097, 0.017938595283369566, 0.010789448025986654, 0.003632023138839643, 0.01816042267512064, 0.0015585333839998625, 0.0027675759622464006, 0.019484546123949304, 0.004103330254325486, 0.009631143922782703, 0.016236944440788555, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.007336647376339268, 0.02347544978530838, 0.06263222854439522, 0.06610613086026246, 0.035369146080455695, 0.04408531650826217, 0.043560791950211564, 0.09582421142939976, 0.024197640811710778, 0.05814140173848918, 0.025521248131010324, 0.053856110255041584, 0.03162162916096726, 0.07115652068544676, 0.016567788392908175, 0.020825518489763186, 0.02428868407680062, 0.031889226678125646, 0.013637606734355396, 0.029101557862528757, 0.0, 0.025475944986739763, 0.029517463743032837, 0.01538260011507948, 0.006017436893000935, 0.01865575381438486, 0.015824792234214888, 0.0030866067227976217, 0.013980448208807261, 0.0, 0.008943475556169688, 0.025516356476877198, 0.058404265697113315, 0.0, 0.0, 0.0 ] ], "TTJet": [ [ 0.0, 0.0, 0.005866260054832973, 0.032014476569687776, 0.07611743723353019, 0.12269913163544673, 0.204669959277149, 0.1780022148266413, 0.0902222568133845, 0.06720996262179303, 0.045621861833730876, 0.03585930513808567, 0.027308801181492228, 0.022550773464298984, 0.01697272151879135, 0.014303918822958318, 0.009536675235431601, 
0.008151062240943892, 0.006990013596176034, 0.005622428289760508, 0.005432733235420568, 0.004571707788875856, 0.0030247979877491816, 0.0025379990687445786, 0.0021322114740614994, 0.0018967639068011742, 0.001600198531484904, 0.001760907897112518, 0.0010930826959057282, 0.0012682896008723822, 0.0007330317183938927, 0.0007410646772215062, 0.000665137515949025, 0.0005346326125257873, 0.00039168832065960966, 0.00025771205206101376, 0.0016387805620258422, 0.0, 0.0, 0.0 ], [ 0.0, 7.262560201399413e-05, 0.005459965341619546, 0.03172214511310284, 0.07699353745814363, 0.12491126396204628, 0.20003559424615688, 0.17066165025335486, 0.08996890767405352, 0.06649813957714566, 0.05061011517828175, 0.03584184011202735, 0.027588795134809475, 0.023294770858547913, 0.01757932753487905, 0.013027107776830716, 0.012195905630129025, 0.009600558514058243, 0.006854961641894884, 0.0058081556303386135, 0.004698636898302152, 0.004593892903081976, 0.003409789788416189, 0.0021757282806029953, 0.002259041246620286, 0.00221698886107549, 0.0020069814624712992, 0.0014849074170667832, 0.0014821376683882836, 0.0009165103021072048, 0.0005584379824681107, 0.0007847520606728024, 0.0004956669967263232, 0.0007192863381122417, 0.00044359338561214204, 0.000283998679673084, 0.0027442824891683155, 0.0, 0.0, 0.0 ], [ 0.0, 0.00010999244899275781, 0.004524250010912452, 0.030522524117397948, 0.06765121697833965, 0.11925184104915117, 0.18208569394951088, 0.1678624854487025, 0.09332137586299127, 0.06492288997550127, 0.05287619322834948, 0.038590428081298624, 0.03148344300415997, 0.024806268142032767, 0.021888358435464855, 0.016696569370059054, 0.012486064354286668, 0.011756333188735998, 0.00958964683937508, 0.007049851662028582, 0.005738821121195343, 0.005400646562344024, 0.004162263767899146, 0.003934996784796343, 0.00356665539154776, 0.0026563631439693474, 0.0022957990527197767, 0.001734408902217747, 0.0018663108219949586, 0.0012663255620451552, 0.0008837863377775026, 0.0013098166043183537, 0.0005873395717754564, 
0.0008883902275125473, 0.0008014397012084036, 0.0008264688444718471, 0.004604741454914939, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0023000264038757798, 0.01705439603921528, 0.04910070346416718, 0.09122001899630586, 0.17588293609181563, 0.18507124253611063, 0.09123159958610425, 0.05960162269734152, 0.05217691285173646, 0.04256359509315278, 0.03655345441422678, 0.03347575846500428, 0.024626447013499187, 0.020432978830003327, 0.018561221373061258, 0.015463103655853658, 0.013714304272130806, 0.010177342106655817, 0.007905429232179009, 0.007063551755709067, 0.006327206254554339, 0.005548799854541709, 0.00473154328286029, 0.0028560148177622416, 0.003584833469792769, 0.0031925925174912575, 0.0020649767313211443, 0.002451003450795212, 0.0021984280785497233, 0.0011494329646675383, 0.0013478106212170275, 0.0012014834048370694, 0.0007953963931541348, 0.0008277202379397255, 0.007546113042367408, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.008510891182227956, 0.027618114739544775, 0.0636993366514693, 0.14246282154415016, 0.19098302504658946, 0.0872846975295914, 0.05823132301671626, 0.054630204652622194, 0.05224415539511138, 0.03774462633291826, 0.03664568879307432, 0.03118798584556276, 0.025179552074013272, 0.027141927663582498, 0.018962738084157393, 0.017223786461534024, 0.012446304790499648, 0.016704397350814998, 0.010397188282338298, 0.011885364365900734, 0.00654541669225773, 0.0040150732738811505, 0.00794356577980281, 0.004932553709318321, 0.00832159001726371, 0.004070225735221562, 0.004060922854342003, 0.002952745784692842, 0.0018856875946734997, 0.0019478722702617498, 0.0023680681218760063, 0.0023425849040899396, 0.002593594715463811, 0.014835968744435398, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0021323691782451095, 0.006747644148045247, 0.02513638990110083, 0.09137187949815516, 0.17543353593257902, 0.082276692770331, 0.055565932825706814, 0.04899686699219004, 0.048526846692094026, 0.0430927385806642, 0.045633959497397604, 0.03788645694588343, 0.03701656523569493, 0.027036724921725, 
0.025113196273446294, 0.031936114241276965, 0.01971309713885621, 0.01574158090833572, 0.019199474721680914, 0.015918849921157444, 0.020868544525546172, 0.01747935795697653, 0.011748327914222433, 0.011164996360747675, 0.005749847798091752, 0.008617229066506017, 0.0075600827784392515, 0.005840168166936298, 0.004992409839912211, 0.0034805126985320766, 0.004064677716135877, 0.008239422111627327, 0.00550017795981303, 0.030217328781947526, 0.0, 0.0, 0.0 ] ], "V+Jets": [ [ 0.0, 0.0003142927948550799, 0.014512953130632845, 0.05277088304876486, 0.08603434012912506, 0.10506007172016002, 0.1067469629669173, 0.09819689497361543, 0.08685199609224331, 0.0751422047093353, 0.06330097494347225, 0.05305337891368768, 0.042655702448659835, 0.03651482989040328, 0.02983395150144785, 0.02444199511955661, 0.019900292964464696, 0.017253925481071554, 0.014184603279955183, 0.011779383448541635, 0.00996429756420057, 0.008054020034949594, 0.006814650499510039, 0.005461215783698751, 0.004786373871279337, 0.004022818533689391, 0.0032319528725585503, 0.002762787299816871, 0.0021703208306309983, 0.0021801373564605993, 0.0018673122370412762, 0.0014108542664235356, 0.0012263135535731382, 0.0010058133245547092, 0.0009499002073703987, 0.0008661948281679969, 0.004675399379164311, 0.0, 0.0, 0.0 ], [ 0.0, 0.00022095428396729953, 0.012707124119856623, 0.05007359016055423, 0.0816346426541373, 0.10173271618458331, 0.10290453572384554, 0.09596862491288694, 0.0884082114992676, 0.07362064853080143, 0.06504901232482868, 0.05374270427931259, 0.04477683652470922, 0.03677843045188911, 0.03136199798739972, 0.02541267694460663, 0.02154691592437478, 0.018001364153149808, 0.015421585508605756, 0.012398978119919494, 0.01003514773672651, 0.009161746424217295, 0.007765969135257237, 0.006138711956013194, 0.0052930499804763696, 0.004422823978351614, 0.0037285515669491375, 0.0033000573885081388, 0.0021477243482608396, 0.0023129205624489945, 0.0020536026976429207, 0.0015157122589255537, 0.0013647949956127356, 
0.001133513142190638, 0.0011879424280584426, 0.0010872865461224862, 0.005588894565542552, 0.0, 0.0, 0.0 ], [ 0.0, 0.00013748062767261643, 0.009877166688101645, 0.03985777001865452, 0.07035317931696078, 0.08564991747635955, 0.09321755789073534, 0.09152769571355399, 0.08454969496307257, 0.07437875824892262, 0.06683322444614226, 0.056037952121561985, 0.04893731863087072, 0.04248881561509729, 0.03354193912141039, 0.029281795324918397, 0.024076685745943033, 0.021720554346112426, 0.018596818125468612, 0.017199212713088562, 0.013127731797598251, 0.010460260246874836, 0.00954157139587826, 0.007835480797804073, 0.007545069860911577, 0.006347245018720189, 0.005118963773785227, 0.004003056250401388, 0.004070861464439643, 0.0029419886267184367, 0.0025250167311909725, 0.002418125998750675, 0.002143447981672106, 0.002081763711086102, 0.0014299016844281053, 0.0011152608200720772, 0.009030716705020644, 0.0, 0.0, 0.0 ], [ 0.0, 3.810027164398639e-05, 0.003721172602538792, 0.022266225487734905, 0.052880858317373075, 0.06936234087048895, 0.08056057120449574, 0.07733315150944509, 0.08125574087757274, 0.07549143445289774, 0.06728456262529578, 0.06192836491888306, 0.05536944645421989, 0.045633394393801645, 0.04032623500121411, 0.03503166182734747, 0.03146724875386978, 0.027856292271809024, 0.021188248413009467, 0.020359562334310768, 0.017957863346561007, 0.015314505194193905, 0.013059129371655766, 0.01171264945102591, 0.010269417507639498, 0.0082138966340941, 0.007373820414424706, 0.005699349402425763, 0.005494052845004428, 0.0035857607089320705, 0.004141242402878959, 0.00366804368163215, 0.0036880485413815184, 0.0022419443424711526, 0.002389293316833953, 0.0017510306661065013, 0.014085339584786628, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0008402959145172021, 0.01012304376017877, 0.03413361809478428, 0.055835686302263654, 0.06144655746472205, 0.07274592538382918, 0.0695957039335041, 0.06912507857595256, 0.06049550766683466, 0.06076662998718386, 0.05139536619729753, 0.05230417197185052, 
0.04817265716878424, 0.042399582340700605, 0.038569102780889535, 0.03069187593457968, 0.02962141430746964, 0.02472928826393837, 0.020171917940323816, 0.022754474514120604, 0.018291841915874994, 0.0156584356121414, 0.013409375018530394, 0.01158163392322997, 0.008650132858846873, 0.00852104997162392, 0.0077395923376918545, 0.007206155491780463, 0.006961542442628282, 0.005860017366697022, 0.00391848289653562, 0.004549713178876572, 0.003079624087702752, 0.0036477362613671497, 0.025006768132747617, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 9.377464267757747e-05, 0.0009536114146946773, 0.008546856978907304, 0.025105409968864692, 0.03635345299364116, 0.05233149636769967, 0.06279576970695548, 0.0619969969286244, 0.06518475509471011, 0.057002525687038286, 0.05525064394424468, 0.055771707355333607, 0.050594964937779464, 0.048175642600253726, 0.03993835458284252, 0.04207656080204922, 0.0334895737691743, 0.03163942279546589, 0.02498391970544256, 0.027361052245516607, 0.020400636414576803, 0.01862628852026504, 0.015241004242652663, 0.016301343991875956, 0.013793322994982956, 0.016745694488133258, 0.012666671579355788, 0.011513765256619228, 0.006897541689718618, 0.009931398827012047, 0.005615202714648623, 0.008076860989598289, 0.005774921550162517, 0.005468100083585777, 0.05330075413489653, 0.0, 0.0, 0.0 ] ], "data": [ [ 0.0, 0.00020732908308713005, 0.006997356554190639, 0.037267402684911625, 0.07494946353599752, 0.11750375783963096, 0.17384543616855855, 0.16451562742963768, 0.0927797646814907, 0.06878142331415539, 0.05043279946094439, 0.0388223708080651, 0.03120302700461307, 0.026278961281293735, 0.0210439019333437, 0.0177784688747214, 0.013580054942207018, 0.012698906339086716, 0.007930337428082725, 0.007204685637277769, 0.005286891618721816, 0.0050277302648629035, 0.003835588037111906, 0.0030062717047633857, 0.002954439433991603, 0.0020732908308713003, 0.002021458560099518, 0.0021769553724148654, 0.0016586326646970404, 0.0013476390400663453, 0.0004664904369460426, 0.0009329808738920852, 
0.0003628258954024776, 0.0006738195200331726, 0.0004664904369460426, 0.0003628258954024776, 0.003524594412481211, 0.0, 0.0, 0.0 ], [ 0.0, 3.771307889576105e-05, 0.006335797254487856, 0.035827424950973, 0.07572786242268818, 0.1239251772514708, 0.17084024739779755, 0.15775380902096847, 0.09582893347412882, 0.07007090058832403, 0.05336400663750188, 0.04080555136521345, 0.031829838588022324, 0.024362648966661637, 0.02191129883843717, 0.015877206215115403, 0.013086438376829084, 0.011351636747624076, 0.007806607331422537, 0.006373510333383617, 0.006524362648966661, 0.005317544124302308, 0.003884447126263388, 0.0032810378639312114, 0.0027907678382863175, 0.002300497812641424, 0.0018102277869965302, 0.0019610801025795746, 0.0013953839191431588, 0.0011691054457685925, 0.0009051138934982651, 0.000791974656810982, 0.0005656961834364158, 0.0005656961834364158, 0.00041484386785337155, 0.00045255694674913255, 0.0027530547593905565, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.004676616915422886, 0.03189054726368159, 0.07034825870646766, 0.11233830845771144, 0.16432835820895522, 0.16218905472636816, 0.10064676616915423, 0.0691542288557214, 0.052089552238805965, 0.04318407960199005, 0.03228855721393035, 0.028308457711442785, 0.021194029850746268, 0.01890547263681592, 0.014328358208955224, 0.01218905472636816, 0.01054726368159204, 0.008059701492537314, 0.007064676616915423, 0.005920398009950248, 0.004676616915422886, 0.0038308457711442785, 0.003233830845771144, 0.002338308457711443, 0.002238805970149254, 0.0015920398009950248, 0.0019402985074626865, 0.0014427860696517413, 0.0013930348258706466, 0.0011442786069651742, 0.0007462686567164179, 0.0008955223880597015, 0.0008457711442786069, 0.00029850746268656717, 0.0037313432835820895, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0020511418022699304, 0.01887050458088336, 0.055791057021742106, 0.088062354710789, 0.1500068371393409, 0.17489402434021606, 0.09462600847805279, 0.06262819636264187, 0.05825242718446602, 0.04307397784766854, 0.03733078080131273, 
0.03432243949131684, 0.025981129495419118, 0.021058389169971284, 0.01736633392588541, 0.01627239163134145, 0.014221249829071518, 0.013400793108163545, 0.010529194584985642, 0.009298509503623684, 0.005059483112265829, 0.0051962258990838235, 0.005469711472719814, 0.003965540817721865, 0.005743197046355805, 0.0030083413099958978, 0.002734855736359907, 0.0025981129495419118, 0.0015041706549979489, 0.0025981129495419118, 0.0013674278681799535, 0.0017776562286339398, 0.0009571995077259675, 0.0012306850813619582, 0.008751538356351703, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0009174311926605505, 0.006880733944954129, 0.03165137614678899, 0.05963302752293578, 0.12201834862385322, 0.19174311926605506, 0.09128440366972478, 0.060550458715596334, 0.05733944954128441, 0.05045871559633028, 0.044954128440366975, 0.0463302752293578, 0.027522935779816515, 0.021559633027522937, 0.029357798165137616, 0.01834862385321101, 0.017889908256880735, 0.012844036697247707, 0.01651376146788991, 0.00871559633027523, 0.01055045871559633, 0.00871559633027523, 0.008256880733944955, 0.0045871559633027525, 0.0045871559633027525, 0.006422018348623854, 0.005045871559633028, 0.004128440366972477, 0.0013761467889908258, 0.003211009174311927, 0.0013761467889908258, 0.003211009174311927, 0.0022935779816513763, 0.0013761467889908258, 0.01834862385321101, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.007164790174002047, 0.03377686796315251, 0.08290685772773798, 0.14534288638689868, 0.08290685772773798, 0.07164790174002048, 0.05834186284544524, 0.06038894575230297, 0.04401228249744115, 0.04401228249744115, 0.037871033776867964, 0.037871033776867964, 0.03991811668372569, 0.022517911975435005, 0.02456499488229273, 0.016376663254861822, 0.019447287615148415, 0.01023541453428864, 0.016376663254861822, 0.015353121801432959, 0.012282497441146366, 0.012282497441146366, 0.009211873080859774, 0.011258955987717503, 0.011258955987717503, 0.006141248720573183, 0.007164790174002047, 0.008188331627430911, 0.006141248720573183, 
0.006141248720573183, 0.0040941658137154556, 0.0040941658137154556, 0.030706243602865918, 0.0, 0.0, 0.0 ] ] }, "absolute_eta": { "QCD": [ [ 0.05987675238307467, 0.06522186801449939, 0.06936891269300377, 0.07829640333350174, 0.08460292507612137, 0.07806791978226571, 0.07703068379218096, 0.031704073447097934, 0.09427044093820772, 0.11600675641580427, 0.1344869003709273, 0.0900188937850448, 0.021047469968270324, 0.0, 0.0 ], [ 0.05987675238307468, 0.06522186801449939, 0.06936891269300377, 0.07829640333350173, 0.08460292507612138, 0.07806791978226571, 0.07703068379218096, 0.03170407344709794, 0.09427044093820772, 0.11600675641580428, 0.1344869003709273, 0.09001889378504481, 0.021047469968270324, 0.0, 0.0 ], [ 0.05987675238307468, 0.06522186801449939, 0.06936891269300377, 0.07829640333350174, 0.08460292507612137, 0.07806791978226571, 0.07703068379218095, 0.03170407344709794, 0.09427044093820772, 0.11600675641580428, 0.1344869003709273, 0.0900188937850448, 0.02104746996827032, 0.0, 0.0 ], [ 0.05987675238307467, 0.06522186801449939, 0.06936891269300376, 0.07829640333350173, 0.08460292507612137, 0.07806791978226571, 0.07703068379218095, 0.031704073447097934, 0.09427044093820772, 0.11600675641580427, 0.1344869003709273, 0.09001889378504481, 0.021047469968270324, 0.0, 0.0 ], [ 0.05987675238307469, 0.0652218680144994, 0.0693689126930038, 0.07829640333350177, 0.0846029250761214, 0.07806791978226574, 0.07703068379218098, 0.03170407344709795, 0.09427044093820774, 0.11600675641580432, 0.1344869003709274, 0.09001889378504484, 0.02104746996827033, 0.0, 0.0 ], [ 0.05987675238307467, 0.06522186801449939, 0.06936891269300376, 0.07829640333350174, 0.08460292507612137, 0.07806791978226572, 0.07703068379218095, 0.031704073447097934, 0.09427044093820772, 0.11600675641580428, 0.1344869003709273, 0.0900188937850448, 0.02104746996827032, 0.0, 0.0 ] ], "SingleTop": [ [ 0.18057558218441327, 0.14499025725382114, 0.14301457067321732, 0.11905510149999471, 0.11116281843218555, 0.10175203166424354, 
0.08358722065220654, 0.024903272404934507, 0.0270673005092648, 0.027792316493843693, 0.020813846295080696, 0.01136307728578325, 0.003922604651011212, 0.0, 0.0 ], [ 0.16907559988820747, 0.15424141546526418, 0.1460068746361783, 0.1300906193839936, 0.1063964739259216, 0.1000232454738788, 0.08228563667081995, 0.018089722201396815, 0.032019124346381045, 0.02602573031911643, 0.022796910191220873, 0.01072218606059552, 0.0022264614370255727, 0.0, 0.0 ], [ 0.16003961460015984, 0.1626323442032677, 0.1558254727209446, 0.11305712673294387, 0.12336094402733422, 0.09205742315926084, 0.0775142068231737, 0.026461227676590055, 0.03517330384409255, 0.027647442061625817, 0.012133430061119343, 0.011603624701286785, 0.0024938393882007735, 0.0, 0.0 ], [ 0.15734594848409675, 0.15888786299617055, 0.14814652354908442, 0.14086782219989935, 0.09786627247386094, 0.09662926694197307, 0.09284393641032654, 0.02523098583944025, 0.02753618419509209, 0.02286857701816419, 0.012113382427501733, 0.014989407173932287, 0.0046738302904579246, 0.0, 0.0 ], [ 0.16246752294791864, 0.15318236969029106, 0.1906390406902598, 0.1297060823117596, 0.1386042400986142, 0.07852093522670325, 0.06554985680699696, 0.02079115609683946, 0.03400310240241055, 0.004458509076762609, 0.012248645880851786, 0.009792302376248768, 3.6236394343341215e-05, 0.0, 0.0 ], [ 0.15871640987822966, 0.15470658723972922, 0.21755053984397163, 0.1533375154822228, 0.11998310855118453, 0.08626869845902692, 0.04116515500954365, 0.013292578526454766, 0.026961724289846035, 0.014452548628575262, 0.013565134091215745, 0.0, 0.0, 0.0, 0.0 ] ], "TTJet": [ [ 0.1550545124008465, 0.15200333447806322, 0.1436862018143008, 0.13298893526240232, 0.12138064638016564, 0.09762957809840218, 0.08305499669941117, 0.02363678761091243, 0.036811901085146016, 0.026785286346499862, 0.01634337857373554, 0.00847384086084817, 0.0021506003892662103, 0.0, 0.0 ], [ 0.15808975127740826, 0.15219383074518758, 0.14271015995344613, 0.13167512293756195, 0.11396100462917787, 
0.09912118618932568, 0.08319327208386104, 0.022194287617898788, 0.03707235631115356, 0.027925522225522366, 0.019326643549551395, 0.009897596165277064, 0.002639266314628483, 0.0, 0.0 ], [ 0.15874199770924025, 0.15089836772498144, 0.14680274569852497, 0.13660298679841007, 0.11534445603379266, 0.09858125663432105, 0.0838638803072332, 0.019756645643396943, 0.035524658273683216, 0.027139786100692998, 0.01637367661515989, 0.008113890656868862, 0.0022556518036943285, 0.0, 0.0 ], [ 0.16582660651645012, 0.1610555374976697, 0.14677026228381246, 0.1378879509576639, 0.110469194129696, 0.1028065053750657, 0.07775811978136431, 0.019685941077031324, 0.03254771208542584, 0.022355523180477437, 0.014241466088826444, 0.006291155754810762, 0.0023040252717060825, 0.0, 0.0 ], [ 0.1736521960304793, 0.16350182984525446, 0.1422609250027656, 0.14455830491061172, 0.11273444407704002, 0.10339767524840501, 0.07521970265615449, 0.017155293766308036, 0.029652231359880883, 0.020994220007513983, 0.011485919309238313, 0.0037617621544219094, 0.0016254956319265003, 0.0, 0.0 ], [ 0.1747817277705337, 0.16889849861722145, 0.16018415241705022, 0.1451208694903543, 0.12013365391432626, 0.09263394205358792, 0.0704598773869391, 0.021411741844544023, 0.025683289949135886, 0.01197646461682272, 0.004616187660517965, 0.003423724033064587, 0.000675870245902091, 0.0, 0.0 ] ], "V+Jets": [ [ 0.09810317004334954, 0.0987340013013674, 0.10245387658905408, 0.10237809106375455, 0.10598791899285333, 0.10457449516769708, 0.10681369313771015, 0.03443851151421318, 0.07294918890165078, 0.06783182732875304, 0.056051872555258064, 0.037127155408704524, 0.012556197995634203, 0.0, 0.0 ], [ 0.09656302149893584, 0.09777789491691936, 0.10039883058469286, 0.10281822034922027, 0.1033500404439202, 0.10539633428984513, 0.10338779203893626, 0.0335961678370141, 0.07271336912630523, 0.07106426178033483, 0.0588231789442643, 0.04137150761797474, 0.012739380571636847, 0.0, 0.0 ], [ 0.09618069059555734, 0.09993127772863424, 0.10301892816397343, 
0.10465766501421543, 0.10326547778632636, 0.10645977234658961, 0.10304636166633643, 0.032782572681552075, 0.07221093148581915, 0.07108301513391589, 0.05628005779754584, 0.0390270624198895, 0.012056187179644715, 0.0, 0.0 ], [ 0.10680681613625684, 0.10551260762245762, 0.10917668940639748, 0.11161748490713201, 0.10941719560109794, 0.1138374532848857, 0.10161332797791646, 0.0335299143257606, 0.0665756499092455, 0.06202811535605133, 0.045718236205316926, 0.026004507320246093, 0.008162001947235502, 0.0, 0.0 ], [ 0.12052741638537087, 0.10846579552963456, 0.11658850567084543, 0.11192714926420477, 0.11775280491057286, 0.10941590622671524, 0.10739117633730312, 0.031503909191679544, 0.06309008040662006, 0.05076946146323644, 0.03643053178704978, 0.020305098964242832, 0.005832163862524572, 0.0, 0.0 ], [ 0.12975384284701516, 0.1267251769167847, 0.12312672165614938, 0.12411696032523022, 0.11582675418832444, 0.10965521123202573, 0.10913622385814377, 0.027345217535621948, 0.053308768731306605, 0.03841122930940112, 0.021513264049698146, 0.016549558772534407, 0.004531070577764191, 0.0, 0.0 ] ], "data": [ [ 0.13787384025294147, 0.14476753226558856, 0.13740734981599545, 0.12574508889234437, 0.11584512517493392, 0.09765199813403826, 0.08806302804125848, 0.024827657699683824, 0.043020784740579486, 0.03628258954024776, 0.026227129010521953, 0.01694915254237288, 0.005338723889493599, 0.0, 0.0 ], [ 0.1466661638256147, 0.14828782621813244, 0.13784130336400663, 0.13267461155528737, 0.11491175139538391, 0.09786543973449992, 0.08451500980540051, 0.023495248152059132, 0.04122039523306682, 0.032697239402624825, 0.023721526625433698, 0.0129732991401418, 0.003130185548348167, 0.0, 0.0 ], [ 0.1528358208955224, 0.14880597014925373, 0.14751243781094528, 0.13417910447761194, 0.1172636815920398, 0.09706467661691542, 0.08373134328358209, 0.01970149253731343, 0.03691542288557214, 0.028407960199004975, 0.020597014925373133, 0.010298507462686566, 0.0026865671641791043, 0.0, 0.0 ], [ 0.1450840968138931, 
0.15492957746478875, 0.1450840968138931, 0.1355121017366334, 0.12101736633392589, 0.10255709011349652, 0.0824559004512512, 0.022562559824969235, 0.03432243949131684, 0.027758785724053058, 0.017776562286339396, 0.00943525229044168, 0.0015041706549979489, 0.0, 0.0 ], [ 0.15825688073394495, 0.16834862385321103, 0.1559633027522936, 0.12385321100917432, 0.12568807339449542, 0.1018348623853211, 0.07155963302752294, 0.022018348623853212, 0.033486238532110094, 0.02018348623853211, 0.013302752293577982, 0.0045871559633027525, 0.0009174311926605505, 0.0, 0.0 ], [ 0.18628454452405324, 0.18526100307062437, 0.14943705220061412, 0.1187308085977482, 0.13715455475946775, 0.08700102354145343, 0.061412487205731836, 0.02456499488229273, 0.01842374616171955, 0.015353121801432959, 0.006141248720573183, 0.009211873080859774, 0.0010235414534288639, 0.0, 0.0 ] ] }, "angle_bl": { "QCD": [ [ 0.014948806610719855, 0.040811329597561834, 0.07230211935795718, 0.09245654083596451, 0.08198826353962475, 0.07707368362629263, 0.07128824050954027, 0.07500887052363427, 0.07313950417418742, 0.06999112443841299, 0.07117100434460526, 0.06963996625573543, 0.06937435511875396, 0.06144702385913922, 0.044024700942259534, 0.015334466265610622, 0.0, 0.0, 0.0, 0.0 ], [ 0.014948806610719857, 0.04081132959756185, 0.07230211935795719, 0.09245654083596452, 0.08198826353962477, 0.07707368362629265, 0.07128824050954029, 0.07500887052363427, 0.07313950417418744, 0.069991124438413, 0.07117100434460528, 0.06963996625573544, 0.06937435511875398, 0.06144702385913922, 0.04402470094225954, 0.015334466265610623, 0.0, 0.0, 0.0, 0.0 ], [ 0.014948806610719857, 0.04081132959756185, 0.07230211935795719, 0.09245654083596452, 0.08198826353962475, 0.07707368362629265, 0.07128824050954027, 0.07500887052363427, 0.07313950417418744, 0.069991124438413, 0.07117100434460527, 0.06963996625573544, 0.06937435511875398, 0.06144702385913922, 0.04402470094225954, 0.015334466265610622, 0.0, 0.0, 0.0, 0.0 ], [ 0.014948806610719857, 
0.04081132959756185, 0.0723021193579572, 0.09245654083596454, 0.08198826353962478, 0.07707368362629266, 0.07128824050954029, 0.07500887052363428, 0.07313950417418744, 0.06999112443841302, 0.07117100434460528, 0.06963996625573544, 0.06937435511875398, 0.06144702385913923, 0.044024700942259555, 0.015334466265610625, 0.0, 0.0, 0.0, 0.0 ], [ 0.014948806610719857, 0.04081132959756185, 0.07230211935795719, 0.09245654083596454, 0.08198826353962478, 0.07707368362629266, 0.07128824050954029, 0.07500887052363427, 0.07313950417418744, 0.069991124438413, 0.07117100434460527, 0.06963996625573546, 0.06937435511875398, 0.06144702385913923, 0.04402470094225954, 0.015334466265610623, 0.0, 0.0, 0.0, 0.0 ], [ 0.014948806610719853, 0.04081132959756185, 0.07230211935795718, 0.09245654083596452, 0.08198826353962477, 0.07707368362629265, 0.07128824050954029, 0.07500887052363425, 0.07313950417418742, 0.069991124438413, 0.07117100434460527, 0.06963996625573544, 0.06937435511875396, 0.06144702385913922, 0.04402470094225954, 0.015334466265610623, 0.0, 0.0, 0.0, 0.0 ] ], "SingleTop": [ [ 0.0039046088803905993, 0.0468224953538969, 0.09579928482616777, 0.12595253180535548, 0.11777897388008238, 0.1498882626577888, 0.13266536894410155, 0.09526073124638967, 0.09009423712618884, 0.0581134004322832, 0.035990036883331286, 0.02499864359315429, 0.015675007743320927, 0.0064284873235083695, 0.0, 0.0006279293040401627, 0.0, 0.0, 0.0, 0.0 ], [ 0.0030043350271660306, 0.04090570271049234, 0.09921906893962014, 0.13280694993585118, 0.11890105132240404, 0.1384492036480165, 0.12061061527434254, 0.09462047674239636, 0.08625557938505997, 0.05693862407551142, 0.04573908605552821, 0.0370767712650885, 0.014202677471047753, 0.007966212352980747, 0.0033036457944943206, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.005305113055895024, 0.03820766486469783, 0.08859780314960239, 0.12159514179547479, 0.13532397095489504, 0.13146481946982322, 0.1233788256486948, 0.10661246024236333, 0.07947192817124155, 0.07702284821574824, 
0.03915642931492725, 0.02387711180017436, 0.015647896598402714, 0.008348368058359947, 0.005421840937790337, 0.0005677777219091426, 0.0, 0.0, 0.0, 0.0 ], [ 0.005373502159378196, 0.03394559712385344, 0.10036628252312571, 0.1268301740947897, 0.132763843085672, 0.13059388443129716, 0.11898859613758431, 0.10078268593932638, 0.08387062667655523, 0.06486620067652175, 0.04499811474394459, 0.03179146606632535, 0.011515261132118149, 0.011871971411068214, 0.0014417937984399403, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 3.6236394343341215e-05, 0.022192024901051845, 0.0936171041756505, 0.12962619975448886, 0.14378709042327772, 0.12329263844201582, 0.1273403823145343, 0.12538907608746688, 0.07350118737075455, 0.062492193160663685, 0.034915248311680885, 0.040084252462314986, 0.015086722933157425, 0.00863964326859921, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.018180978013050075, 0.1271108104042842, 0.11009502410076885, 0.13029456150713759, 0.08477187366729436, 0.09856207259936148, 0.11798851074054852, 0.12113559223892216, 0.033864511687758385, 0.08447926937520527, 0.02574341736979062, 0.031744650817397045, 0.013459984855643353, 0.002568742622838117, 0.0, 0.0, 0.0, 0.0, 0.0 ] ], "TTJet": [ [ 0.003480091226042866, 0.043984096758788255, 0.11452728728744802, 0.14898203136159344, 0.1560439985753794, 0.1450982592356159, 0.12262834347830495, 0.09590979119544099, 0.0655258657422941, 0.04425877017362726, 0.030056612454449502, 0.015653881802439904, 0.00878533677757391, 0.0032171636031975803, 0.001700338090636742, 0.00014813223716723204, 0.0, 0.0, 0.0, 0.0 ], [ 0.004159459158989972, 0.04706654361068396, 0.11421815935270008, 0.1563841026022785, 0.14965383121264697, 0.14007040911447977, 0.1201856061748921, 0.09338970376184354, 0.0676209523871961, 0.045655689306888764, 0.030207238850880107, 0.017986929069649365, 0.008474479846015974, 0.003634897534104121, 0.0011192912172336225, 0.00017270679951702342, 0.0, 0.0, 0.0, 0.0 ], [ 0.0032581601038781756, 0.04657526905961961, 0.11743934924960031, 
0.15089669601363306, 0.15263100075767067, 0.1379052659230357, 0.11695929643988157, 0.092301719219073, 0.06907903054627627, 0.04759067177768197, 0.029020888222445335, 0.019655799243985486, 0.010834201314263448, 0.00401148110295032, 0.0014235242267248685, 0.0004176467992800951, 0.0, 0.0, 0.0, 0.0 ], [ 0.002760937104959862, 0.04880297591087044, 0.12220075782334347, 0.15160861681258872, 0.15457075138800133, 0.13363217509976422, 0.11561962756713415, 0.09006578502558703, 0.06649730974585269, 0.04651019740475168, 0.03152330279310956, 0.01715118539791801, 0.012299336965665219, 0.00473613170958992, 0.0019295894161559233, 9.131983470784158e-05, 0.0, 0.0, 0.0, 0.0 ], [ 0.0044173500599130255, 0.052517009746740964, 0.15360699943658074, 0.15050589373764617, 0.16193409864810168, 0.14142959315605183, 0.10655366091622036, 0.07874301883301783, 0.05240542264521773, 0.042610043751563915, 0.02624073455032506, 0.014945834501212655, 0.008509256675874976, 0.004748373074091724, 0.0006159125147027135, 0.00021679775273854992, 0.0, 0.0, 0.0, 0.0 ], [ 0.0035190108766115155, 0.07751477565596746, 0.16486537665477827, 0.20619457767391186, 0.1561797448906424, 0.11890125995332525, 0.09240630355471696, 0.060022237958876096, 0.04004777375781091, 0.03054078206711892, 0.023459980935669884, 0.012617602409851627, 0.005836939426081265, 0.004561216918333808, 0.0026028787684306896, 0.0007295384978730949, 0.0, 0.0, 0.0, 0.0 ] ], "V+Jets": [ [ 0.005232249690924095, 0.04804504393562702, 0.08665461064850623, 0.11397685384223152, 0.10510580805725092, 0.09639128332438138, 0.10342173364229315, 0.09332936636045015, 0.09259456965073734, 0.08058673012873738, 0.06254861389416443, 0.058726208247432646, 0.029099705980963918, 0.01929873067644767, 0.004645684777564139, 0.0003428071422880995, 0.0, 0.0, 0.0, 0.0 ], [ 0.008153057569074529, 0.051270035698688925, 0.08427410766735173, 0.10196234814846003, 0.10810939436016276, 0.09373650112973582, 0.09866098791532578, 0.10491991292586804, 0.0825251934449296, 0.08118767641716787, 
0.06956920693918874, 0.047840857459107176, 0.041155582300322756, 0.018617594513993613, 0.007486227105081234, 0.0005313164055415914, 0.0, 0.0, 0.0, 0.0 ], [ 0.0054733561194921085, 0.04456323225374236, 0.07592348727403073, 0.09287722886846272, 0.12044686637921556, 0.10564601306989904, 0.10295187047187623, 0.09256333027989423, 0.09998876689145801, 0.07577730959705979, 0.060838142101634886, 0.06211686282777812, 0.028783901680846664, 0.025962524091274194, 0.006087108093335067, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.007195744806787184, 0.029642462669177853, 0.06420621534730936, 0.11233903490589119, 0.12454477143817928, 0.08216625297431576, 0.08510506064273342, 0.09040518957328482, 0.12055247076395395, 0.08712992336665631, 0.06093727342953177, 0.05935791460681155, 0.04817501504620984, 0.018791707810709934, 0.00945096261844774, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.04458806406624592, 0.031175629763670495, 0.07809552258309761, 0.08226898864922706, 0.0934169589565704, 0.1013694079433151, 0.07734890252511513, 0.13464099347268435, 0.09485523961791913, 0.09790629756040296, 0.07536367434236739, 0.02551497295485148, 0.03759488650692585, 0.02586046105760723, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.00260442129680468, 0.03972848751584939, 0.02924765873510957, 0.05711568918858208, 0.08990618469918282, 0.10770822649909946, 0.10793313382185941, 0.09972141665085242, 0.1461018433967429, 0.10112523775710691, 0.0721685934724986, 0.07075104525348042, 0.05771902890600391, 0.018169032806827457, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ], "data": [ [ 0.005442388431037164, 0.047063701860778524, 0.10459752241745711, 0.1395843051884103, 0.14310889960089151, 0.14279790597626082, 0.11439382159332401, 0.09500855232467735, 0.07422381174519256, 0.05136578033483647, 0.034831285958637846, 0.022909863681127872, 0.01404654537915306, 0.007256517908049552, 0.0032136007878505158, 0.00015549681231534753, 0.0, 0.0, 0.0, 0.0 ], [ 0.003771307889576105, 0.04623623472620304, 0.11015990345451802, 0.1397269573087947, 0.14843867853371548, 
0.13829386031075577, 0.12022929551968622, 0.09390556645044501, 0.07470960929250263, 0.05072409111479861, 0.032584100165937546, 0.02164730728616684, 0.011955046009956252, 0.0057701010710514405, 0.0016593754714134862, 0.00018856539447880525, 0.0, 0.0, 0.0, 0.0 ], [ 0.0031840796019900496, 0.04497512437810945, 0.11069651741293532, 0.1417412935323383, 0.14432835820895523, 0.13706467661691543, 0.11711442786069651, 0.09731343283582089, 0.07139303482587064, 0.05616915422885572, 0.03711442786069652, 0.021641791044776117, 0.01, 0.00527363184079602, 0.0016417910447761193, 0.00034825870646766166, 0.0, 0.0, 0.0, 0.0 ], [ 0.003692055244085875, 0.048133460959934364, 0.11951319567892794, 0.15055380828661288, 0.14344318337207712, 0.13140981813209354, 0.11773553945029401, 0.09216463831532887, 0.06645699439354574, 0.04895391768084234, 0.03404895391768085, 0.023383016545877208, 0.010665937371803639, 0.00765759606180774, 0.001914399015451935, 0.0002734855736359907, 0.0, 0.0, 0.0, 0.0 ], [ 0.005045871559633028, 0.045871559633027525, 0.12752293577981652, 0.13990825688073394, 0.17293577981651376, 0.12155963302752294, 0.11788990825688074, 0.08256880733944955, 0.06651376146788991, 0.05, 0.03256880733944954, 0.01743119266055046, 0.012385321100917432, 0.004128440366972477, 0.003211009174311927, 0.00045871559633027525, 0.0, 0.0, 0.0, 0.0 ], [ 0.0020470829068577278, 0.05322415557830092, 0.172978505629478, 0.15967246673490276, 0.15967246673490276, 0.13203684749232344, 0.09825997952917093, 0.06653019447287616, 0.05834186284544524, 0.032753326509723645, 0.016376663254861822, 0.02968270214943705, 0.009211873080859774, 0.008188331627430911, 0.0010235414534288639, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] } }
34.098055
40
0.430306
4,531
84,154
7.991613
0.376297
0.036564
0.046645
0.051257
0.149765
0.124109
0.121955
0.111571
0.109252
0.065037
0
0.880422
0.512869
84,154
2,468
41
34.098055
0.002878
0
0
0.32577
0
0
0.001224
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
202cdf2c1bdfe742a6f47309e126c4b160b4606e
152
py
Python
bin/playeah.py
fmcevoy/playeah
98c77bc6ba5b6f53cffeab9ed3133e682d26a54d
[ "Apache-2.0" ]
null
null
null
bin/playeah.py
fmcevoy/playeah
98c77bc6ba5b6f53cffeab9ed3133e682d26a54d
[ "Apache-2.0" ]
null
null
null
bin/playeah.py
fmcevoy/playeah
98c77bc6ba5b6f53cffeab9ed3133e682d26a54d
[ "Apache-2.0" ]
null
null
null
from flask import Flask app = Flask(__name__) @app.route('/') def hello_world(): return 'SLACK' if __name__ == '__main__': app.run('0.0.0.0')
15.2
26
0.638158
23
152
3.652174
0.652174
0.071429
0.071429
0
0
0
0
0
0
0
0
0.032
0.177632
152
9
27
16.888889
0.64
0
0
0
0
0
0.138158
0
0
0
0
0
0
1
0.142857
false
0
0.142857
0.142857
0.428571
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
3
20340dd789efade347d26b33c5f251e4772390c8
191
py
Python
openhab_creator/models/items/string.py
DerOetzi/openhab_creator
197876df5aae84192c34418f6b9a7cfcee23b195
[ "MIT" ]
1
2021-11-16T22:48:26.000Z
2021-11-16T22:48:26.000Z
openhab_creator/models/items/string.py
DerOetzi/openhab_creator
197876df5aae84192c34418f6b9a7cfcee23b195
[ "MIT" ]
null
null
null
openhab_creator/models/items/string.py
DerOetzi/openhab_creator
197876df5aae84192c34418f6b9a7cfcee23b195
[ "MIT" ]
null
null
null
from __future__ import annotations from openhab_creator.models.items.baseitem import BaseItem class String(BaseItem): @property def itemtype(self) -> str: return 'String'
17.363636
58
0.732984
22
191
6.136364
0.772727
0
0
0
0
0
0
0
0
0
0
0
0.193717
191
10
59
19.1
0.876623
0
0
0
0
0
0.031414
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
0
0
0
3
20535fed7c65d21e954d7c1bc7d1efa1b7d9f466
198
py
Python
flog/api/v3/__init__.py
mutalisk999/Flog
5d836e26967b39faebdf2d5a2c558316bf93221b
[ "MIT" ]
1
2020-08-24T03:39:52.000Z
2020-08-24T03:39:52.000Z
flog/api/v3/__init__.py
mutalisk999/Flog
5d836e26967b39faebdf2d5a2c558316bf93221b
[ "MIT" ]
null
null
null
flog/api/v3/__init__.py
mutalisk999/Flog
5d836e26967b39faebdf2d5a2c558316bf93221b
[ "MIT" ]
null
null
null
""" MIT License Copyright(c) 2021 Andy Zhou """ from apiflask import APIBlueprint from flask_cors import CORS api_v3 = APIBlueprint("api_v3", __name__) CORS(api_v3) from . import views
16.5
42
0.722222
28
198
4.821429
0.607143
0.111111
0.133333
0
0
0
0
0
0
0
0
0.04375
0.191919
198
11
43
18
0.8
0.19697
0
0
0
0
0.042857
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0.4
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
64b1c71697b09cb95f15828fb33ae48f468492c5
880
py
Python
api/migrations/0005_auto_20210310_1807.py
TariqueNasrullah/LibraryManagementSystem
4b3da701de3b13b61f40284bc3d832578b438532
[ "MIT" ]
null
null
null
api/migrations/0005_auto_20210310_1807.py
TariqueNasrullah/LibraryManagementSystem
4b3da701de3b13b61f40284bc3d832578b438532
[ "MIT" ]
null
null
null
api/migrations/0005_auto_20210310_1807.py
TariqueNasrullah/LibraryManagementSystem
4b3da701de3b13b61f40284bc3d832578b438532
[ "MIT" ]
1
2021-04-20T12:38:39.000Z
2021-04-20T12:38:39.000Z
# Generated by Django 3.1.7 on 2021-03-10 18:07 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('api', '0004_book_length'), ] operations = [ migrations.AlterField( model_name='author', name='biography', field=models.TextField(blank=True), ), migrations.AlterField( model_name='author', name='country', field=models.CharField(blank=True, max_length=50), ), migrations.AlterField( model_name='author', name='education', field=models.CharField(blank=True, max_length=100), ), migrations.AlterField( model_name='author', name='occupation', field=models.CharField(blank=True, max_length=100), ), ]
25.882353
63
0.557955
85
880
5.670588
0.482353
0.165975
0.207469
0.240664
0.572614
0.572614
0.248963
0.170124
0
0
0
0.045378
0.323864
880
33
64
26.666667
0.764706
0.051136
0
0.518519
1
0
0.093637
0
0
0
0
0
0
1
0
false
0
0.037037
0
0.148148
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
b38186c55119cd10ab39c83127a43fee8ef8550b
705
py
Python
type_hints/prototype.py
Carsten-Leue/type-hints
839eff963d87d3ae93d7ae595b83962cf7cd570b
[ "MIT" ]
null
null
null
type_hints/prototype.py
Carsten-Leue/type-hints
839eff963d87d3ae93d7ae595b83962cf7cd570b
[ "MIT" ]
null
null
null
type_hints/prototype.py
Carsten-Leue/type-hints
839eff963d87d3ae93d7ae595b83962cf7cd570b
[ "MIT" ]
null
null
null
from typing import Protocol, Union class MyFirstClass(object): def print(self) -> None: print('first') def first(self) -> None: pass class MySecondClass(object): def print(self) -> None: print('second') def second(self) -> None: pass def printer(out: Union[MyFirstClass, MySecondClass]) -> None: out.print() # duck typing class for the print method class Printable(Protocol): def print(self) -> None: pass def better_printer(out: Printable) -> None: out.print() if __name__ == '__main__': f = MyFirstClass() s = MySecondClass() printer(f) printer(s) better_printer(f) better_printer(s)
14.1
61
0.611348
82
705
5.121951
0.353659
0.095238
0.085714
0.114286
0.128571
0.128571
0
0
0
0
0
0
0.269504
705
49
62
14.387755
0.815534
0.053901
0
0.32
0
0
0.028614
0
0
0
0
0
0
1
0.28
false
0.12
0.04
0
0.44
0.52
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
1
0
3
b393d98e9aaf1b5b27315134cdfc31f1331b71f5
126
py
Python
barcap/__main__.py
Barmaley13/CaptureBarcode
e19556dd515a1b86cf32b5bdca4dca398d1f0ef1
[ "MIT" ]
1
2021-04-17T18:04:19.000Z
2021-04-17T18:04:19.000Z
barcap/__main__.py
Barmaley13/CaptureBarcode
e19556dd515a1b86cf32b5bdca4dca398d1f0ef1
[ "MIT" ]
1
2021-07-08T09:48:07.000Z
2021-07-08T17:36:22.000Z
barcap/__main__.py
Barmaley13/CaptureBarcode
e19556dd515a1b86cf32b5bdca4dca398d1f0ef1
[ "MIT" ]
1
2019-09-27T12:37:25.000Z
2019-09-27T12:37:25.000Z
""" Adding ability to run package as an executable """ from barcap.main import main if __name__ == '__main__': main()
11.454545
46
0.674603
17
126
4.529412
0.823529
0
0
0
0
0
0
0
0
0
0
0
0.214286
126
10
47
12.6
0.777778
0.365079
0
0
0
0
0.111111
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
b39f44884911a34b7eda57d193bb15d3ed8ef17a
223
py
Python
hr/api/urls.py
dcopm999/thesis
22c601fd11dd8f64aca6e13d4a1fac5e41e24114
[ "MIT" ]
null
null
null
hr/api/urls.py
dcopm999/thesis
22c601fd11dd8f64aca6e13d4a1fac5e41e24114
[ "MIT" ]
null
null
null
hr/api/urls.py
dcopm999/thesis
22c601fd11dd8f64aca6e13d4a1fac5e41e24114
[ "MIT" ]
null
null
null
from rest_framework import routers from hr.api import views router = routers.SimpleRouter() router.register(r"department", views.DepartmentViewSet) router.register(r"staff", views.StaffViewSet) urlpatterns = router.urls
22.3
55
0.811659
28
223
6.428571
0.642857
0.155556
0.166667
0
0
0
0
0
0
0
0
0
0.09417
223
9
56
24.777778
0.891089
0
0
0
0
0
0.067265
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
b3b34fcae5e6f37e13773bf1a2332a7e04ee8514
258
py
Python
token.py
ellamosi/libgade-codegen
e9ec027ceb9c2d041725b1745fbcc909cb4d1db0
[ "MIT" ]
null
null
null
token.py
ellamosi/libgade-codegen
e9ec027ceb9c2d041725b1745fbcc909cb4d1db0
[ "MIT" ]
null
null
null
token.py
ellamosi/libgade-codegen
e9ec027ceb9c2d041725b1745fbcc909cb4d1db0
[ "MIT" ]
null
null
null
class Token: def __init__(self): self.content = [] def add_content(self, content): self.content.append(content) def close(self): if len(self.content) == 0: raise Exception("Content for token '" + self.token_name() + "' is missing")
23.454545
81
0.643411
34
258
4.705882
0.529412
0.275
0.225
0
0
0
0
0
0
0
0
0.004902
0.209302
258
10
82
25.8
0.779412
0
0
0
0
0
0.120155
0
0
0
0
0
0
1
0.375
false
0
0
0
0.5
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
b3bab7acbcfc9a3e9170b17c0b6e3dfb0fef9dc7
14
py
Python
joml/__init__.py
jjerphan/joml
142108b08d82e06dcdddf00b659177b166c42db8
[ "MIT" ]
null
null
null
joml/__init__.py
jjerphan/joml
142108b08d82e06dcdddf00b659177b166c42db8
[ "MIT" ]
9
2018-08-28T10:36:03.000Z
2021-02-06T05:57:19.000Z
joml/__init__.py
jjerphan/joml
142108b08d82e06dcdddf00b659177b166c42db8
[ "MIT" ]
1
2018-09-12T01:46:16.000Z
2018-09-12T01:46:16.000Z
name = "joml"
7
13
0.571429
2
14
4
1
0
0
0
0
0
0
0
0
0
0
0
0.214286
14
1
14
14
0.727273
0
0
0
0
0
0.285714
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
b3bf068919c953fee15dd3a15d11fc9ad492d41b
42,009
py
Python
quantifiedcode/migrations/versions/a2e0f8f4b344_initial_models.py
marcinguy/quantifiedcode
cafc8b99d56a5e51820421af5d77be8b736ab03d
[ "BSD-3-Clause" ]
118
2017-01-03T18:18:29.000Z
2022-02-06T15:32:02.000Z
quantifiedcode/migrations/versions/a2e0f8f4b344_initial_models.py
marcinguy/quantifiedcode
cafc8b99d56a5e51820421af5d77be8b736ab03d
[ "BSD-3-Clause" ]
14
2016-12-21T11:26:48.000Z
2022-03-02T10:32:24.000Z
quantifiedcode/migrations/versions/a2e0f8f4b344_initial_models.py
marcinguy/quantifiedcode
cafc8b99d56a5e51820421af5d77be8b736ab03d
[ "BSD-3-Clause" ]
26
2017-08-01T10:00:16.000Z
2022-02-06T15:31:55.000Z
"""Initial models. Revision ID: a2e0f8f4b344 Revises: Create Date: 2016-11-20 23:02:51.424015 """ # revision identifiers, used by Alembic. revision = 'a2e0f8f4b344' down_revision = None branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('issuecategory', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('name', sa.String(length=50), nullable=True), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('name', name='unique_issuecategory_name') ) op.create_index(op.f('ix_issuecategory_created_at'), 'issuecategory', ['created_at'], unique=False) op.create_index(op.f('ix_issuecategory_name'), 'issuecategory', ['name'], unique=False) op.create_index(op.f('ix_issuecategory_pk'), 'issuecategory', ['pk'], unique=False) op.create_index(op.f('ix_issuecategory_updated_at'), 'issuecategory', ['updated_at'], unique=False) op.create_table('issueclass', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('code', sa.String(length=50), nullable=True), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('severity', sa.Integer(), nullable=True), sa.Column('language', sa.String(length=50), nullable=True), sa.Column('title', sa.String(length=100), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('analyzer', sa.String(length=50), nullable=True), sa.Column('occurrence_description', sa.String(length=2000), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('description', sa.Text(), nullable=True), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('code', 'analyzer', name='unique_together_issueclass_code_analyzer') ) 
op.create_index(op.f('ix_issueclass_analyzer'), 'issueclass', ['analyzer'], unique=False) op.create_index(op.f('ix_issueclass_code'), 'issueclass', ['code'], unique=False) op.create_index(op.f('ix_issueclass_created_at'), 'issueclass', ['created_at'], unique=False) op.create_index(op.f('ix_issueclass_hash'), 'issueclass', ['hash'], unique=False) op.create_index(op.f('ix_issueclass_language'), 'issueclass', ['language'], unique=False) op.create_index(op.f('ix_issueclass_occurrence_description'), 'issueclass', ['occurrence_description'], unique=False) op.create_index(op.f('ix_issueclass_pk'), 'issueclass', ['pk'], unique=False) op.create_index(op.f('ix_issueclass_severity'), 'issueclass', ['severity'], unique=False) op.create_index(op.f('ix_issueclass_title'), 'issueclass', ['title'], unique=False) op.create_index(op.f('ix_issueclass_updated_at'), 'issueclass', ['updated_at'], unique=False) op.create_table('project', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('analysis_priority', sa.Integer(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('fetch_status', sa.String(), nullable=True), sa.Column('analysis_status', sa.String(length=50), nullable=True), sa.Column('analysis_requested_at', sa.DateTime(), nullable=True), sa.Column('fetch_error', sa.Text(), nullable=True), sa.Column('source', sa.String(length=100), nullable=False), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('analyze', sa.Boolean(), nullable=True), sa.Column('public', sa.Boolean(), nullable=True), sa.Column('description', sa.String(length=2000), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=True), sa.Column('fetched_at', sa.DateTime(), nullable=True), sa.Column('configuration', sa.String(length=64), nullable=True), sa.Column('reset', sa.Boolean(), nullable=True), sa.Column('permalink', sa.String(length=100), nullable=False), sa.Column('name', sa.String(length=100), nullable=True), sa.Column('created_at', sa.DateTime(), 
nullable=True), sa.Column('analyzed_at', sa.DateTime(), nullable=True), sa.Column('reset_requested_at', sa.DateTime(), nullable=True), sa.Column('delete', sa.Boolean(), nullable=True), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('permalink', name=u'unique_project_permalink') ) op.create_index(op.f('ix_project_analysis_priority'), 'project', ['analysis_priority'], unique=False) op.create_index(op.f('ix_project_analysis_requested_at'), 'project', ['analysis_requested_at'], unique=False) op.create_index(op.f('ix_project_analysis_status'), 'project', ['analysis_status'], unique=False) op.create_index(op.f('ix_project_analyze'), 'project', ['analyze'], unique=False) op.create_index(op.f('ix_project_analyzed_at'), 'project', ['analyzed_at'], unique=False) op.create_index(op.f('ix_project_configuration'), 'project', ['configuration'], unique=False) op.create_index(op.f('ix_project_created_at'), 'project', ['created_at'], unique=False) op.create_index(op.f('ix_project_delete'), 'project', ['delete'], unique=False) op.create_index(op.f('ix_project_deleted'), 'project', ['deleted'], unique=False) op.create_index(op.f('ix_project_description'), 'project', ['description'], unique=False) op.create_index(op.f('ix_project_fetch_status'), 'project', ['fetch_status'], unique=False) op.create_index(op.f('ix_project_fetched_at'), 'project', ['fetched_at'], unique=False) op.create_index(op.f('ix_project_name'), 'project', ['name'], unique=False) op.create_index(op.f('ix_project_permalink'), 'project', ['permalink'], unique=False) op.create_index(op.f('ix_project_pk'), 'project', ['pk'], unique=False) op.create_index(op.f('ix_project_public'), 'project', ['public'], unique=False) op.create_index(op.f('ix_project_reset'), 'project', ['reset'], unique=False) op.create_index(op.f('ix_project_reset_requested_at'), 'project', ['reset_requested_at'], unique=False) op.create_index(op.f('ix_project_source'), 'project', ['source'], unique=False) 
op.create_index(op.f('ix_project_updated_at'), 'project', ['updated_at'], unique=False) op.create_table('tag', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('name', sa.String(length=50), nullable=True), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('name', name='unique_tag_name') ) op.create_index(op.f('ix_tag_created_at'), 'tag', ['created_at'], unique=False) op.create_index(op.f('ix_tag_name'), 'tag', ['name'], unique=False) op.create_index(op.f('ix_tag_pk'), 'tag', ['pk'], unique=False) op.create_index(op.f('ix_tag_updated_at'), 'tag', ['updated_at'], unique=False) op.create_table('user', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('email_validated', sa.Boolean(), nullable=True), sa.Column('superuser', sa.Boolean(), nullable=True), sa.Column('new_email', sa.String(length=255), nullable=True), sa.Column('name', sa.String(length=50), nullable=True), sa.Column('email_change_requested_at', sa.DateTime(), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('email', sa.String(length=255), nullable=True), sa.Column('terms_accepted_at', sa.DateTime(), nullable=True), sa.Column('terms_accepted', sa.Boolean(), nullable=True), sa.Column('password_reset_code', sa.String(length=64), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('email_validation_code', sa.String(length=64), nullable=True), sa.Column('password', sa.String(length=128), nullable=True), sa.Column('password_reset_requested_at', sa.DateTime(), nullable=True), sa.Column('delete', sa.Boolean(), nullable=True), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('email', name='unique_user_email'), sa.UniqueConstraint('name', name='unique_user_name') ) op.create_index(op.f('ix_user_created_at'), 
'user', ['created_at'], unique=False) op.create_index(op.f('ix_user_delete'), 'user', ['delete'], unique=False) op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False) op.create_index(op.f('ix_user_email_validated'), 'user', ['email_validated'], unique=False) op.create_index(op.f('ix_user_email_validation_code'), 'user', ['email_validation_code'], unique=False) op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=False) op.create_index(op.f('ix_user_new_email'), 'user', ['new_email'], unique=False) op.create_index(op.f('ix_user_password_reset_code'), 'user', ['password_reset_code'], unique=False) op.create_index(op.f('ix_user_pk'), 'user', ['pk'], unique=False) op.create_index(op.f('ix_user_updated_at'), 'user', ['updated_at'], unique=False) op.create_table('accesstoken', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('token', sa.String(length=64), nullable=True), sa.Column('user', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['user'], ['user.pk'], name='accesstoken_user_user', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_accesstoken_created_at'), 'accesstoken', ['created_at'], unique=False) op.create_index(op.f('ix_accesstoken_pk'), 'accesstoken', ['pk'], unique=False) op.create_index(op.f('ix_accesstoken_token'), 'accesstoken', ['token'], unique=False) op.create_index(op.f('ix_accesstoken_updated_at'), 'accesstoken', ['updated_at'], unique=False) op.create_index(op.f('ix_accesstoken_user'), 'accesstoken', ['user'], unique=False) op.create_table('filerevision', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('language', sa.String(length=50), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', 
sa.DateTime(), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), sa.Column('sha', sa.String(length=64), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('path', sa.String(length=2000), nullable=True), sa.Column('configuration', sa.String(length=64), nullable=True), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'filerevision_project_project', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_filerevision_configuration'), 'filerevision', ['configuration'], unique=False) op.create_index(op.f('ix_filerevision_created_at'), 'filerevision', ['created_at'], unique=False) op.create_index(op.f('ix_filerevision_hash'), 'filerevision', ['hash'], unique=False) op.create_index(op.f('ix_filerevision_language'), 'filerevision', ['language'], unique=False) op.create_index(op.f('ix_filerevision_path'), 'filerevision', ['path'], unique=False) op.create_index(op.f('ix_filerevision_pk'), 'filerevision', ['pk'], unique=False) op.create_index(op.f('ix_filerevision_project'), 'filerevision', ['project'], unique=False) op.create_index(op.f('ix_filerevision_sha'), 'filerevision', ['sha'], unique=False) op.create_index(op.f('ix_filerevision_updated_at'), 'filerevision', ['updated_at'], unique=False) op.create_table('issue', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('code', sa.String(length=100), nullable=False), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('analyzer', sa.String(length=100), nullable=False), sa.Column('project', sa.String(length=32), nullable=False), sa.Column('fingerprint', sa.String(length=255), nullable=False), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('configuration', sa.String(length=64), nullable=True), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'issue_project_project', 
ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('project', 'fingerprint', 'analyzer', 'code', name='unique_together_issue_project_fingerprint_analyzer_code') ) op.create_index(op.f('ix_issue_analyzer'), 'issue', ['analyzer'], unique=False) op.create_index(op.f('ix_issue_code'), 'issue', ['code'], unique=False) op.create_index(op.f('ix_issue_configuration'), 'issue', ['configuration'], unique=False) op.create_index(op.f('ix_issue_created_at'), 'issue', ['created_at'], unique=False) op.create_index(op.f('ix_issue_fingerprint'), 'issue', ['fingerprint'], unique=False) op.create_index(op.f('ix_issue_hash'), 'issue', ['hash'], unique=False) op.create_index(op.f('ix_issue_pk'), 'issue', ['pk'], unique=False) op.create_index(op.f('ix_issue_project'), 'issue', ['project'], unique=False) op.create_index(op.f('ix_issue_updated_at'), 'issue', ['updated_at'], unique=False) op.create_table('issueclass_issuecategory_categories', sa.Column('issuecategory', sa.String(length=32), nullable=True), sa.Column('issueclass', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['issuecategory'], ['issuecategory.pk'], name='issueclass_issuecategory_categories_issuecategory', ondelete='CASCADE'), sa.ForeignKeyConstraint(['issueclass'], ['issueclass.pk'], name='issueclass_issuecategory_categories_issueclass', ondelete='CASCADE'), sa.UniqueConstraint('issueclass', 'issuecategory', name='issueclass_issuecategory_categories_categories_unique') ) op.create_index(op.f('ix_issueclass_issuecategory_categories_issuecategory'), 'issueclass_issuecategory_categories', ['issuecategory'], unique=False) op.create_index(op.f('ix_issueclass_issuecategory_categories_issueclass'), 'issueclass_issuecategory_categories', ['issueclass'], unique=False) op.create_table('issueclass_tag_tags', sa.Column('tag', sa.String(length=32), nullable=True), sa.Column('issueclass', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['issueclass'], ['issueclass.pk'], 
name='issueclass_tag_tags_issueclass', ondelete='CASCADE'), sa.ForeignKeyConstraint(['tag'], ['tag.pk'], name='issueclass_tag_tags_tag', ondelete='CASCADE'), sa.UniqueConstraint('issueclass', 'tag', name='issueclass_tag_tags_tags_unique') ) op.create_index(op.f('ix_issueclass_tag_tags_issueclass'), 'issueclass_tag_tags', ['issueclass'], unique=False) op.create_index(op.f('ix_issueclass_tag_tags_tag'), 'issueclass_tag_tags', ['tag'], unique=False) op.create_table('project_tag_tags', sa.Column('tag', sa.String(length=32), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'project_tag_tags_project', ondelete='CASCADE'), sa.ForeignKeyConstraint(['tag'], ['tag.pk'], name=u'project_tag_tags_tag', ondelete='CASCADE'), sa.UniqueConstraint('project', 'tag', name=u'project_tag_tags_tags_unique') ) op.create_index(op.f('ix_project_tag_tags_project'), 'project_tag_tags', ['project'], unique=False) op.create_index(op.f('ix_project_tag_tags_tag'), 'project_tag_tags', ['tag'], unique=False) op.create_table('projectissueclass', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('enabled', sa.Boolean(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), sa.Column('issue_class', sa.String(length=32), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.ForeignKeyConstraint(['issue_class'], ['issueclass.pk'], name='projectissueclass_issueclass_issue_class', ondelete='CASCADE'), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'projectissueclass_project_project', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('project', 'issue_class', name='unique_together_projectissueclass_project_issue_class') ) op.create_index(op.f('ix_projectissueclass_created_at'), 'projectissueclass', ['created_at'], unique=False) 
op.create_index(op.f('ix_projectissueclass_issue_class'), 'projectissueclass', ['issue_class'], unique=False) op.create_index(op.f('ix_projectissueclass_pk'), 'projectissueclass', ['pk'], unique=False) op.create_index(op.f('ix_projectissueclass_project'), 'projectissueclass', ['project'], unique=False) op.create_index(op.f('ix_projectissueclass_updated_at'), 'projectissueclass', ['updated_at'], unique=False) op.create_table('snapshot', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), sa.Column('analyzed', sa.Boolean(), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('configuration', sa.String(length=64), nullable=True), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'snapshot_project_project', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_snapshot_analyzed'), 'snapshot', ['analyzed'], unique=False) op.create_index(op.f('ix_snapshot_configuration'), 'snapshot', ['configuration'], unique=False) op.create_index(op.f('ix_snapshot_created_at'), 'snapshot', ['created_at'], unique=False) op.create_index(op.f('ix_snapshot_hash'), 'snapshot', ['hash'], unique=False) op.create_index(op.f('ix_snapshot_pk'), 'snapshot', ['pk'], unique=False) op.create_index(op.f('ix_snapshot_project'), 'snapshot', ['project'], unique=False) op.create_index(op.f('ix_snapshot_updated_at'), 'snapshot', ['updated_at'], unique=False) op.create_table('task', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('status', sa.String(length=50), nullable=True), sa.Column('last_ping', sa.DateTime(), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), 
sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('type', sa.String(length=50), nullable=True), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'task_project_project', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_task_created_at'), 'task', ['created_at'], unique=False) op.create_index(op.f('ix_task_last_ping'), 'task', ['last_ping'], unique=False) op.create_index(op.f('ix_task_pk'), 'task', ['pk'], unique=False) op.create_index(op.f('ix_task_project'), 'task', ['project'], unique=False) op.create_index(op.f('ix_task_status'), 'task', ['status'], unique=False) op.create_index(op.f('ix_task_type'), 'task', ['type'], unique=False) op.create_index(op.f('ix_task_updated_at'), 'task', ['updated_at'], unique=False) op.create_table('userrole', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), sa.Column('role', sa.String(length=30), nullable=True), sa.Column('user', sa.String(length=32), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'userrole_project_project', ondelete='CASCADE'), sa.ForeignKeyConstraint(['user'], ['user.pk'], name='userrole_user_user', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_userrole_created_at'), 'userrole', ['created_at'], unique=False) op.create_index(op.f('ix_userrole_pk'), 'userrole', ['pk'], unique=False) op.create_index(op.f('ix_userrole_project'), 'userrole', ['project'], unique=False) op.create_index(op.f('ix_userrole_role'), 'userrole', ['role'], unique=False) op.create_index(op.f('ix_userrole_updated_at'), 'userrole', ['updated_at'], unique=False) op.create_index(op.f('ix_userrole_user'), 'userrole', ['user'], unique=False) op.create_table('diff', sa.Column('data', sa.LargeBinary(), nullable=True), 
sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('snapshot_b', sa.String(length=32), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('project', sa.String(length=32), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('configuration', sa.String(length=64), nullable=True), sa.Column('snapshot_a', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['project'], [u'project.pk'], name=u'diff_project_project', ondelete='CASCADE'), sa.ForeignKeyConstraint(['snapshot_a'], ['snapshot.pk'], name='diff_snapshot_snapshot_a', ondelete='CASCADE'), sa.ForeignKeyConstraint(['snapshot_b'], ['snapshot.pk'], name='diff_snapshot_snapshot_b', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_diff_configuration'), 'diff', ['configuration'], unique=False) op.create_index(op.f('ix_diff_created_at'), 'diff', ['created_at'], unique=False) op.create_index(op.f('ix_diff_hash'), 'diff', ['hash'], unique=False) op.create_index(op.f('ix_diff_pk'), 'diff', ['pk'], unique=False) op.create_index(op.f('ix_diff_project'), 'diff', ['project'], unique=False) op.create_index(op.f('ix_diff_snapshot_a'), 'diff', ['snapshot_a'], unique=False) op.create_index(op.f('ix_diff_snapshot_b'), 'diff', ['snapshot_b'], unique=False) op.create_index(op.f('ix_diff_updated_at'), 'diff', ['updated_at'], unique=False) op.create_table('disksnapshot', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('snapshot', sa.String(length=32), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.ForeignKeyConstraint(['snapshot'], ['snapshot.pk'], name='disksnapshot_snapshot_snapshot', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk'), sa.UniqueConstraint('snapshot', name='unique_disksnapshot_snapshot') ) 
op.create_index(op.f('ix_disksnapshot_created_at'), 'disksnapshot', ['created_at'], unique=False) op.create_index(op.f('ix_disksnapshot_pk'), 'disksnapshot', ['pk'], unique=False) op.create_index(op.f('ix_disksnapshot_snapshot'), 'disksnapshot', ['snapshot'], unique=False) op.create_index(op.f('ix_disksnapshot_updated_at'), 'disksnapshot', ['updated_at'], unique=False) op.create_table('filerevision_filerevision_dependencies', sa.Column('filerevision_right', sa.String(length=32), nullable=True), sa.Column('filerevision', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['filerevision'], [u'filerevision.pk'], name=u'filerevision_filerevision_dependencies_filerevision', ondelete='CASCADE'), sa.ForeignKeyConstraint(['filerevision_right'], [u'filerevision.pk'], name=u'filerevision_filerevision_dependencies_filerevision_right', ondelete='CASCADE'), sa.UniqueConstraint('filerevision', 'filerevision_right', name=u'filerevision_filerevision_dependencies_dependencies_unique') ) op.create_index(op.f('ix_filerevision_filerevision_dependencies_filerevision'), 'filerevision_filerevision_dependencies', ['filerevision'], unique=False) op.create_index(op.f('ix_filerevision_filerevision_dependencies_filerevision_right'), 'filerevision_filerevision_dependencies', ['filerevision_right'], unique=False) op.create_table('issueoccurrence', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('sequence', sa.Integer(), nullable=True), sa.Column('to_column', sa.Integer(), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('to_row', sa.Integer(), nullable=True), sa.Column('file_revision', sa.String(length=32), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('from_row', sa.Integer(), nullable=True), sa.Column('issue', sa.String(length=32), nullable=True), sa.Column('from_column', sa.Integer(), 
nullable=True), sa.ForeignKeyConstraint(['file_revision'], [u'filerevision.pk'], name=u'issueoccurrence_filerevision_file_revision', ondelete='CASCADE'), sa.ForeignKeyConstraint(['issue'], ['issue.pk'], name='issueoccurrence_issue_issue', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_issueoccurrence_created_at'), 'issueoccurrence', ['created_at'], unique=False) op.create_index(op.f('ix_issueoccurrence_file_revision'), 'issueoccurrence', ['file_revision'], unique=False) op.create_index(op.f('ix_issueoccurrence_hash'), 'issueoccurrence', ['hash'], unique=False) op.create_index(op.f('ix_issueoccurrence_issue'), 'issueoccurrence', ['issue'], unique=False) op.create_index(op.f('ix_issueoccurrence_pk'), 'issueoccurrence', ['pk'], unique=False) op.create_index(op.f('ix_issueoccurrence_updated_at'), 'issueoccurrence', ['updated_at'], unique=False) op.create_table('snapshot_filerevision_file_revisions', sa.Column('filerevision', sa.String(length=32), nullable=True), sa.Column('snapshot', sa.String(length=32), nullable=True), sa.ForeignKeyConstraint(['filerevision'], [u'filerevision.pk'], name=u'snapshot_filerevision_file_revisions_filerevision', ondelete='CASCADE'), sa.ForeignKeyConstraint(['snapshot'], ['snapshot.pk'], name=u'snapshot_filerevision_file_revisions_snapshot', ondelete='CASCADE'), sa.UniqueConstraint('snapshot', 'filerevision', name=u'snapshot_filerevision_file_revisions_file_revisions_unique') ) op.create_index(op.f('ix_snapshot_filerevision_file_revisions_filerevision'), 'snapshot_filerevision_file_revisions', ['filerevision'], unique=False) op.create_index(op.f('ix_snapshot_filerevision_file_revisions_snapshot'), 'snapshot_filerevision_file_revisions', ['snapshot'], unique=False) op.create_table('difffilerevision', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), 
nullable=True), sa.Column('file_revision', sa.String(length=32), nullable=True), sa.Column('key', sa.Enum(u'added', u'deleted', u'modified', name='difffilerevision_key', native_enum=False), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('diff', sa.String(length=32), nullable=True), sa.Column('configuration', sa.String(length=64), nullable=True), sa.ForeignKeyConstraint(['diff'], ['diff.pk'], name='difffilerevision_diff_diff', ondelete='CASCADE'), sa.ForeignKeyConstraint(['file_revision'], [u'filerevision.pk'], name=u'difffilerevision_filerevision_file_revision', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_difffilerevision_configuration'), 'difffilerevision', ['configuration'], unique=False) op.create_index(op.f('ix_difffilerevision_created_at'), 'difffilerevision', ['created_at'], unique=False) op.create_index(op.f('ix_difffilerevision_diff'), 'difffilerevision', ['diff'], unique=False) op.create_index(op.f('ix_difffilerevision_file_revision'), 'difffilerevision', ['file_revision'], unique=False) op.create_index(op.f('ix_difffilerevision_hash'), 'difffilerevision', ['hash'], unique=False) op.create_index(op.f('ix_difffilerevision_pk'), 'difffilerevision', ['pk'], unique=False) op.create_index(op.f('ix_difffilerevision_updated_at'), 'difffilerevision', ['updated_at'], unique=False) op.create_table('diffissueoccurrence', sa.Column('data', sa.LargeBinary(), nullable=True), sa.Column('configuration', sa.String(length=64), nullable=True), sa.Column('hash', sa.String(length=64), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('key', sa.Enum(u'added', u'fixed', name='diffissueoccurrence_key', native_enum=False), nullable=True), sa.Column('pk', sa.String(length=32), nullable=False), sa.Column('diff', sa.String(length=32), nullable=True), sa.Column('issue_occurrence', sa.String(length=32), nullable=True), 
sa.ForeignKeyConstraint(['diff'], ['diff.pk'], name='diffissueoccurrence_diff_diff', ondelete='CASCADE'), sa.ForeignKeyConstraint(['issue_occurrence'], ['issueoccurrence.pk'], name='diffissueoccurrence_issueoccurrence_issue_occurrence', ondelete='CASCADE'), sa.PrimaryKeyConstraint('pk') ) op.create_index(op.f('ix_diffissueoccurrence_configuration'), 'diffissueoccurrence', ['configuration'], unique=False) op.create_index(op.f('ix_diffissueoccurrence_created_at'), 'diffissueoccurrence', ['created_at'], unique=False) op.create_index(op.f('ix_diffissueoccurrence_diff'), 'diffissueoccurrence', ['diff'], unique=False) op.create_index(op.f('ix_diffissueoccurrence_hash'), 'diffissueoccurrence', ['hash'], unique=False) op.create_index(op.f('ix_diffissueoccurrence_issue_occurrence'), 'diffissueoccurrence', ['issue_occurrence'], unique=False) op.create_index(op.f('ix_diffissueoccurrence_pk'), 'diffissueoccurrence', ['pk'], unique=False) op.create_index(op.f('ix_diffissueoccurrence_updated_at'), 'diffissueoccurrence', ['updated_at'], unique=False) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! 
###
    # Auto-generated Alembic teardown: reverses the migrations created in upgrade().
    # Order matters — indexes are dropped before their table, and child tables
    # (those holding FOREIGN KEY references) are dropped before their parents.
    op.drop_index(op.f('ix_diffissueoccurrence_updated_at'), table_name='diffissueoccurrence')
    op.drop_index(op.f('ix_diffissueoccurrence_pk'), table_name='diffissueoccurrence')
    op.drop_index(op.f('ix_diffissueoccurrence_issue_occurrence'), table_name='diffissueoccurrence')
    op.drop_index(op.f('ix_diffissueoccurrence_hash'), table_name='diffissueoccurrence')
    op.drop_index(op.f('ix_diffissueoccurrence_diff'), table_name='diffissueoccurrence')
    op.drop_index(op.f('ix_diffissueoccurrence_created_at'), table_name='diffissueoccurrence')
    op.drop_index(op.f('ix_diffissueoccurrence_configuration'), table_name='diffissueoccurrence')
    op.drop_table('diffissueoccurrence')
    op.drop_index(op.f('ix_difffilerevision_updated_at'), table_name='difffilerevision')
    op.drop_index(op.f('ix_difffilerevision_pk'), table_name='difffilerevision')
    op.drop_index(op.f('ix_difffilerevision_hash'), table_name='difffilerevision')
    op.drop_index(op.f('ix_difffilerevision_file_revision'), table_name='difffilerevision')
    op.drop_index(op.f('ix_difffilerevision_diff'), table_name='difffilerevision')
    op.drop_index(op.f('ix_difffilerevision_created_at'), table_name='difffilerevision')
    op.drop_index(op.f('ix_difffilerevision_configuration'), table_name='difffilerevision')
    op.drop_table('difffilerevision')
    # Association (many-to-many) tables go before the entities they link.
    op.drop_index(op.f('ix_snapshot_filerevision_file_revisions_snapshot'), table_name='snapshot_filerevision_file_revisions')
    op.drop_index(op.f('ix_snapshot_filerevision_file_revisions_filerevision'), table_name='snapshot_filerevision_file_revisions')
    op.drop_table('snapshot_filerevision_file_revisions')
    op.drop_index(op.f('ix_issueoccurrence_updated_at'), table_name='issueoccurrence')
    op.drop_index(op.f('ix_issueoccurrence_pk'), table_name='issueoccurrence')
    op.drop_index(op.f('ix_issueoccurrence_issue'), table_name='issueoccurrence')
    op.drop_index(op.f('ix_issueoccurrence_hash'), table_name='issueoccurrence')
    op.drop_index(op.f('ix_issueoccurrence_file_revision'), table_name='issueoccurrence')
    op.drop_index(op.f('ix_issueoccurrence_created_at'), table_name='issueoccurrence')
    op.drop_table('issueoccurrence')
    op.drop_index(op.f('ix_filerevision_filerevision_dependencies_filerevision_right'), table_name='filerevision_filerevision_dependencies')
    op.drop_index(op.f('ix_filerevision_filerevision_dependencies_filerevision'), table_name='filerevision_filerevision_dependencies')
    op.drop_table('filerevision_filerevision_dependencies')
    op.drop_index(op.f('ix_disksnapshot_updated_at'), table_name='disksnapshot')
    op.drop_index(op.f('ix_disksnapshot_snapshot'), table_name='disksnapshot')
    op.drop_index(op.f('ix_disksnapshot_pk'), table_name='disksnapshot')
    op.drop_index(op.f('ix_disksnapshot_created_at'), table_name='disksnapshot')
    op.drop_table('disksnapshot')
    op.drop_index(op.f('ix_diff_updated_at'), table_name='diff')
    op.drop_index(op.f('ix_diff_snapshot_b'), table_name='diff')
    op.drop_index(op.f('ix_diff_snapshot_a'), table_name='diff')
    op.drop_index(op.f('ix_diff_project'), table_name='diff')
    op.drop_index(op.f('ix_diff_pk'), table_name='diff')
    op.drop_index(op.f('ix_diff_hash'), table_name='diff')
    op.drop_index(op.f('ix_diff_created_at'), table_name='diff')
    op.drop_index(op.f('ix_diff_configuration'), table_name='diff')
    op.drop_table('diff')
    op.drop_index(op.f('ix_userrole_user'), table_name='userrole')
    op.drop_index(op.f('ix_userrole_updated_at'), table_name='userrole')
    op.drop_index(op.f('ix_userrole_role'), table_name='userrole')
    op.drop_index(op.f('ix_userrole_project'), table_name='userrole')
    op.drop_index(op.f('ix_userrole_pk'), table_name='userrole')
    op.drop_index(op.f('ix_userrole_created_at'), table_name='userrole')
    op.drop_table('userrole')
    op.drop_index(op.f('ix_task_updated_at'), table_name='task')
    op.drop_index(op.f('ix_task_type'), table_name='task')
    op.drop_index(op.f('ix_task_status'), table_name='task')
    op.drop_index(op.f('ix_task_project'), table_name='task')
    op.drop_index(op.f('ix_task_pk'), table_name='task')
    op.drop_index(op.f('ix_task_last_ping'), table_name='task')
    op.drop_index(op.f('ix_task_created_at'), table_name='task')
    op.drop_table('task')
    op.drop_index(op.f('ix_snapshot_updated_at'), table_name='snapshot')
    op.drop_index(op.f('ix_snapshot_project'), table_name='snapshot')
    op.drop_index(op.f('ix_snapshot_pk'), table_name='snapshot')
    op.drop_index(op.f('ix_snapshot_hash'), table_name='snapshot')
    op.drop_index(op.f('ix_snapshot_created_at'), table_name='snapshot')
    op.drop_index(op.f('ix_snapshot_configuration'), table_name='snapshot')
    op.drop_index(op.f('ix_snapshot_analyzed'), table_name='snapshot')
    op.drop_table('snapshot')
    op.drop_index(op.f('ix_projectissueclass_updated_at'), table_name='projectissueclass')
    op.drop_index(op.f('ix_projectissueclass_project'), table_name='projectissueclass')
    op.drop_index(op.f('ix_projectissueclass_pk'), table_name='projectissueclass')
    op.drop_index(op.f('ix_projectissueclass_issue_class'), table_name='projectissueclass')
    op.drop_index(op.f('ix_projectissueclass_created_at'), table_name='projectissueclass')
    op.drop_table('projectissueclass')
    op.drop_index(op.f('ix_project_tag_tags_tag'), table_name='project_tag_tags')
    op.drop_index(op.f('ix_project_tag_tags_project'), table_name='project_tag_tags')
    op.drop_table('project_tag_tags')
    op.drop_index(op.f('ix_issueclass_tag_tags_tag'), table_name='issueclass_tag_tags')
    op.drop_index(op.f('ix_issueclass_tag_tags_issueclass'), table_name='issueclass_tag_tags')
    op.drop_table('issueclass_tag_tags')
    op.drop_index(op.f('ix_issueclass_issuecategory_categories_issueclass'), table_name='issueclass_issuecategory_categories')
    op.drop_index(op.f('ix_issueclass_issuecategory_categories_issuecategory'), table_name='issueclass_issuecategory_categories')
    op.drop_table('issueclass_issuecategory_categories')
    op.drop_index(op.f('ix_issue_updated_at'), table_name='issue')
    op.drop_index(op.f('ix_issue_project'), table_name='issue')
    op.drop_index(op.f('ix_issue_pk'), table_name='issue')
    op.drop_index(op.f('ix_issue_hash'), table_name='issue')
    op.drop_index(op.f('ix_issue_fingerprint'), table_name='issue')
    op.drop_index(op.f('ix_issue_created_at'), table_name='issue')
    op.drop_index(op.f('ix_issue_configuration'), table_name='issue')
    op.drop_index(op.f('ix_issue_code'), table_name='issue')
    op.drop_index(op.f('ix_issue_analyzer'), table_name='issue')
    op.drop_table('issue')
    op.drop_index(op.f('ix_filerevision_updated_at'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_sha'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_project'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_pk'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_path'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_language'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_hash'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_created_at'), table_name='filerevision')
    op.drop_index(op.f('ix_filerevision_configuration'), table_name='filerevision')
    op.drop_table('filerevision')
    op.drop_index(op.f('ix_accesstoken_user'), table_name='accesstoken')
    op.drop_index(op.f('ix_accesstoken_updated_at'), table_name='accesstoken')
    op.drop_index(op.f('ix_accesstoken_token'), table_name='accesstoken')
    op.drop_index(op.f('ix_accesstoken_pk'), table_name='accesstoken')
    op.drop_index(op.f('ix_accesstoken_created_at'), table_name='accesstoken')
    op.drop_table('accesstoken')
    op.drop_index(op.f('ix_user_updated_at'), table_name='user')
    op.drop_index(op.f('ix_user_pk'), table_name='user')
    op.drop_index(op.f('ix_user_password_reset_code'), table_name='user')
    op.drop_index(op.f('ix_user_new_email'), table_name='user')
    op.drop_index(op.f('ix_user_name'), table_name='user')
    op.drop_index(op.f('ix_user_email_validation_code'), table_name='user')
    op.drop_index(op.f('ix_user_email_validated'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_index(op.f('ix_user_delete'), table_name='user')
    op.drop_index(op.f('ix_user_created_at'), table_name='user')
    op.drop_table('user')
    op.drop_index(op.f('ix_tag_updated_at'), table_name='tag')
    op.drop_index(op.f('ix_tag_pk'), table_name='tag')
    op.drop_index(op.f('ix_tag_name'), table_name='tag')
    op.drop_index(op.f('ix_tag_created_at'), table_name='tag')
    op.drop_table('tag')
    op.drop_index(op.f('ix_project_updated_at'), table_name='project')
    op.drop_index(op.f('ix_project_source'), table_name='project')
    op.drop_index(op.f('ix_project_reset_requested_at'), table_name='project')
    op.drop_index(op.f('ix_project_reset'), table_name='project')
    op.drop_index(op.f('ix_project_public'), table_name='project')
    op.drop_index(op.f('ix_project_pk'), table_name='project')
    op.drop_index(op.f('ix_project_permalink'), table_name='project')
    op.drop_index(op.f('ix_project_name'), table_name='project')
    op.drop_index(op.f('ix_project_fetched_at'), table_name='project')
    op.drop_index(op.f('ix_project_fetch_status'), table_name='project')
    op.drop_index(op.f('ix_project_description'), table_name='project')
    op.drop_index(op.f('ix_project_deleted'), table_name='project')
    op.drop_index(op.f('ix_project_delete'), table_name='project')
    op.drop_index(op.f('ix_project_created_at'), table_name='project')
    op.drop_index(op.f('ix_project_configuration'), table_name='project')
    op.drop_index(op.f('ix_project_analyzed_at'), table_name='project')
    op.drop_index(op.f('ix_project_analyze'), table_name='project')
    op.drop_index(op.f('ix_project_analysis_status'), table_name='project')
    op.drop_index(op.f('ix_project_analysis_requested_at'), table_name='project')
    op.drop_index(op.f('ix_project_analysis_priority'), table_name='project')
    op.drop_table('project')
    # Root entity tables last — nothing references them once the above are gone.
    op.drop_index(op.f('ix_issueclass_updated_at'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_title'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_severity'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_pk'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_occurrence_description'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_language'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_hash'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_created_at'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_code'), table_name='issueclass')
    op.drop_index(op.f('ix_issueclass_analyzer'), table_name='issueclass')
    op.drop_table('issueclass')
    op.drop_index(op.f('ix_issuecategory_updated_at'), table_name='issuecategory')
    op.drop_index(op.f('ix_issuecategory_pk'), table_name='issuecategory')
    op.drop_index(op.f('ix_issuecategory_name'), table_name='issuecategory')
    op.drop_index(op.f('ix_issuecategory_created_at'), table_name='issuecategory')
    op.drop_table('issuecategory')
    ### end Alembic commands ###
70.015
169
0.727035
5,725
42,009
5.071441
0.027074
0.066543
0.076049
0.095061
0.877454
0.844286
0.77485
0.681064
0.639905
0.480023
0
0.005994
0.094575
42,009
599
170
70.131886
0.757335
0.006689
0
0.22031
0
0
0.356901
0.154071
0
0
0
0
0
1
0.003442
false
0.008606
0.003442
0
0.006885
0.006885
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
b3c5892cc22f77a510d9793bfabef9653585e1df
654
py
Python
CriptoBot/menu.py
vitorcardmore/CriptoBot
42cb287e68d288f0f4fa629b972fb10185908ecb
[ "MIT" ]
null
null
null
CriptoBot/menu.py
vitorcardmore/CriptoBot
42cb287e68d288f0f4fa629b972fb10185908ecb
[ "MIT" ]
null
null
null
CriptoBot/menu.py
vitorcardmore/CriptoBot
42cb287e68d288f0f4fa629b972fb10185908ecb
[ "MIT" ]
2
2020-10-26T22:42:03.000Z
2021-07-29T16:12:46.000Z
class Menu: def __init__(self): self.divMenu = '=' * 40 self.infoBot = 'CriptoBot / Versao: 1.1.2' self.desenvolvedor = 'Guilherme Malaquias' def inicial_menu(self) -> str: return f'{self.divMenu}\n0 - Consultar Moeda\n1 - Consultar Moeda Por Intervalo\n\n2 - Para sair\n{self.divMenu}' def mostra_moedas_menu(self) -> str: return f'{self.divMenu}\n1 - BTC-BRL (Bitcoin)\n2 - LTC-BRL (Litecoin)\n3 - ETH-BRL (Ethereum)\n4 - XRP-BRL (' \ f'Ripple)\n\n9 - Voltar\n{self.divMenu}' def decisao_menu(self) -> str: return f'Deseja continuar?\n1 - Sim\n2 - Nao\n{self.divMenu}'
38.470588
121
0.616208
92
654
4.293478
0.532609
0.167089
0.083544
0.129114
0.192405
0.146835
0.146835
0
0
0
0
0.02988
0.232416
654
16
122
40.875
0.756972
0
0
0
0
0.166667
0.513761
0.033639
0
0
0
0
0
1
0.333333
false
0
0
0.25
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
b3cfe19a755ad6a92d71c21e3ac7042ed597ee96
1,076
py
Python
project/server/models/user.py
TobiasPrt/Smartphoniker-shop
6b74a3cc1c81db7a56d70609dbca29ddeec3053f
[ "MIT" ]
2
2020-05-11T08:46:45.000Z
2020-05-11T09:09:57.000Z
project/server/models/user.py
TobiasPrt/Smartphoniker-shop
6b74a3cc1c81db7a56d70609dbca29ddeec3053f
[ "MIT" ]
4
2021-02-19T13:31:53.000Z
2022-02-20T13:34:10.000Z
project/server/models/user.py
TobiasPrt/Smartphoniker-shop
6b74a3cc1c81db7a56d70609dbca29ddeec3053f
[ "MIT" ]
5
2020-04-27T16:25:39.000Z
2020-06-07T16:03:15.000Z
# project/server/user.py import datetime from flask_login import UserMixin from werkzeug.security import generate_password_hash, check_password_hash from project.server import db from project.server.models.base import BaseModel class User(BaseModel, UserMixin): __tablename__ = "users" id = db.Column(db.Integer, primary_key=True, autoincrement=True) email = db.Column(db.String(255), unique=True, nullable=False) password_hash = db.Column(db.String(255), nullable=False) registered_on = db.Column(db.DateTime, nullable=False, default=datetime.datetime.now) admin = db.Column(db.Boolean, nullable=False, default=False) @property def password(self): raise AttributeError('password is not a readable attribute') @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password) def __repr__(self): return f"<User {self.email}>"
32.606061
90
0.719331
135
1,076
5.562963
0.422222
0.111851
0.066578
0.04261
0.050599
0
0
0
0
0
0
0.006849
0.185874
1,076
32
91
33.625
0.850457
0.020446
0
0
1
0
0.058824
0
0
0
0
0
0
1
0.181818
false
0.409091
0.227273
0.090909
0.818182
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
3
b3d16dfa68d50178714fa515f70e062410eeedc9
51
py
Python
src/debugpy/_vendored/pydevd/tests_python/resources/not_my_code/not_my_coroutine.py
r3m0t/debugpy
090e3c3ef5758e5b316514c9d6f44f9b9b488cf1
[ "MIT" ]
695
2020-01-30T14:34:51.000Z
2022-03-31T09:31:57.000Z
src/debugpy/_vendored/pydevd/tests_python/resources/not_my_code/not_my_coroutine.py
r3m0t/debugpy
090e3c3ef5758e5b316514c9d6f44f9b9b488cf1
[ "MIT" ]
1,095
2018-03-01T00:50:11.000Z
2019-05-06T17:44:15.000Z
src/debugpy/_vendored/pydevd/tests_python/resources/not_my_code/not_my_coroutine.py
r3m0t/debugpy
090e3c3ef5758e5b316514c9d6f44f9b9b488cf1
[ "MIT" ]
66
2020-01-30T13:10:38.000Z
2022-03-29T07:11:17.000Z
async def call1(callback): await callback()
8.5
26
0.666667
6
51
5.666667
0.833333
0
0
0
0
0
0
0
0
0
0
0.025641
0.235294
51
5
27
10.2
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
b60e17e89a7d8e1ca57601d6a9fe2ee56c00f391
407
py
Python
util/uatterminal.py
black-sliver/UAT
f840b76a52db49d7c5bf0aaeded52cd732db4927
[ "MIT" ]
null
null
null
util/uatterminal.py
black-sliver/UAT
f840b76a52db49d7c5bf0aaeded52cd732db4927
[ "MIT" ]
null
null
null
util/uatterminal.py
black-sliver/UAT
f840b76a52db49d7c5bf0aaeded52cd732db4927
[ "MIT" ]
null
null
null
#!/usr/bin/python from websockets.__main__ import main import sys if __name__ == '__main__': if len(sys.argv) < 2: sys.argv.append("ws://localhost:65399") elif sys.argv[1][0:5] != "ws://" and sys.argv[1][0:6] != "wss://": if ":" not in sys.argv[1]: sys.argv[1] = "ws://" + sys.argv[1] + ":65399" else: sys.argv[1] = "ws://" + sys.argv[1] main()
27.133333
70
0.513514
61
407
3.229508
0.442623
0.319797
0.284264
0.091371
0.182741
0.182741
0.182741
0
0
0
0
0.072848
0.257985
407
14
71
29.071429
0.57947
0.039312
0
0
0
0
0.14359
0
0
0
0
0
0
1
0
true
0
0.181818
0
0.181818
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
3734a58db2915847a839f8b06df4c9cd9d9e560b
409
py
Python
password_webapp_project/password_import_export_app/urls.py
wojtek9502/django-password-webapp
9c2fc40c61dcf19267b4409ef5cca2cb66dd2002
[ "MIT" ]
1
2021-04-28T19:55:05.000Z
2021-04-28T19:55:05.000Z
password_webapp_project/password_import_export_app/urls.py
wojtek9502/django-password-webapp
9c2fc40c61dcf19267b4409ef5cca2cb66dd2002
[ "MIT" ]
null
null
null
password_webapp_project/password_import_export_app/urls.py
wojtek9502/django-password-webapp
9c2fc40c61dcf19267b4409ef5cca2cb66dd2002
[ "MIT" ]
null
null
null
from django.urls import path from . import views app_name = 'password_import_export_app' urlpatterns = [ path('export_csv/', views.PasswordExportToCSV.as_view(), name='export_to_csv'), path('import_csv/upload/', views.PasswordImportFromCsvFile.as_view(), name='import_from_csv_upload'), path('import_csv/load/', views.PasswordImportFromCsvFileLoadData.as_view(), name='import_from_csv_load'), ]
37.181818
109
0.772616
52
409
5.730769
0.384615
0.060403
0.100671
0.107383
0.154362
0.154362
0
0
0
0
0
0
0.09291
409
10
110
40.9
0.803235
0
0
0
0
0
0.308068
0.117359
0
0
0
0
0
1
0
false
0.5
0.625
0
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
3
3762b29ec3abcb88231683fa6e037696dd3fd751
735
py
Python
axonius_api_client/cli/grp_system/__init__.py
rwils83/axonius_api_client
1990ed4d1287482a4648dc51edcaa5eb08255f5b
[ "MIT" ]
null
null
null
axonius_api_client/cli/grp_system/__init__.py
rwils83/axonius_api_client
1990ed4d1287482a4648dc51edcaa5eb08255f5b
[ "MIT" ]
3
2021-05-18T14:28:30.000Z
2021-09-06T20:01:56.000Z
axonius_api_client/cli/grp_system/__init__.py
rwils83/axonius_api_client
1990ed4d1287482a4648dc51edcaa5eb08255f5b
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Command line interface for Axonius API Client.""" import click from ..context import AliasedGroup from . import ( grp_central_core, grp_discover, grp_meta, grp_nodes, grp_roles, grp_settings, grp_users, ) @click.group(cls=AliasedGroup) def system(): """Group: System control commands.""" system.add_command(grp_meta.meta) system.add_command(grp_nodes.instances) system.add_command(grp_central_core.central_core) system.add_command(grp_roles.roles) system.add_command(grp_settings.settings_lifecycle) system.add_command(grp_settings.settings_gui) system.add_command(grp_settings.settings_core) system.add_command(grp_users.users) system.add_command(grp_discover.discover)
23.709677
52
0.778231
102
735
5.303922
0.333333
0.149723
0.266174
0.316081
0.279113
0.194085
0
0
0
0
0
0.001534
0.112925
735
30
53
24.5
0.828221
0.137415
0
0
0
0
0
0
0
0
0
0
0
1
0.045455
true
0
0.136364
0
0.181818
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
3783188c77e81848609fd2ace4adef4cd19f304f
570
py
Python
spirit/comment/utils.py
santeyio/phantastesproject
5ce1e2cb59e8283fe280e01d0e185be62cd4001a
[ "MIT" ]
1
2016-02-29T01:26:42.000Z
2016-02-29T01:26:42.000Z
spirit/comment/utils.py
santeyio/phantastesproject
5ce1e2cb59e8283fe280e01d0e185be62cd4001a
[ "MIT" ]
16
2015-08-10T18:28:18.000Z
2022-03-11T23:12:48.000Z
spirit/comment/utils.py
shriyanka/daemo-forum
58c555f69208beedbb0c09f7b7d1e32ab741b2c5
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from ..topic.notification.models import TopicNotification, UNDEFINED from ..topic.unread.models import TopicUnread def comment_posted(comment, mentions): # Todo test detail views TopicNotification.create_maybe(user=comment.user, comment=comment, action=UNDEFINED) TopicNotification.notify_new_comment(comment=comment) TopicNotification.notify_new_mentions(comment=comment, mentions=mentions) TopicUnread.unread_new_comment(comment=comment) comment.topic.increase_comment_count()
38
88
0.805263
65
570
6.830769
0.476923
0.220721
0.141892
0.108108
0
0
0
0
0
0
0
0.001965
0.107018
570
15
89
38
0.870334
0.077193
0
0
0
0
0
0
0
0
0
0.066667
0
1
0.111111
false
0
0.333333
0
0.444444
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
3
37943f0bb4f7d9371aa14a7627f522d0f8f3279c
8,676
py
Python
pattern_matching/core/pattern.py
Xython/pattern-matching
17ccdb68189353f1c63032013f5ef6f1ca4c0902
[ "MIT" ]
20
2017-12-31T05:45:47.000Z
2021-05-15T22:08:21.000Z
pattern_matching/core/pattern.py
Xython/Destruct.py
17ccdb68189353f1c63032013f5ef6f1ca4c0902
[ "MIT" ]
null
null
null
pattern_matching/core/pattern.py
Xython/Destruct.py
17ccdb68189353f1c63032013f5ef6f1ca4c0902
[ "MIT" ]
1
2018-01-12T04:54:19.000Z
2018-01-12T04:54:19.000Z
from inspect import getfullargspec from typing import Union, List, Optional import operator from collections import namedtuple Patch = namedtuple('Patch', ['var']) match_err = object() class Pattern: def match(self, expr): raise NotImplemented def __repr__(self): return self.__str__() class Type(Pattern): pass class TypeVar(Type): def __init__(self, u_types: set, inf: set, sup: set, traits: set, yield_out: bool = True): self.negative_types = u_types self.inf = inf self.sup = sup self.traits = traits self.yield_out = yield_out def __str__(self): return f'Type[{self.inf}<= this <={self.sup}' \ f'| this /= {self.negative_types}, traits:{{{self.traits}}}]' def __le__(self, other: type): return TypeVar(self.negative_types - {other}, self.inf, self.sup | {other}, self.traits, self.yield_out) def __ge__(self, other: type): return TypeVar(self.negative_types - {other}, self.inf | {other}, self.sup, self.traits, self.yield_out) def __lt__(self, other: type): return TypeVar(self.negative_types | {other}, self.inf, self.sup | {other}, self.traits, self.yield_out) def __gt__(self, other: type): return TypeVar(self.negative_types | {other}, self.inf | {other}, self.sup, self.traits, self.yield_out) def __eq__(self, other: type): return TypeVar(self.negative_types - {other}, self.inf | {other}, self.sup | {other}, self.traits, self.yield_out) def __ne__(self, other: type): return TypeVar(self.negative_types | {other}, self.inf, self.sup, self.traits, self.yield_out) def __and__(self, other: Type): if not isinstance(other, Type): other = TypeVar(set(), {other}, {other}, set(), yield_out=False) return IntersectionType([self, other]) def __or__(self, other: Type): if not isinstance(other, Type): other = TypeVar(set(), {other}, {other}, set(), yield_out=False) return UnionType([self, other]) def __invert__(self): return DifferenceType(self) def __mod__(self, **kwargs): return TypeVar(self.negative_types, self.inf, self.sup, set(kwargs.items()) | self.traits) def when(self, trait): 
return TypeVar(self.negative_types, self.inf, self.sup, self.traits | {trait}, self.yield_out) def match(self, expr: type): def isn(u_type): return u_type is not expr def is_inf(u_type): return issubclass(expr, u_type) def is_sup(u_type): return issubclass(u_type, expr) if all(map(isn, self.negative_types)) and \ all(map(is_inf, self.inf)) and \ all(map(is_sup, self.sup)) and \ all(trait(expr) for trait in self.traits): if self.yield_out: return expr, return () else: return match_err class UnionType(Type): def __init__(self, types: List[Type]): self.types = types def __str__(self): return 'Union[{}]'.format(', '.join( [f'<{_type}>' for _type in self.types])) def match(self, expr): for typ in self.types: e = typ.match(expr) if e is not match_err: return e return match_err def __and__(self, other): return IntersectionType([self, other]) def __or__(self, other): return UnionType([*self.types, other]) def __invert__(self): return DifferenceType(self) class IntersectionType(Type): def __str__(self): return 'Intersection[{}]'.format(', '.join( [f'<{_type}>' for _type in self.types])) def __init__(self, types: List[Type]): self.types = types def match(self, expr): ret = [] for typ in self.types: e = typ.match(expr) if e is match_err: return match_err ret.extend(e) return tuple(ret) def __and__(self, other): return IntersectionType([*self.types, other]) def __or__(self, other): return UnionType([self, other]) def __invert__(self): return DifferenceType(self) class DifferenceType(Type): def __str__(self): return f'Difference[{self.type}]' def __init__(self, type): self.type = type def match(self, expr): e = self.type.match(expr) if e is not match_err: return match_err return () def __and__(self, other): return IntersectionType([self, other]) def __or__(self, other): return UnionType([self, other]) def __not__(self): return self.type class Var(Pattern): def __init__(self, match_fns: list, type: Optional[Type], arg_nums: int = -1, yield_out: bool = True): self.match_fns = 
match_fns if not isinstance(type, Type) and type is not None: self.type = TypeVar(set(), {type}, {type}, set(), False) else: self.type = type self.arg_nums = arg_nums self.yield_out = yield_out def __str__(self): type = self.type if self.type is not None else 'any' if self.arg_nums == -1: return str(type) else: return f'{type}/{self.arg_nums}' def __call__(self, *args, **kwargs): return Var(self.match_fns, self.type, self.arg_nums, self.yield_out) def __truediv__(self, other: Union[int, tuple]): return Var(self.match_fns, self.type, other, self.yield_out) def __getitem__(self, item: Union[type, TypeVar]): return Var( self.match_fns, item if isinstance(item, Type) else TypeVar( set(), {item}, {item}, set(), False), self.arg_nums, self.yield_out) def compare_with(self, other, by): def match_it(v): return by(v, other) return Var(self.match_fns + [match_it], self.type, self.arg_nums, self.yield_out) def __ge__(self, other): return self.compare_with(other, operator.ge) def __le__(self, other): return self.compare_with(other, operator.le) def __eq__(self, other): return self.compare_with(other, operator.eq) def __gt__(self, other): return self.compare_with(other, operator.gt) def __lt__(self, other): return self.compare_with(other, operator.lt) def when(self, condition): return Var(self.match_fns + [condition], self.type, self.arg_nums, self.yield_out) def match(self, expr: object): if self.type is not None: now = self.type.match(expr.__class__) else: now = () if now is match_err: return match_err # check param nums if self.arg_nums is not -1: if not callable(expr): return match_err arg_info = getfullargspec(expr) arg_least_num = len(arg_info.args) + len(arg_info.kwonlyargs) if hasattr(expr, '__self__'): # instance bound method arg_least_num -= 1 has_var_arg = arg_info.varkw or arg_info.varargs if isinstance(self.arg_nums, tuple): if len(self.arg_nums) is 1: if self.arg_nums[0] < arg_least_num: return match_err else: if has_var_arg or not (self.arg_nums[0] <= 
arg_least_num <= self.arg_nums[1]): return match_err else: assert isinstance(self.arg_nums, int) if has_var_arg or arg_least_num != self.arg_nums: return match_err if self.match_fns: def check_if_match(f): return f(expr) if not all(map(check_if_match, self.match_fns)): return match_err if self.yield_out: return (expr, ) + now else: return now def __iter__(self): yield Patch(self) es = set() T = TypeVar(es, es, es, es, yield_out=True) t = TypeVar(es, es, es, es, yield_out=False) var = Var([], None, yield_out=True) _ = Var([], None, yield_out=False)
28.352941
79
0.557746
1,070
8,676
4.24486
0.113084
0.053501
0.042272
0.03963
0.560326
0.49934
0.46081
0.415015
0.320343
0.261339
0
0.00138
0.33172
8,676
305
80
28.445902
0.781994
0.00438
0
0.373874
0
0
0.023625
0.010654
0
0
0
0
0.004505
1
0.238739
false
0.004505
0.018018
0.166667
0.554054
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
37a046bc54b84f444441f054b4745fd2f61ef66e
152
py
Python
seahub/dtable/apps.py
odontomachus/seahub
5b6f2153921da21a473d9ff20ce443d40efc93ab
[ "Apache-2.0" ]
null
null
null
seahub/dtable/apps.py
odontomachus/seahub
5b6f2153921da21a473d9ff20ce443d40efc93ab
[ "Apache-2.0" ]
null
null
null
seahub/dtable/apps.py
odontomachus/seahub
5b6f2153921da21a473d9ff20ce443d40efc93ab
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.apps import AppConfig class DtableConfig(AppConfig): name = 'dtable'
16.888889
39
0.730263
18
152
5.888889
0.833333
0
0
0
0
0
0
0
0
0
0
0.007874
0.164474
152
8
40
19
0.826772
0.138158
0
0
0
0
0.046512
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
807e867278e2a4d545edd6e60a577a55d74e975f
240
py
Python
shell.py
tuseef123/Countries-Api-Django-REST-Framework
7a0d919d97ba61cecbc096ecbc2ebf80230bcf9a
[ "MIT" ]
null
null
null
shell.py
tuseef123/Countries-Api-Django-REST-Framework
7a0d919d97ba61cecbc096ecbc2ebf80230bcf9a
[ "MIT" ]
null
null
null
shell.py
tuseef123/Countries-Api-Django-REST-Framework
7a0d919d97ba61cecbc096ecbc2ebf80230bcf9a
[ "MIT" ]
null
null
null
#import subprocess as s #s.call('start',shell=True) import os try: os.system('cmd /c "virtualenv venv & pip install django & django-admin startproject worldCountries . & python manage.py startapp new_app"') except: print('nothing')
30
143
0.729167
34
240
5.117647
0.882353
0
0
0
0
0
0
0
0
0
0
0
0.15
240
7
144
34.285714
0.852941
0.2
0
0
0
0.2
0.7
0
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
80a67c38dedb8dd068bc14c22ec98d181ae95723
265
py
Python
src/python/dxl/fs/path.old/web.py
Hong-Xiang/dxfs
df9125061490ed413bbdbd474e8a91bc40dbf74e
[ "MIT" ]
null
null
null
src/python/dxl/fs/path.old/web.py
Hong-Xiang/dxfs
df9125061490ed413bbdbd474e8a91bc40dbf74e
[ "MIT" ]
null
null
null
src/python/dxl/fs/path.old/web.py
Hong-Xiang/dxfs
df9125061490ed413bbdbd474e8a91bc40dbf74e
[ "MIT" ]
null
null
null
from flask_restful import Resource from .model import Path class PathResource(Resource): def get(self, path): return {'path': Path(path).abs} def add_api(api, root, name='path'): api.add_resource(PathResource, root + '/{0}/<path>'.format(name))
22.083333
69
0.683019
37
265
4.810811
0.540541
0.089888
0
0
0
0
0
0
0
0
0
0.004525
0.166038
265
11
70
24.090909
0.800905
0
0
0
0
0
0.071698
0
0
0
0
0
0
1
0.285714
false
0
0.285714
0.142857
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
80f858ee93116c3b9c058f38721ad4eb2bf6f6df
391
py
Python
tests/test_main/test_dicomphi.py
medcognetics/dicom-utils
6eec1ac429b18fda58b9e7f1206f8718e08af0e9
[ "Apache-2.0" ]
null
null
null
tests/test_main/test_dicomphi.py
medcognetics/dicom-utils
6eec1ac429b18fda58b9e7f1206f8718e08af0e9
[ "Apache-2.0" ]
14
2021-07-03T18:00:53.000Z
2022-03-09T20:33:55.000Z
tests/test_main/test_dicomphi.py
medcognetics/dicom-utils
6eec1ac429b18fda58b9e7f1206f8718e08af0e9
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import runpy import sys from tests.test_main.test_dicom_types import dicom_folder # Necessary so that "dicom_folder" is not seen as unused dicom_folder = dicom_folder def test_dicomphi(dicom_folder, tmp_path): sys.argv = [sys.argv[0], str(tmp_path)] runpy.run_module("dicom_utils.cli.dicomphi", run_name="__main__", alter_sys=True)
24.4375
85
0.749361
63
391
4.349206
0.619048
0.20073
0
0
0
0
0
0
0
0
0
0.005882
0.130435
391
15
86
26.066667
0.8
0.248082
0
0
0
0
0.109966
0.082474
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.571429
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
80fcd0a84635546be06f70ca2a316840f0486b05
140
py
Python
config_web/upheno_ontology.py
NikkiBytes/pending.api
3c83bb8e413c3032a3a4539d19a779b5f0b67650
[ "Apache-2.0" ]
3
2019-02-17T23:36:35.000Z
2022-03-01T16:43:06.000Z
config_web/upheno_ontology.py
NikkiBytes/pending.api
3c83bb8e413c3032a3a4539d19a779b5f0b67650
[ "Apache-2.0" ]
56
2019-01-26T16:34:12.000Z
2022-03-23T06:57:03.000Z
config_web/upheno_ontology.py
NikkiBytes/pending.api
3c83bb8e413c3032a3a4539d19a779b5f0b67650
[ "Apache-2.0" ]
6
2020-10-22T17:37:54.000Z
2022-03-01T16:56:55.000Z
ES_HOST = 'localhost:9200' ES_INDEX = 'pending-upheno_ontology' ES_DOC_TYPE = 'phenotype' API_PREFIX = 'upheno_ontology' API_VERSION = ''
17.5
36
0.757143
19
140
5.157895
0.736842
0.285714
0
0
0
0
0
0
0
0
0
0.03252
0.121429
140
7
37
20
0.764228
0
0
0
0
0
0.438849
0.165468
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
03803648b15995be00b380a501710535ba8a2259
407
py
Python
servo_pws_mapping.py
kenttw/BrachioGraph
535df4048281693f406dc9678c99af16852adfd1
[ "MIT" ]
2
2021-01-18T02:42:15.000Z
2021-01-28T09:14:33.000Z
servo_pws_mapping.py
kenttw/BrachioGraph
535df4048281693f406dc9678c99af16852adfd1
[ "MIT" ]
null
null
null
servo_pws_mapping.py
kenttw/BrachioGraph
535df4048281693f406dc9678c99af16852adfd1
[ "MIT" ]
null
null
null
servo_a_pw = [[-90.0, 2463] [-86.4, 2423] [-72.0, 2263] [-56.6, 2093] [-43.2, 2013] [-28.8, 1793] [-14.4, 1646] [0.0, 1436] [14.4, 1276] [28.8, 1096] [43.2, 916] [56.6, 746] [72.0, 586] [72.0, 590] [90.0, 390]]
25.4375
27
0.267813
48
407
2.229167
0.625
0.084112
0
0
0
0
0
0
0
0
0
0.553073
0.560197
407
15
28
27.133333
0.044693
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
03b1bdb95fcdfb32ef5d8620d14e914fa5b8814f
13,470
py
Python
Contrib-Microsoft/Olympus_rack_manager/python-ocs/commonapi/controls/manage_fpga.py
opencomputeproject/Rack-Manager
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
[ "MIT" ]
5
2019-11-11T07:57:26.000Z
2022-03-28T08:26:53.000Z
Contrib-Microsoft/Olympus_rack_manager/python-ocs/commonapi/controls/manage_fpga.py
opencomputeproject/Rack-Manager
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
[ "MIT" ]
3
2019-09-05T21:47:07.000Z
2019-09-17T18:10:45.000Z
Contrib-Microsoft/Olympus_rack_manager/python-ocs/commonapi/controls/manage_fpga.py
opencomputeproject/Rack-Manager
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
[ "MIT" ]
11
2019-07-20T00:16:32.000Z
2022-01-11T14:17:48.000Z
# Copyright (C) Microsoft Corporation. All rights reserved. # This program is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. #!/usr/bin/python # -*- coding: utf-8 -*- from utils import * from ipmicmd_library import * ############################################################################################################ # FPGA set functions ############################################################################################################ def set_fpga_bypass(serverid, bypass): if bypass == "Enabled": return set_fpga_bypass_on(serverid) elif bypass == "Disabled": return set_fpga_bypass_off(serverid) else: return set_failure_dict("set_fpga_bypass invalid type {0}.".format(bypass), completion_code.failure) def set_fpga_bypass_on(serverid): """ Set FPGA bypass mode on """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaaction", "setbypass"]) return parse_set_fpga_bypass(interface, "setbypass") except Exception, e: return set_failure_dict("set_fpga_bypass_on() Exception {0}".format(e), completion_code.failure) def set_fpga_bypass_off(serverid): """ Set FPGA bypass mode off """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaaction", "clearbypass"]) return parse_set_fpga_bypass(interface, "clearbypass") except Exception, e: return set_failure_dict("set_fpga_bypass_off() Exception {0}".format(e), completion_code.failure) ############################################################################################################ # FPGA get functions ############################################################################################################ def get_fpga_bypass_mode(serverid): """ Read back FPGA bypass mode setting """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "mode"]) return 
parse_get_fpga_bypass_mode(interface, "mode") except Exception, e: return set_failure_dict("get_fpga_bypass_mode() Exception {0}".format(e), completion_code.failure) def get_fpga_health(serverid): """ Read back FPGA health """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "health"]) return parse_get_fpga_health(interface, "health") except Exception, e: return set_failure_dict("get_fpga_health() Exception {0}".format(e), completion_code.failure) def get_fpga_temp(serverid): """ Read back FPGA temperature """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "temp"]) return parse_get_fpga_temp(interface, "temp") except Exception, e: return set_failure_dict("get_fpga_temp() Exception {0}".format(e), completion_code.failure) def get_fpga_i2c_version(serverid): """ Read back FPGA I2C version """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "i2cversion"]) return parse_get_fpga_i2cversion(interface, "i2cversion") except Exception, e: return set_failure_dict("get_fpga_i2c_version() Exception {0}".format(e), completion_code.failure) def get_fpga_assetinfo(serverid): """ Read back product info area from FPGA FRU """ try: interface = get_ipmi_interface(serverid, ["ocsoem", "fpgaread", "assetinfo"]) return parse_get_fpga_assetinfo(interface, "assetinfo") except Exception, e: return set_failure_dict("get_fpga_assetinfo() Exception {0}".format(e), completion_code.failure) ############################################################################################################ # FPGA parse output functions ############################################################################################################ def parse_set_fpga_bypass(interface, command): try: output = call_ipmi(interface, command) if "ErrorCode" in output: return set_failure_dict(("Failed to run IPMITool: " + output), completion_code.failure) if output['status_code'] == 0: return set_success_dict() else: error_data = output['stderr'] return 
set_failure_dict(error_data.split(":")[-1].strip(), completion_code.failure) except Exception, e: return set_failure_dict("parse_set_fpga_bypass() Exception {0}".format(e), completion_code.failure) def parse_get_fpga_bypass_mode(interface, command): try: output = call_ipmi(interface, command) if "ErrorCode" in output: return set_failure_dict(("Failed to run IPMITool: " + output), completion_code.failure) get_mode = {} if output['status_code'] == 0: get_mode_data = output['stdout'].split('\n') # Removes empty strings from the list get_mode_data = filter(None, get_mode_data) get_mode[completion_code.cc_key] = completion_code.success for string in get_mode_data: if "Bypass Mode" in string: get_mode["Bypass Mode"] = string.split(":")[-1].strip() elif "User Logic Network" in string: get_mode["User Logic Network"] = string.split(":")[-1].strip() return get_mode else: error_data = output['stderr'] return set_failure_dict(error_data.split(":")[-1].strip(), completion_code.failure) except Exception, e: return set_failure_dict("parse_get_fpga_bypass_mode() Exception {0}".format(e), completion_code.failure) def parse_get_fpga_temp(interface, command): try: output = call_ipmi(interface, command) if "ErrorCode" in output: return set_failure_dict(("Failed to run IPMITool: " + output), completion_code.failure) get_temp = {} if output['status_code'] == 0: get_temp_data = output['stdout'].split('\n') # Removes empty strings from the list get_temp_data = filter(None, get_temp_data) get_temp[completion_code.cc_key] = completion_code.success for string in get_temp_data: if "Temperature in Celsius" in string: get_temp["Temperature in Celsius"] = string.split(":")[-1].strip() return get_temp else: error_data = output['stderr'] return set_failure_dict(error_data.split(":")[-1].strip(), completion_code.failure) except Exception, e: return set_failure_dict("parse_get_fpga_temp() Exception {0}".format(e), completion_code.failure) def parse_get_fpga_i2cversion(interface, command): try: 
output = call_ipmi(interface, command) if "ErrorCode" in output: return set_failure_dict(("Failed to run IPMITool: " + output), completion_code.failure) get_ver = {} if output['status_code'] == 0: get_ver_data = output['stdout'].split('\n') # Removes empty strings from the list get_ver_data = filter(None, get_ver_data) get_ver[completion_code.cc_key] = completion_code.success for string in get_ver_data: if "I2C Version" in string: get_ver["I2C Version"] = string.split(":")[-1].strip() return get_ver else: error_data = output['stderr'] return set_failure_dict(error_data.split(":")[-1].strip (), completion_code.failure) except Exception, e: return set_failure_dict("parse_get_fpga_i2cversion() Exception {0}".format(e), completion_code.failure) def parse_get_fpga_health(interface, command): try: output = call_ipmi(interface, command) if "ErrorCode" in output: return set_failure_dict(("Failed to run IPMITool: " + output), completion_code.failure) get_health = {} if output['status_code'] == 0: get_health_data = output['stdout'].split('\n') # Removes empty strings from the list get_health_data = filter(None, get_health_data) get_health[completion_code.cc_key] = completion_code.success for string in get_health_data: if "PCIe HIP 0 Up" in string: get_health["PCIe HIP 0 Up"] = string.split(":")[-1].strip() elif "PCIe HIP 1 Up" in string: get_health["PCIe HIP 1 Up"] = string.split(":")[-1].strip() elif "40G Link 0 Up" in string: get_health["40G Link 0 Up"] = string.split(":")[-1].strip() elif "40G Link 0 Tx Activity" in string: get_health["40G Link 0 Tx Activity"] = string.split(":")[-1].strip() elif "40G Link 0 Rx Activity" in string: get_health["40G Link 0 Rx Activity"] = string.split(":")[-1].strip() elif "40G Link 1 Up" in string: get_health["40G Link 1 Up"] = string.split(":")[-1].strip() elif "40G Link 1 Tx Activity" in string: get_health["40G Link 1 Tx Activity"] = string.split(":")[-1].strip() elif "40G Link 1 Rx Activity" in string: get_health["40G Link 1 Rx 
Activity"] = string.split(":")[-1].strip() return get_health else: error_data = output['stderr'] return set_failure_dict(error_data.split(":")[-1].strip (), completion_code.failure) except Exception, e: return set_failure_dict("parse_get_fpga_health() Exception {0}".format(e), completion_code.failure) def parse_get_fpga_assetinfo(interface, command): try: output = call_ipmi(interface, command) if "ErrorCode" in output: return set_failure_dict(("Failed to run IPMITool: " + output), completion_code.failure) get_assetinfo = {} if output['status_code'] == 0: get_fru_data = output['stdout'].split('\n') # Removes empty strings from the list get_fru_data = filter(None, get_fru_data) get_assetinfo[completion_code.cc_key] = completion_code.success for string in get_fru_data: if "Product Manufacturer" in string: get_assetinfo["Product Manufacturer"] = string.split(":")[-1].strip() elif "Product Name" in string: get_assetinfo["Product Name"] = string.split(":")[-1].strip() elif "Product Model Number" in string: get_assetinfo["Product Model Number"] = string.split(":")[-1].strip() elif "Product Version" in string: get_assetinfo["Product Version"] = string.split(":")[-1].strip() elif "Product Serial Number" in string: get_assetinfo["Product Serial Number"] = string.split(":")[-1].strip() elif "Product FRU File ID" in string: get_assetinfo["Product FRU File ID"] = string.split(":")[-1].strip() elif "Product Custom Field 1" in string: get_assetinfo["Product Custom Field 1"] = string.split(":")[-1].strip() elif "Product Custom Field 2" in string: get_assetinfo["Product Custom Field 2"] = string.split(":")[-1].strip() return get_assetinfo else: error_data = output['stderr'] return set_failure_dict(error_data.split(":")[-1].strip (), completion_code.failure) except Exception, e: return set_failure_dict("parse_get_fpga_assetinfo() Exception {0}".format(e), completion_code.failure)
45.201342
113
0.518486
1,369
13,470
4.871439
0.109569
0.075574
0.062378
0.077973
0.783476
0.719898
0.643275
0.589894
0.490778
0.431549
0
0.011423
0.33712
13,470
297
114
45.353535
0.735469
0.04343
0
0.365079
0
0
0.150385
0.017845
0
0
0
0
0
0
null
null
0.121693
0.010582
null
null
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
3
03dfde424caf0567737c1eb636d5a55fda8cc550
1,744
py
Python
tests/service_account/testSATokenUtils.py
skyflowapi/skyflow-python
1c7220de6698fd6a807932d3d3846b7fe4c61a5c
[ "MIT" ]
2
2022-03-08T22:08:34.000Z
2022-03-31T15:36:23.000Z
tests/service_account/testSATokenUtils.py
skyflowapi/skyflow-python
1c7220de6698fd6a807932d3d3846b7fe4c61a5c
[ "MIT" ]
1
2022-03-23T04:55:58.000Z
2022-03-23T04:55:58.000Z
tests/service_account/testSATokenUtils.py
skyflowapi/skyflow-python
1c7220de6698fd6a807932d3d3846b7fe4c61a5c
[ "MIT" ]
4
2022-01-04T10:38:36.000Z
2022-01-27T06:16:45.000Z
import os import unittest from dotenv import dotenv_values from skyflow.service_account._token import * from skyflow.service_account import is_expired class TestGenerateBearerToken(unittest.TestCase): def setUp(self) -> None: self.dataPath = os.path.join( os.getcwd(), 'tests/service_account/data/') return super().setUp() def testIsExpiredInvalidToken(self): try: token = 'invalid token' self.assertEqual(True, is_expired(token)) except SkyflowError as se: self.fail('raised exception for invalid token') def testIsExpiredEmptyToken(self): try: self.assertEqual(True, is_expired('')) except SkyflowError as se: self.fail('Error '+str(se.message)) def testIsExpiredTokenExpred(self): expiredToken = 'eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJodHRwczovL21hbmFnZS5za3lmbG93YXBpcy5jb20iLCJjbGkiOiJrOWZkN2ZiMzcyMDI0NDhiYmViOGNkNmUyYzQ4NTdkOSIsImV4cCI6MTY0NzI1NjM3NCwiaWF0IjoxNjQ3MjU2MzE1LCJpc3MiOiJzYS1hdXRoQG1hbmFnZS5za3lmbG93YXBpcy5jb20iLCJqdGkiOiJnYTMyZWJhMGJlMzQ0YWRmYjQxMzRjN2Y2ZTIzZjllMCIsInNjcCI6WyJyb2xlOnM1OTdjNzNjYjhjOTRlMjk4YzhlZjZjNzE0M2U0OWMyIl0sInN1YiI6InRlc3Qgc3ZjIGFjYyJ9.OrkSyNtXOVtfL3JNYaArlmUFg0txJFV6o3SE_wadPwZ_h1BtMuoKPo1LOAe-4HhS16i34HcfTTiHmg2ksx5KbD_sdx1intaDWZGXs-6TPvDK8mdFrBblp3nP1y1O_PHEnCMmPD3haZVMj_9jyTKPb6R8qBbMjr-UzXAUCCTiq9XqEd81wY8FsZeKwSQFqbdFdECaPsk8m-k8s7BKc_VLtHXdYXp4vNgjgleSeX4nHHhU1w0y18q2_tPwgLG-MZ2I7pF60Owk9T7f7gSuCpVfa6zYvpYiYFjQayFmYc6tJgEuOyGD_VFKKUUW4TszeNyJOCF15dPDO2JIeGh3xDJ8PA' try: self.assertEqual(True, is_expired(expiredToken)) except SkyflowError: self.fail('raised error for expired token')
49.828571
757
0.791284
123
1,744
11.081301
0.504065
0.026412
0.04182
0.046222
0.110051
0.089508
0
0
0
0
0
0.069595
0.151376
1,744
34
758
51.294118
0.851351
0
0
0.185185
1
0
0.482798
0.435206
0
0
0
0
0.111111
1
0.148148
false
0
0.185185
0
0.407407
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
03fc4245b3cc0fcac314f0f38459863e44696c7a
302
py
Python
iris_sdk/models/data/sites.py
NumberAI/python-bandwidth-iris
0e05f79d68b244812afb97e00fd65b3f46d00aa3
[ "MIT" ]
2
2020-04-13T13:47:59.000Z
2022-02-23T20:32:41.000Z
iris_sdk/models/data/sites.py
bandwidthcom/python-bandwidth-iris
dbcb30569631395041b92917252d913166f7d3c9
[ "MIT" ]
5
2020-09-18T20:59:24.000Z
2021-08-25T16:51:42.000Z
iris_sdk/models/data/sites.py
bandwidthcom/python-bandwidth-iris
dbcb30569631395041b92917252d913166f7d3c9
[ "MIT" ]
5
2018-12-12T14:39:50.000Z
2020-11-17T21:42:29.000Z
#!/usr/bin/env python from iris_sdk.models.base_resource import BaseData, BaseResourceList from iris_sdk.models.maps.sites import SitesMap from iris_sdk.models.site import Site class SitesData(SitesMap, BaseData): def __init__(self, parent=None): self.site = BaseResourceList(Site, self)
30.2
68
0.781457
42
302
5.428571
0.571429
0.105263
0.144737
0.223684
0
0
0
0
0
0
0
0
0.129139
302
10
69
30.2
0.86692
0.066225
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.5
0
0.833333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
ff113d382c37b0c140ffa3ab5fbeaf24edfbd568
904
py
Python
Website/client/migrations/0002_auto_20210427_0502.py
rutvikpadhiyar000/DBMS-Project
5bfa258f8161a986b85461f2257a91f5d27e0b8a
[ "MIT" ]
null
null
null
Website/client/migrations/0002_auto_20210427_0502.py
rutvikpadhiyar000/DBMS-Project
5bfa258f8161a986b85461f2257a91f5d27e0b8a
[ "MIT" ]
null
null
null
Website/client/migrations/0002_auto_20210427_0502.py
rutvikpadhiyar000/DBMS-Project
5bfa258f8161a986b85461f2257a91f5d27e0b8a
[ "MIT" ]
1
2021-04-26T13:56:01.000Z
2021-04-26T13:56:01.000Z
# Generated by Django 3.2 on 2021-04-27 05:02 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('client', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='client', name='client_DOB', ), migrations.RemoveField( model_name='client', name='client_gender', ), migrations.RemoveField( model_name='client', name='client_occupation', ), migrations.AddField( model_name='client', name='client_contact', field=models.CharField(default=None, max_length=10, null=True), ), migrations.AlterField( model_name='client', name='client_name', field=models.CharField(max_length=100, null=True), ), ]
25.111111
75
0.550885
84
904
5.77381
0.5
0.206186
0.173196
0.195876
0.387629
0.284536
0.284536
0
0
0
0
0.03827
0.335177
904
35
76
25.828571
0.768719
0.047566
0
0.448276
1
0
0.131548
0
0
0
0
0
0
1
0
false
0
0.034483
0
0.137931
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
ff13f7a516e851277af1a9b617cfecc2fd026c16
1,944
py
Python
pyNastran/converters/dev/ansys/test_ansys.py
jtran10/pyNastran
4aed8e05b91576c2b50ee835f0497a9aad1d2cb0
[ "BSD-3-Clause" ]
null
null
null
pyNastran/converters/dev/ansys/test_ansys.py
jtran10/pyNastran
4aed8e05b91576c2b50ee835f0497a9aad1d2cb0
[ "BSD-3-Clause" ]
null
null
null
pyNastran/converters/dev/ansys/test_ansys.py
jtran10/pyNastran
4aed8e05b91576c2b50ee835f0497a9aad1d2cb0
[ "BSD-3-Clause" ]
null
null
null
if __name__ == '__main__': # pragma: no cover data = """ /com,*********** Create Remote Point "Internal Remote Point 39" *********** ! -------- Remote Point Used by "Fixed - Line Body To EndCap 14054021-1 d" -------- *set,_npilot,803315 _npilot474=_npilot et,332,170 type,332 real,332 mat,332 keyo,332,2,1 ! don't fix pilot node keyo,332,4,0 ! MPC for all DOF's tshape,pilo en,501901,803315 ! create pilot node for rigid link tshape en,501902,803315,127827 /com,*********** Create Remote Point "Internal Remote Point 40" *********** ! -------- Remote Point Used by "Fixed - Line Body To EndCap 14054021-1 d" -------- *set,tid,334 *set,cid,333 et,cid,175 et,tid,170 keyo,tid,2,1 ! Don't fix the pilot node keyo,tid,4,111111 keyo,cid,12,5 ! Bonded Contact keyo,cid,4,0 ! Rigid CERIG style load keyo,cid,2,2 ! MPC style contact mat,333 real,333 type,333 en,501903,418114 en,501904,418115 en,501905,418116 en,501906,418117 en,501907,418118 en,501908,418119 en,501909,418120 en,501910,418121 en,501911,418122 en,501912,418123 en,501913,418124 en,501914,427511 en,501915,427512 en,501916,427518 en,501917,427524 en,501918,427528 en,501919,427533 en,501920,427539 en,501921,427544 en,501922,427551 en,501923,427562 en,501924,427569 *set,_npilot,803316 _npilot475=_npilot type,tid mat ,cid real,cid tshape,pilo en,501925,_npilot tshape et,2,187 et,27,187 # element, group 27, element_type=187 -> tet10 et,30,188 etype nastran_name 187 tet10 186 hexa20 188 beam eblock,19,solid,,213 eblock,19,solid,,8 #---------------------------------------------------------------- et,_jid,184 et,tid,170 et,cid,174 keyo,tid,2,1 ! Don't fix the pilot node keyo,tid,4,111111 keyo,cid,12,5 ! Bonded Contact keyo,cid,4,2 ! Rigid CERIG style load keyo,cid,2,2 ! MPC style contact eblock,10,,,16 """
21.842697
83
0.636831
311
1,944
3.92283
0.446945
0.054098
0.012295
0.014754
0.345082
0.337705
0.337705
0.27377
0.27377
0.27377
0
0.300508
0.190329
1,944
88
84
22.090909
0.474587
0.00823
0
0.2
0
0
0.979751
0.045691
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
ff154e1665df0e3eb691c8f0e6858678bcfb4af8
89
py
Python
src/game/law_scope.py
Tequilac/Hacknarok2021
ab4fae17afe4b109e35ba888e7a354aba18de426
[ "MIT" ]
1
2021-03-28T11:32:44.000Z
2021-03-28T11:32:44.000Z
src/game/law_scope.py
Tequilac/Hacknarok2021
ab4fae17afe4b109e35ba888e7a354aba18de426
[ "MIT" ]
null
null
null
src/game/law_scope.py
Tequilac/Hacknarok2021
ab4fae17afe4b109e35ba888e7a354aba18de426
[ "MIT" ]
1
2021-03-27T22:26:10.000Z
2021-03-27T22:26:10.000Z
from enum import Enum, auto class LawScope(Enum): state = auto() city = auto()
12.714286
27
0.629213
12
89
4.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.258427
89
6
28
14.833333
0.848485
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
2060f1cf2eaeb9a7e196861eeafea2a4fb2752f7
261
py
Python
src/bpp/migrations/0236_merge_20210202_0850.py
iplweb/django-bpp
85f183a99d8d5027ae4772efac1e4a9f21675849
[ "BSD-3-Clause" ]
1
2017-04-27T19:50:02.000Z
2017-04-27T19:50:02.000Z
src/bpp/migrations/0236_merge_20210202_0850.py
mpasternak/django-bpp
434338821d5ad1aaee598f6327151aba0af66f5e
[ "BSD-3-Clause" ]
41
2019-11-07T00:07:02.000Z
2022-02-27T22:09:39.000Z
src/bpp/migrations/0236_merge_20210202_0850.py
iplweb/bpp
f027415cc3faf1ca79082bf7bacd4be35b1a6fdf
[ "BSD-3-Clause" ]
null
null
null
# Generated by Django 3.0.11 on 2021-02-02 07:50 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("bpp", "0235_auto_20210125_0042"), ("bpp", "0235_auto_20210201_1305"), ] operations = []
18.642857
48
0.651341
33
261
4.969697
0.787879
0.085366
0.134146
0
0
0
0
0
0
0
0
0.236453
0.222222
261
13
49
20.076923
0.571429
0.176245
0
0
1
0
0.244131
0.215962
0
0
0
0
0
1
0
false
0
0.142857
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
2076da38f41ca9e6d8b90d59480fe73560687047
157
py
Python
abyssal_modules/metrics.py
kimnnmadsen/eve-abyssal-market
1e07498b98be9282b969badff51d55258c72e7ed
[ "MIT" ]
13
2018-08-23T14:27:22.000Z
2020-12-07T12:35:38.000Z
abyssal_modules/metrics.py
kimnnmadsen/eve-abyssal-market
1e07498b98be9282b969badff51d55258c72e7ed
[ "MIT" ]
25
2018-10-09T14:37:33.000Z
2020-05-15T20:21:48.000Z
abyssal_modules/metrics.py
kimnnmadsen/eve-abyssal-market
1e07498b98be9282b969badff51d55258c72e7ed
[ "MIT" ]
4
2021-08-12T05:34:05.000Z
2022-01-06T05:28:36.000Z
from prometheus_client import Counter COUNTER_MODULES_CREATED = Counter( 'mutaplasmid_modules_created', 'Number of modules created', ['type'] )
19.625
37
0.745223
17
157
6.588235
0.647059
0.375
0
0
0
0
0
0
0
0
0
0
0.171975
157
7
38
22.428571
0.861538
0
0
0
0
0
0.356688
0.171975
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
2077f7809cd4ce3f2d4711a1abee0402ce15d0a1
312
py
Python
eceval/syntax.py
nalssee/SICP
b19144bd939a461cf9f242dd9d144ca012b37932
[ "MIT" ]
2
2015-11-08T18:22:36.000Z
2016-05-26T15:24:54.000Z
eceval/syntax.py
nalssee/SICP
b19144bd939a461cf9f242dd9d144ca012b37932
[ "MIT" ]
null
null
null
eceval/syntax.py
nalssee/SICP
b19144bd939a461cf9f242dd9d144ca012b37932
[ "MIT" ]
null
null
null
def is_self_evaluating(exp): """number, string, booleans """ return \ isinstance(exp, int) or isinstance(exp, float) \ or (isinstance(exp, str) and len(exp) >= 2 and exp[0] == '"' and exp[-1] == '"') \ or exp == 'true' or exp == 'false' def text_of_quotation(exp): pass
26
90
0.557692
42
312
4.047619
0.595238
0.229412
0.176471
0
0
0
0
0
0
0
0
0.0131
0.266026
312
11
91
28.363636
0.729258
0.076923
0
0
0
0
0.039711
0
0
0
0
0
0
1
0.285714
false
0.142857
0
0
0.428571
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
3
207b5406bbe2223199a5b2634d6c4efeaed6f358
79
py
Python
Silver/Silver_V/1427.py
masterTyper/baekjoon_solved_ac
b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c
[ "MIT" ]
null
null
null
Silver/Silver_V/1427.py
masterTyper/baekjoon_solved_ac
b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c
[ "MIT" ]
null
null
null
Silver/Silver_V/1427.py
masterTyper/baekjoon_solved_ac
b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c
[ "MIT" ]
null
null
null
N = list(map(int, input())) print(''.join(map(str, sorted(N, reverse=True))))
19.75
49
0.620253
13
79
3.769231
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.101266
79
3
50
26.333333
0.690141
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
2083751bf0d1b861a87f6eac68501c40c7950187
196
py
Python
polidoro_gitlab/__init__.py
heitorpolidoro/polidoro_gitlab
b9ef7260eaa667b0d03a7fd805e140eb09a1bce9
[ "MIT" ]
null
null
null
polidoro_gitlab/__init__.py
heitorpolidoro/polidoro_gitlab
b9ef7260eaa667b0d03a7fd805e140eb09a1bce9
[ "MIT" ]
5
2021-11-30T18:51:04.000Z
2021-12-13T13:53:04.000Z
polidoro_gitlab/__init__.py
heitorpolidoro/polidoro-gitlab
b9ef7260eaa667b0d03a7fd805e140eb09a1bce9
[ "MIT" ]
null
null
null
import subprocess from polidoro_gitlab.gitlab import GitLab, Project, Pipeline NAME = 'polidoro_gitlab' VERSION = subprocess.run(['cat', 'VERSION'], capture_output=True).stdout.strip().decode()
28
89
0.77551
24
196
6.208333
0.708333
0.187919
0
0
0
0
0
0
0
0
0
0
0.091837
196
6
90
32.666667
0.837079
0
0
0
0
0
0.127551
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
209ef2f28a9cfbed808fd92f2dfdf7dbd1aeb570
129
py
Python
quirk/__init__.py
ankane/quirk
332a912b4cf08c25c32c38b88dcaad59133520c0
[ "MIT" ]
23
2017-02-24T11:04:40.000Z
2020-11-06T07:05:07.000Z
predict/__init__.py
melvynkim/predict
dd4a2913fe1c661d711e6969ffbefeba8af9b2d3
[ "MIT" ]
1
2018-07-11T07:55:05.000Z
2018-07-11T07:55:05.000Z
predict/__init__.py
melvynkim/predict
dd4a2913fe1c661d711e6969ffbefeba8af9b2d3
[ "MIT" ]
2
2017-06-06T22:10:33.000Z
2020-06-24T23:35:01.000Z
from .classifier import Classifier from .regressor import Regressor __all__ = ['Classifier', 'Regressor'] __version__ = '0.1.3'
21.5
37
0.75969
15
129
6
0.6
0
0
0
0
0
0
0
0
0
0
0.026549
0.124031
129
5
38
25.8
0.769912
0
0
0
0
0
0.186047
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
20bd974ee643d37a32ad5c80f5ddcb0a071e8639
2,981
py
Python
imnetdb/rpools/client.py
imnetdb/imnetdb
f7e1ef41be2f3f2089c666dbf602ea99112bf516
[ "Apache-2.0" ]
5
2019-03-13T20:34:36.000Z
2020-04-18T15:35:13.000Z
imnetdb/rpools/client.py
imnetdb/imnetdb
f7e1ef41be2f3f2089c666dbf602ea99112bf516
[ "Apache-2.0" ]
2
2019-02-24T18:22:10.000Z
2019-05-07T10:35:32.000Z
imnetdb/rpools/client.py
imnetdb/imnetdb
f7e1ef41be2f3f2089c666dbf602ea99112bf516
[ "Apache-2.0" ]
2
2019-04-17T23:42:55.000Z
2019-04-20T15:55:43.000Z
# Copyright 2019 Jeremy Schulman, nwkautomaniac@gmail.com # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from imnetdb.db.common_client import CommonDBClient from imnetdb.rpools.rpool import ResourcePool __all__ = ['RPoolsDB'] class RPoolsDB(CommonDBClient): """ About Using a Resource Database ------------------------------- # TODO: need to write this up. """ def __init__(self, password, user='root', db_name='rpools', host='0.0.0.0', port=8529, connect_timeout=10): """ Create a client instance to the RPoolsDB stored within the ArangoDB server. If the database does not exist, then it will be created. Once available, the caller can then define new collections in the database using :meth:`resource_pool`. Parameters ---------- password : str The login password value user : str (optional) The login user-name, defaults to 'root' db_name : str (optional) The name of the database host : str (optional) The ArangoDB server host-name or ip-addr port : int (optional) The ArangoDB server port value connect_timeout : int (optional) When connecting to the ArangoDB server, this value defines the timeout in seconds before aborting. """ super(RPoolsDB, self).__init__(password=password, user=user, db_name=db_name, host=host, port=port, connect_timeout=connect_timeout) def resource_pool(self, pool_name, value_type=str): """ Ensure that a resource pool (database collection) exists by the given `pool_name`. If it does not exist, then it will be created. 
Parameters ---------- pool_name : str The pool name. value_type : type (optional) The item value type, by default is a string. This property is used when adding new items to the pool. The value_type will be called against each item to ensure that it stored as the type desired. Therefore, the caller could provide any callable, providing it returns a value that can be stored within the ArangoDB document. Returns ------- ResourcePool An instance of the resource pool. """ return ResourcePool(client=self, collection_name=pool_name, value_type=value_type)
35.915663
106
0.642402
385
2,981
4.888312
0.425974
0.031881
0.036132
0.027099
0.032944
0.032944
0.032944
0.032944
0
0
0
0.008364
0.278095
2,981
82
107
36.353659
0.866171
0.670245
0
0
0
0
0.038285
0
0
0
0
0.012195
0
1
0.2
false
0.2
0.2
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
0
0
1
0
0
3
20dacba3e0a301c5e764e081b6a4f7570f35369b
118,430
py
Python
pysnmp-with-texts/IBM-OSA-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/IBM-OSA-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/IBM-OSA-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module IBM-OSA-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/IBM-OSA-MIB # Produced by pysmi-0.3.4 at Wed May 1 13:51:11 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection") ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex") NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance") Integer32, Gauge32, NotificationType, iso, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, TimeTicks, ModuleIdentity, MibIdentifier, enterprises, Counter64, Unsigned32, IpAddress, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "Gauge32", "NotificationType", "iso", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "TimeTicks", "ModuleIdentity", "MibIdentifier", "enterprises", "Counter64", "Unsigned32", "IpAddress", "Counter32") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") ibmOSAMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 2, 6, 188)) ibmOSAMib.setRevisions(('2002-05-23 00:00', '2002-03-26 08:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: ibmOSAMib.setRevisionsDescriptions(('Editorial revisions', 'Initial release',)) if mibBuilder.loadTexts: ibmOSAMib.setLastUpdated('200205230000Z') if mibBuilder.loadTexts: 
ibmOSAMib.setOrganization('IBM eServer Development') if mibBuilder.loadTexts: ibmOSAMib.setContactInfo(' Joel Goldman Postal: International Business Machines Corporation 2455 South Road Dept. B44G/Bldg. 706 Poughkeepsie, NY 12601 USA Tel: +1 845 435 5501 Internet: jgoldman@us.ibm.com') if mibBuilder.loadTexts: ibmOSAMib.setDescription('The IBM Enterprise Specific MIB definitions for enabling management of an IBM OSA-Express feature. Licensed Materials - Property of IBM Restricted Materials of IBM 5694-A01 (C) Copyright IBM Corp. 2002 US Government Users Restricted Rights - Use, duplication or disclosure restricted by GSA ADP Schedule Contract with IBM Corp.') ibm = MibIdentifier((1, 3, 6, 1, 4, 1, 2)) ibmProd = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6)) ibmOSAMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 188, 1)) ibmOSAMibConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 188, 2)) ibmOSAExpChannelTable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1), ) if mibBuilder.loadTexts: ibmOSAExpChannelTable.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelTable.setDescription('Indexed by ifIndex. One entry in this table will exist per OSA Device Interface.') ibmOSAExpChannelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: ibmOSAExpChannelEntry.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelEntry.setDescription('Definition of a single entry in the ibmOSAExpChannelTable. 
Indexed by the ifIndex of the corresponding Device interface.') ibmOSAExpChannelNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelNumber.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelNumber.setDescription('The CHPID corresponding to this ifIndex.') ibmOSAExpChannelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(17))).clone(namedValues=NamedValues(("osaDirectExpress", 17)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelType.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelType.setDescription('The type of channel for this interface. OSA Direct Express has a value of 17.') ibmOSAExpChannelHdwLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("osaExp150", 2), ("osaExp175", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelHdwLevel.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelHdwLevel.setDescription('Hardware model of the channel. The value oasExp150(2) indicates a hardware level of 1.50. 
The value oasExp175(3) indicates a hardware level of 1.75.') ibmOSAExpChannelSubType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 65, 81, 82, 2304))).clone(namedValues=NamedValues(("unknown", 1), ("gigabitEthernet", 65), ("fastEthernet", 81), ("tokenRing", 82), ("atmEmulatedEthernet", 2304)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelSubType.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelSubType.setDescription('Indicates the type of OSA feature present.') ibmOSAExpChannelShared = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notShared", 0), ("shared", 1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelShared.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelShared.setDescription('An OSA-Express feature can be shared across multiple LPs. This object indicates if this feature is currently being shared between LPs') ibmOSAExpChannelNodeDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(32, 32)).setFixedLength(32)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelNodeDesc.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelNodeDesc.setDescription("This is the Node Descriptor of the OSA feature. It represents the ND obtained from the Channel Subsystem. 
Bits Name Flag is first byte char(1) Validity Valid - always '20'x char(1) Reserved Reserved by architecture char(1) Class Class for subsystem node char(1) CHPID CHP ID for specified int char(6) TypeNum Type number of the SDC char(3) ModelNum Model number in the form of 3 EBCDIC OCTETS char(3) Manufacturer Manufacturer in the form of 3 EBCDIC OCTETS char(2) Mfr Plant Plant of manufacture-2 digit code char(12)SeqNum Sequence number (12 EBCDIC OCTETS) char(2) Tag Tag") ibmOSAExpChannelProcCodeLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelProcCodeLevel.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelProcCodeLevel.setDescription('This is the firmware (or micro code level) of the OSA feature. For example, OSA code level 05.6A would be represented as 0x056A.') ibmOSAExpChannelPCIBusUtil1Min = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelPCIBusUtil1Min.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelPCIBusUtil1Min.setDescription('The average, over a 1 minute interval, of the percentage of time that the PCI bus was utilized to transfer data. It does not include idle time or time used by routine maintenance tasks. The range for this value is from 0 to 100%.') ibmOSAExpChannelProcUtil1Min = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelProcUtil1Min.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelProcUtil1Min.setDescription('The average, over a 1 minute interval, of the percentage of time that the CHPID Processor was utilized to transfer data. 
It does not include idle time or time used by routine maintenance tasks. The range for this value is from 0 to 100%.') ibmOSAExpChannelPCIBusUtil5Min = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelPCIBusUtil5Min.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelPCIBusUtil5Min.setDescription('The average, over a 5 minute interval, of the percentage of time that the PCI bus was utilized to transfer data. It does not include idle time or time used by routine maintenance tasks. The range for this value is from 0 to 100%.') ibmOSAExpChannelProcUtil5Min = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelProcUtil5Min.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelProcUtil5Min.setDescription('The average, over a 5 minute interval, of the percentage of time that the CHPID Processor was utilized to transfer data. It does not include idle time or time used by routine maintenance tasks. The range for this value is from 0 to 100%.') ibmOSAExpChannelPCIBusUtilHour = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelPCIBusUtilHour.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelPCIBusUtilHour.setDescription('The average, over an hour interval, of the percentage of time that the PCI bus was utilized to transfer data. 
It does not include idle time or time used by routine maintenance tasks.') ibmOSAExpChannelProcUtilHour = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpChannelProcUtilHour.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelProcUtilHour.setDescription('The average, over an hour interval, of the percentage of time that the CHPID Processor was utilized to transfer data. It does not include idle time or time used by routine maintenance tasks. The range for this value is from 0 to 100%.') ibmOSAExpPerfTable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2), ) if mibBuilder.loadTexts: ibmOSAExpPerfTable.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfTable.setDescription('This table provides performance information for each Logical Partition (LP) the OSA can connect to.') ibmOSAExpPerfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: ibmOSAExpPerfEntry.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfEntry.setDescription('Definition of a single entry for a single LP in the ibmOSAExpPerfTable. Indexed by the ifIndex of the corresponding Device interface.') ibmOSAExpPerfDataLP0 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP0.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP0.setDescription('The performance data on this OSA for partition 0. 
The 40 bytes of hex data that are returned are decoded as follows: Offset Bytes Field Meaning 0 4 LP Number 4 4 Processor Util 1 Minute 8 4 In Kbytes Rate 1 Minute 12 4 Out Kbytes Rate 1 Minute 16 4 Processor Util 5 Minutes 20 4 In Kbytes Rate 5 Minutes 24 4 Out Kbytes Rate 5 Minutes 28 4 Processor Util 60 Minutes 32 4 In Kbytes Rate 60 Minutes 36 4 Out Kbytes Rate 60 Minutes The Processor Util 1 Minute is defined as follows: The average, over a 1 minute interval, of the percentage of time that the CHPID Processor was utilized to transfer data for a specific LP. It does not include idle time or time used by routine maintenance tasks. The range for this value is from 0 to 100%. The In Kbytes Rate 1 Minute is defined as follows: The average, over a 1 minute interval, of the number of inbound kilobytes processed for a specific LP. The Out Kbytes Rate 1 Minute is defined as follows: The average, over a 1 minute interval, of the number of outbound kilobytes processed for a specific LP. The 5 and 60 minute fields are defined similar to the 1 minute fields, but pertain to intervals of 5 and 60 minutes.') ibmOSAExpPerfDataLP1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP1.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP1.setDescription('The performance data on this OSA for partition 1. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP2.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP2.setDescription('The performance data on this OSA for partition 2. 
The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP3 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP3.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP3.setDescription('The performance data on this OSA for partition 3. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP4 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP4.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP4.setDescription('The performance data on this OSA for partition 4. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP5 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP5.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP5.setDescription('The performance data on this OSA for partition 5. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP6 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP6.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP6.setDescription('The performance data on this OSA for partition 6. 
The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP7 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP7.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP7.setDescription('The performance data on this OSA for partition 7. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP8 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP8.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP8.setDescription('The performance data on this OSA for partition 8. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP9 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP9.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP9.setDescription('The performance data on this OSA for partition 9. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP10 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP10.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP10.setDescription('The performance data on this OSA for partition 10. 
The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP11 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP11.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP11.setDescription('The performance data on this OSA for partition 11. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP12 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP12.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP12.setDescription('The performance data on this OSA for partition 12. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP13 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 14), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP13.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP13.setDescription('The performance data on this OSA for partition 13. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP14 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 15), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP14.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP14.setDescription('The performance data on this OSA for partition 14. 
The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPerfDataLP15 = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 2, 1, 16), OctetString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPerfDataLP15.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfDataLP15.setDescription('The performance data on this OSA for partition 15. The 40 bytes of hex data that are returned are decoded the same as for partition 0.') ibmOSAExpPETable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3), ) if mibBuilder.loadTexts: ibmOSAExpPETable.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPETable.setDescription('This table provides PE information to help IBM diagnose any OSA problems.') ibmOSAExpPEEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: ibmOSAExpPEEntry.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEEntry.setDescription('Definition of a single entry in the ibmOSAExpPETable. 
Indexed by the ifIndex of the corresponding Device interface') ibmOSAExpPEMaxSizeArpCache = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 214783647))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPEMaxSizeArpCache.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEMaxSizeArpCache.setDescription('The maximum size of the OSA ARP Cache') ibmOSAExpPEArpPendingEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1, 2), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPEArpPendingEntries.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEArpPendingEntries.setDescription('This is the number of Pending entries in the ARP cache.') ibmOSAExpPEArpActiveEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1, 3), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPEArpActiveEntries.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEArpActiveEntries.setDescription('This the number of active ARP entries.') ibmOSAExpPEIPEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1, 4), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPEIPEntries.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEIPEntries.setDescription('The number of IP addresses known to the OSA For OSD chpids, this is the maximum # of IP addresses that are: - home ip addresses (Version 4 and Version 6) - remote ip addresses in the arp cache (Version 4 only) - multicast ip addresses that the OSA must accept inbound data packets for (Version 4 and Version 6)') ibmOSAExpPEMulticastEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1, 5), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPEMulticastEntries.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEMulticastEntries.setDescription('This is the number of IP multicast addresses currently on the OSA') ibmOSAExpPEMulticastData = 
MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 3, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(3360, 3360)).setFixedLength(3360)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOSAExpPEMulticastData.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEMulticastData.setDescription('This contains information on the multicast entries that are currently on the OSA. These are in a format that is for IBM use only') ibmOSAExpEthPortTable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4), ) if mibBuilder.loadTexts: ibmOSAExpEthPortTable.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpEthPortTable.setDescription('This table represents the data associated with a port on an OSA-Express Gigabit or Fast Ethernet OSA.') ibmOSAExpEthPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: ibmOSAExpEthPortEntry.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpEthPortEntry.setDescription('Definition of a single entry in the ibmOSAExpEthPortTable. 
Indexed by the ifIndex of the corresponding Device interface.') ibmOsaExpEthPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthPortNumber.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthPortNumber.setDescription('The physical port number for this port.') ibmOsaExpEthPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(65, 81))).clone(namedValues=NamedValues(("gigabitEthernet", 65), ("fastEthernet", 81)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthPortType.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthPortType.setDescription('The physical port type.') ibmOsaExpEthLanTrafficState = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("undefined", 0), ("unavailable", 1), ("enabling", 2), ("disabling", 3), ("enabled", 4), ("disabled", 5), ("linkMonitor", 6), ("definitionError", 7), ("configuredOffline", 8)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthLanTrafficState.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthLanTrafficState.setDescription('The LAN state value ranges from 0 to 8. 
A value of 5, disabled is further explained in object ibmOsaExpEthDisabledStatus.') ibmOsaExpEthServiceMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notInServiceMode", 0), ("inServiceMode", 1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthServiceMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthServiceMode.setDescription('This object indicates if the processor is in service mode or not.') ibmOsaExpEthDisabledStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 5), Bits().clone(namedValues=NamedValues(("reserved0", 0), ("internalPortFailure", 1), ("reserved2", 2), ("reserved3", 3), ("reserved4", 4), ("reserved5", 5), ("portTemporarilyDisabled", 6), ("reserved7", 7), ("reserved8", 8), ("serviceProcessorRequest", 9), ("networkRequest", 10), ("osasfRequest", 11), ("configurationChange", 12), ("linkFailureThresholdExceeded", 13), ("reserved14", 14), ("reserved15", 15)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthDisabledStatus.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthDisabledStatus.setDescription('When the value of ibmOsaExpEthLanTrafficState is NOT disabled, the value of this object will be zero. When the value of ibmOsaExpEthLanTrafficState is disabled(5), this object explains the reason for the disabled state. The value for this object may be a combination of the bits shown.') ibmOsaExpEthConfigName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 34))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthConfigName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthConfigName.setDescription('This is the name of the configuration that is on the OSA. It is set using OSA/SF. 
It is not used by OSA') ibmOsaExpEthConfigSpeedMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-1, 0, 1, 2, 3, 4, 6))).clone(namedValues=NamedValues(("notValidGigabit", -1), ("autoNegotiate", 0), ("tenMbHalfDuplex", 1), ("tenMbFullDuplex", 2), ("oneHundredMbHalfDuplex", 3), ("oneHundredMbFullDuplex", 4), ("oneThousandMbFullDuplex", 6)))).setUnits('Megabits per second').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthConfigSpeedMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthConfigSpeedMode.setDescription('The configured port speed. This field shows the speed that was configured by the user for the OSA-Express Fast Ethernet feature. It is not used by OSA-Express Gigabit features and will return -1 (FFFF)') ibmOsaExpEthActiveSpeedMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 6))).clone(namedValues=NamedValues(("unknown", 0), ("tenMbHalfDuplex", 1), ("tenMbFullDuplex", 2), ("oneHundredMbHalfDuplex", 3), ("oneHundredMbFullDuplex", 4), ("oneThousandMbFullDuplex", 6)))).setUnits('Megabits per second').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthActiveSpeedMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthActiveSpeedMode.setDescription('The actual speed and mode the OSA is running in') ibmOsaExpEthMacAddrActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthMacAddrActive.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthMacAddrActive.setDescription('A 6 byte OCTET STRING which contains the current MAC address in use on the adapter. 
The values are in canonical format.') ibmOsaExpEthMacAddrBurntIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthMacAddrBurntIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthMacAddrBurntIn.setDescription('A 6 byte OCTET STRING which contains the burned in MAC address on the OSA. The values are in canonical format.') ibmOsaExpEthUserData = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthUserData.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthUserData.setDescription('Data set by the user. It is ignored by the OSA.') ibmOsaExpEthOutPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthOutPackets.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthOutPackets.setDescription('This is the number of packets that have been transmitted by the OSA since the last time the OSA port was reset') ibmOsaExpEthInPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 13), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthInPackets.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthInPackets.setDescription('This is the number of packets that have been received by the OSA since the last time the OSA port was reset') ibmOsaExpEthInGroupFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 14), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthInGroupFrames.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthInGroupFrames.setDescription('This is the number of multicast frames that have been received by the OSA.') ibmOsaExpEthInBroadcastFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 15), 
Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthInBroadcastFrames.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthInBroadcastFrames.setDescription('This is the number of broadcast frames that have been received by the OSA.') ibmOsaExpEthPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 16), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthPortName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthPortName.setDescription('Name of the port as used by TCP/IP') ibmOsaExpEthInUnknownIPFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 17), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthInUnknownIPFrames.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthInUnknownIPFrames.setDescription('This is the number of non-IP received frames') ibmOsaExpEthGroupAddrTable = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 4, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(256, 256)).setFixedLength(256)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpEthGroupAddrTable.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpEthGroupAddrTable.setDescription('This field contains the active Group Addresses. An individual Group Address is 6 bytes long with an additional 2 bytes of padding. 
There are 32 group addresses.') ibmOSAExpTRPortTable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5), ) if mibBuilder.loadTexts: ibmOSAExpTRPortTable.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpTRPortTable.setDescription('This table represents the data associated with a port on an OSA-Express token ring feature.') ibmOSAExpTRPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: ibmOSAExpTRPortEntry.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpTRPortEntry.setDescription('Definition of a single entry in the ibmOSAExpTRPortTable. Indexed by the ifIndex of the corresponding Device interface.') ibmOsaExpTRPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRPortNumber.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRPortNumber.setDescription('The physical port number for this port.') ibmOsaExpTRPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(82))).clone(namedValues=NamedValues(("tokenring", 82)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRPortType.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRPortType.setDescription('The physical port type.') ibmOsaExpTRLanTrafficState = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("undefined", 0), ("unavailable", 1), ("enabling", 2), ("disabling", 3), ("enabled", 4), ("disabled", 5), ("linkMonitor", 6), ("definitionError", 7), ("configuredOffline", 8)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRLanTrafficState.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRLanTrafficState.setDescription('The LAN state 
value ranges from 0 to 8. A value of 5, disabled is further explained in object ibmOsaExpTRDisabledStatus') ibmOsaExpTRServiceMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notInServiceMode", 0), ("inServiceMode", 1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRServiceMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRServiceMode.setDescription('This object indicates if the processor is in service mode or not.') ibmOsaExpTRDisabledStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 5), Bits().clone(namedValues=NamedValues(("reserved0", 0), ("internalPortFailure", 1), ("reserved2", 2), ("reserved3", 3), ("reserved4", 4), ("reserved5", 5), ("portTemporarilyDisabled", 6), ("reserved7", 7), ("reserved8", 8), ("serviceProcessorRequest", 9), ("networkRequest", 10), ("osasfRequest", 11), ("configurationChange", 12), ("linkFailureThresholdExceeded", 13), ("reserved14", 14), ("reserved15", 15)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRDisabledStatus.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRDisabledStatus.setDescription('When the value of ibmOsaExpTRLanTrafficState is NOT disabled, the value of this object will be zero. When the value of ibmOsaExpTRLanTrafficState is disabled(5), this object explains the reason for the disabled state. The value for this object may be a combination of the bits shown.') ibmOsaExpTRConfigName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 34))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRConfigName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRConfigName.setDescription('This is the name of the configuration that is on the OSA. It is set using OSA/SF. 
It is not used by OSA') ibmOsaExpTRMacAddrActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRMacAddrActive.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRMacAddrActive.setDescription('A 6 byte OCTET STRING which contains the current MAC address in use on the OSA.') ibmOsaExpTRMacAddrBurntIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRMacAddrBurntIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRMacAddrBurntIn.setDescription('A 6 byte OCTET STRING which contains the burned in MAC address on the OSA') ibmOsaExpTRConfigSpeedMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 6))).clone(namedValues=NamedValues(("autoNegotiate", 0), ("fourMbHalfDuplex", 1), ("fourMbFullDuplex", 2), ("sixteenMbHalfDuplex", 3), ("sixteenMbFullDuplex", 4), ("oneHundredMbFullDuplex", 6)))).setUnits('Megabits per second').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRConfigSpeedMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRConfigSpeedMode.setDescription('The configured port speed. 
This field shows the speed that was configured by the user for the OSA-Express Token Ring feature.') ibmOsaExpTRActiveSpeedMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 6))).clone(namedValues=NamedValues(("unknown", 0), ("fourMbHalfDuplex", 1), ("fourMbFullDuplex", 2), ("sixteenMbHalfDuplex", 3), ("sixteenMbFullDuplex", 4), ("oneHundredMbFullDuplex", 6)))).setUnits('Megabits per second').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRActiveSpeedMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRActiveSpeedMode.setDescription('The actual speed and mode the OSA is running in') ibmOsaExpTRUserData = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRUserData.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRUserData.setDescription('Data set by the user. It is ignored by the OSA.') ibmOsaExpTRPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRPortName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRPortName.setDescription('Name of the port as used by TCP/IP') ibmOsaExpTRGroupAddrTable = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(256, 256)).setFixedLength(256)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRGroupAddrTable.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRGroupAddrTable.setDescription('This field contains the active Group Addresses. 
An individual Group Address is 6 bytes long with an additional 2 bytes of padding.') ibmOsaExpTRFunctionalAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 14), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRFunctionalAddr.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRFunctionalAddr.setDescription("A 4 byte OCTET STRING which contains the OSA-Express's functional address.") ibmOsaExpTRRingStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 15), Bits().clone(namedValues=NamedValues(("reserved0", 0), ("reserved1", 1), ("reserved2", 2), ("reserved3", 3), ("reserved4", 4), ("reserved5", 5), ("reserved6", 6), ("reserved7", 7), ("reserved8", 8), ("reserved9", 9), ("reserved10", 10), ("reserved11", 11), ("reserved12", 12), ("reserved13", 13), ("noStatusOpenNotCompleted", 14), ("reserved15", 15), ("signalLoss", 16), ("hardError", 17), ("softError", 18), ("reserved19", 19), ("lobeWireFault", 20), ("autoRemovalError", 21), ("fdxProtocol", 22), ("removeReceived", 23), ("counterOverflow", 24), ("singleStation", 25), ("ringRecovery", 26), ("sRCounterOverflow", 27), ("reserved29", 28), ("openInFDXmode", 29), ("fourMbFullDuplex", 30), ("fourMbHalfDuplex", 31)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRRingStatus.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRRingStatus.setDescription("The current interface status which can be used to diagnose fluctuating problems that can occur on token rings after a station has successfully been added to the ring. Before an open is completed, this object has the value for the 'noStatusOpenNotCompleted' condition. The ibmOsaExpTRRingState and ibmOsaExpTRRingOpenStatus objects provide for debugging problems when the station can not even enter the ring. The object's value is a sum of values, one for each currently applicable condition. 
This information is essentially from RFC 1231.") ibmOsaExpTRAllowAccessPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 16), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRAllowAccessPriority.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRAllowAccessPriority.setDescription('This field contains the maximum token priority the ring station defined by this entry in the table is permitted to transmit.') ibmOsaExpTREarlyTokenRelease = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("true", 0), ("false", 1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTREarlyTokenRelease.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTREarlyTokenRelease.setDescription('Indicates if the ring station supports early token release. Only valid when port is running in 16Mb half duplex mode. Otherwise it is always set to false (1)') ibmOsaExpTRBeaconingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRBeaconingAddress.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRBeaconingAddress.setDescription('This field contains the node address of the NAUN as reported in the most recently received Beacon MAC frame. This field is valid when ibmOsaExpTRRingOpenStatus is set to beaconing. 
Otherwise it is ignored') ibmOsaExpTRUpstreamNeighbor = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRUpstreamNeighbor.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRUpstreamNeighbor.setDescription('The MAC-address of the up stream neighbor station in the ring (NAUN).') ibmOsaExpTRRingState = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("opened", 1), ("closed", 2), ("opening", 3), ("closing", 4), ("openFailure", 5), ("ringFailure", 6)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRRingState.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRRingState.setDescription('The current interface state with respect to entering or leaving the ring.') ibmOsaExpTRRingOpenStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("noOpen", 1), ("badParameter", 2), ("lobeFailed", 3), ("signalLoss", 4), ("insertionTimeout", 5), ("ringFailed", 6), ("beaconing", 7), ("duplicateMAC", 8), ("requestFailed", 9), ("removeReceived", 10), ("open", 11), ("sARecFrameNotEqualNAUNs", 12), ("claimTokenRec", 13), ("ringPurgeFramRec", 14), ("activeMonPresRec", 15), ("standbyMonPresRec", 16), ("accessProtocolDenied", 17), ("fDXInsDeniedDACfailOnOpen", 18), ("fDXInsDeniedDACfailOnBeaconTest", 19), ("beaconBeforeOpen", 20), ("insertTimerExpDuringDAC", 21), ("insertTimerExpDuringBeaconTest", 22), ("lobeMedizTestFailure", 23), ("heartbeatFailBeforeOpenCompleted", 24), ("heartbeatFailDuringBeaconTest", 25), ("recBeaconFrameWithInvalidSA", 26)))).setMaxAccess("readonly") if 
# --- OSA-Express Token Ring port statistics -----------------------------------
# Generated pysmi/pysnmp column definitions for the Token Ring table entry
# (OID prefix 1.3.6.1.4.1.2.6.188.1.5.1, columns 22-38).  Each object follows
# the standard pysmi triple: definition, setStatus, setDescription.
# Reconstructed one statement per line; all OIDs and strings are unchanged.
# NOTE(review): the first statement below completes an `if` begun on the
# preceding source line (the ibmOsaExpTRRingOpenStatus definition) -- confirm
# against the line immediately above this span.
mibBuilder.loadTexts: ibmOsaExpTRRingOpenStatus.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRRingOpenStatus.setDescription("This object indicates the success, or the reason for failure of the station's most recent attempt to enter the ring.")
# Traffic counters (packets in/out since the port was reset).
ibmOsaExpTRPacketsTransmitted = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 22), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRPacketsTransmitted.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRPacketsTransmitted.setDescription('This field contains the count of the total number of packets transmitted from this port since the OSA port was reset')
ibmOsaExpTRPacketsReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 23), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRPacketsReceived.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRPacketsReceived.setDescription('This field contains the count of the total number of packets received by this port since the OSA port was reset')
# Token Ring soft-error counters (columns 24-34).
ibmOsaExpTRLineErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRLineErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRLineErrorCount.setDescription('This counter is incremented when a frame or token is copied or repeated by a station, the E bit is zero in the frame or token and one of the following conditions exists: 1) there is a non-data bit (J or K bit) between the SD and the ED of the frame or token, or 2) there is an FCS error in the frame.')
ibmOsaExpTRBurstErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRBurstErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRBurstErrorCount.setDescription('This counter is incremented when a station detects the absence of transitions for five half-bit timers (burst-five error).')
ibmOsaExpTRACErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRACErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRACErrorCount.setDescription('This counter is incremented when a station receives an AMP or SMP frame in which A is equal to C is equal to 0, and then receives another SMP frame with A is equal to C is equal to 0 without first receiving an AMP frame. It denotes a station that cannot set the AC bits properly.')
ibmOsaExpTRAbortTransErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRAbortTransErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRAbortTransErrorCount.setDescription('This counter is incremented when a station transmits an abort delimiter while transmitting.')
ibmOsaExpTRInternalErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRInternalErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRInternalErrorCount.setDescription('This counter is incremented when a station recognizes an internal error.')
ibmOsaExpTRLostFrameErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRLostFrameErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRLostFrameErrorCount.setDescription('This counter is incremented when a station is transmitting and its TRR timer expires. This condition denotes a condition transmitting station in strip mode does not receive the trailer of the frame TRR timer goes off.')
ibmOsaExpTRRcvCongestionCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRRcvCongestionCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRRcvCongestionCount.setDescription('This counter is incremented when a station recognizes a frame addressed to its specific address, but has no available buffer space indicating that the station is congested.')
ibmOsaExpTRFrameCopyErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRFrameCopyErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRFrameCopyErrorCount.setDescription('This counter is incremented when a station recognizes a frame addressed to its specific address and detects that the FS field A bits are set to 1 indicating a possible line hit or duplicate address.')
ibmOsaExpTRTokenErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRTokenErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRTokenErrorCount.setDescription('This counter is incremented when a station acting as the active monitor recognizes an error condition that needs a token transmitted.')
ibmOsaExpTRFullDuplexErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRFullDuplexErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRFullDuplexErrorCount.setDescription('An error has been detected by the FDX protocol')
ibmOsaExpTRSoftErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRSoftErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRSoftErrorCount.setDescription('The number of Soft Errors the interface has detected. It directly corresponds to the number of Report Error MAC frames that this interface has transmitted. Soft Errors are those which are recoverable by the MAC layer protocols.')
# Hard-error / ring-recovery counters (columns 35-38).
ibmOsaExpTRHardErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRHardErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRHardErrorCount.setDescription('The number of times this interface has detected an immediately recoverable fatal error. It denotes the number of times this interface is either transmitting or receiving beacon MAC frames.')
ibmOsaExpTRSignalLossErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRSignalLossErrorCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRSignalLossErrorCount.setDescription('The number of times this interface has detected the loss of signal condition from the ring.')
ibmOsaExpTRTransmitBeaconCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRTransmitBeaconCount.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRTransmitBeaconCount.setDescription('The number of times this interface has transmitted a beacon frame.')
ibmOsaExpTRRecoveryCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ibmOsaExpTRRecoveryCounter.setStatus('current')
if mibBuilder.loadTexts: ibmOsaExpTRRecoveryCounter.setDescription('The number of Claim Token MAC frames received or transmitted after the interface has received a frame Ring Ring Purge MAC counter signifies the number of times the ring has been purged and is being recovered back into a normal operating state.')
ibmOsaExpTRLobeWireFaultCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 39), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRLobeWireFaultCount.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRLobeWireFaultCount.setDescription('The number of times the interface has detected an open or short circuit in the lobe data path. The adapter will be closed and ibmOsaExpTRRingState will signify this condition.') ibmOsaExpTRRemoveReceivedCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 40), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRRemoveReceivedCount.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRRemoveReceivedCount.setDescription('The number of times the interface has received a Remove Ring Station MAC frame request. When this frame is received the interface will enter the closed state and ibmOsaExpTRRingState will signify this condition.') ibmOsaExpTRSingleStationCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 5, 1, 41), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpTRSingleStationCount.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpTRSingleStationCount.setDescription('The number of times the interface has sensed that it is the only station on the ring. This will happen if the interface is the first one up on a ring, or if there is a hardware problem.') ibmOSAExpATMPortTable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7), ) if mibBuilder.loadTexts: ibmOSAExpATMPortTable.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpATMPortTable.setDescription('This table represents the data associated with an emulated Ethernet port on an OSA-Express ATM feature. 
There are a maximum of 2 logical ports on each ATM feature, however, each port is shown as though it exists independently with each having an entry in the ibmOSAExpChannelTable.') ibmOSAExpATMPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: ibmOSAExpATMPortEntry.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpATMPortEntry.setDescription('Definition of a single entry in the ibmOSAExpATMPortTable. Indexed by the ifIndex of the corresponding Device interface.') ibmOsaExpATMPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMPortNumber.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMPortNumber.setDescription('The logical port number of this port') ibmOsaExpATMPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(17))).clone(namedValues=NamedValues(("emulatedEthernet", 17)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMPortType.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMPortType.setDescription('The logical port type.') ibmOsaExpATMLanTrafficState = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("undefined", 0), ("unavailable", 1), ("enabling", 2), ("disabling", 3), ("enabled", 4), ("disabled", 5), ("linkMonitor", 6), ("definitionError", 7), ("configuredOffline", 8)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLanTrafficState.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLanTrafficState.setDescription('The LAN state value ranges from 0 to 8. 
A value of 5, disabled is further explained in object ibmOsaExpATMDisabledStatus.') ibmOsaExpATMServiceMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notInServiceMode", 0), ("inServiceMode", 1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMServiceMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMServiceMode.setDescription('This object indicates if the processor is in service mode or not.') ibmOsaExpATMDisabledStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 5), Bits().clone(namedValues=NamedValues(("reserved0", 0), ("internalPortFailure", 1), ("reserved2", 2), ("reserved3", 3), ("reserved4", 4), ("reserved5", 5), ("portTemporarilyDisabled", 6), ("reserved7", 7), ("reserved8", 8), ("serviceProcessorRequest", 9), ("networkRequest", 10), ("osasfRequest", 11), ("configurationChange", 12), ("linkFailureThresholdExceeded", 13), ("reserved14", 14), ("reserved15", 15)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMDisabledStatus.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMDisabledStatus.setDescription('When the value of ibmOsaExpATMLanTrafficState is NOT disabled, the value of this object will be zero. When the value of ibmOsaExpATMLanTrafficState is disabled(5), this object explains the reason for the disabled state. The value for this object may be a combination of the bits shown.') ibmOsaExpATMConfigName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 34))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigName.setDescription('This is the name of the configuration that is on the OSA. It is set using OSA/SF. 
It is not used by OSA') ibmOsaExpATMMacAddrActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMacAddrActive.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMacAddrActive.setDescription('A 6 byte OCTET STRING which contains the current MAC address in use on the OSA. The values are in canonical format.') ibmOsaExpATMMacAddrBurntIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMacAddrBurntIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMacAddrBurntIn.setDescription('A 6 byte OCTET STRING which contains the burned in MAC address on the OSA. The values are in canonical format.') ibmOsaExpATMUserData = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMUserData.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMUserData.setDescription('Data set by the user. 
It is ignored by the OSA.') ibmOsaExpATMPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMPortName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMPortName.setDescription('Name of the port as used by TCP/IP') ibmOsaExpATMGroupMacAddrTable = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(256, 256)).setFixedLength(256)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMGroupMacAddrTable.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMGroupMacAddrTable.setDescription('This field contains the active Group Addresses. An individual Group Address is 6 bytes long with an additional 2 bytes of padding.') ibmOsaExpATMIBMEnhancedMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMIBMEnhancedMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMIBMEnhancedMode.setDescription('When set to Yes, this keeps data connections active when the connection to the LES is lost.') ibmOsaExpATMBestEffortPeakRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 15), Integer32()).setUnits('Megabytes per second').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMBestEffortPeakRate.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMBestEffortPeakRate.setDescription('Values range from 10-1550 and must be divided by 10 to get the proper value. 
A value of 1550 indicates 155.0 Mbytes/sec') ibmOsaExpATMConfigMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("automatic", 1), ("manual", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigMode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigMode.setDescription('Indicates whether this LAN Emulation Client should auto-configure the next time it is (re)started. In automatic (1) mode, a client uses a LAN Emulation Configuration Server to learn the ATM address of its LAN Emulation Server, and to obtain other parameters. lecConfig (LanType, MaxDataFrameSize, LanName) are used in the configure request. ibmOsaExpATMConfigLESATMAddress is ignored. In manual (2) mode, management tells the client the ATM address of its LAN Emulation Server and the value of the other parmeters. lecConfig (LanType, MaxDataFrameSize, LanName) are used in the Join request. 
ibmOsaExpATMConfigLESATMAddress tells the client which LES to call.') ibmOsaExpATMConfigLanType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(17))).clone(namedValues=NamedValues(("emulatedEthernet", 17)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigLanType.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigLanType.setDescription('The logical port type that the user configured the port for') ibmOsaExpATMActualLanType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(17))).clone(namedValues=NamedValues(("emulatedEthernet", 17)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMActualLanType.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMActualLanType.setDescription('The actual logical port type the port is running in') ibmOsaExpATMConfigMaxDataFrmSz = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("unspecified", 1), ("f1516", 2), ("f4544", 3), ("f9234", 4), ("f18190", 5)))).setUnits('bytes').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigMaxDataFrmSz.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigMaxDataFrmSz.setDescription('The maximum data frame size (in bytes) which this client will use the next time it returns to the Initial State. Auto-configuring clients use this parameter in their configure requests. 
Manually configured clients use it in their join requests.') ibmOsaExpATMActualMaxDataFrmSz = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("unspecified", 1), ("f1516", 2), ("f4544", 3), ("f9234", 4), ("f18190", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMActualMaxDataFrmSz.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMActualMaxDataFrmSz.setDescription('The maximum data frame size (in bytes) which this client will use the next time it returns to the Initial State. Auto-configuring clients use this parameter in their configure requests. Manually configured clients use it in their join requests.') ibmOsaExpATMConfigELANName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 21), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 36))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigELANName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigELANName.setDescription('The ELAN Name this client will use the next time it returns to the Initial State. Auto-configuring clients use this parameter in their configure requests. Manually configured clients use it in their join requests.') ibmOsaExpATMActualELANName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 22), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 36))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMActualELANName.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMActualELANName.setDescription('The ELAN Name this client will use the next time it returns to the Initial State. Auto-configuring clients use this parameter in their configure requests. 
Manually configured clients use it in their join requests.') ibmOsaExpATMConfigLESATMAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 23), OctetString().subtype(subtypeSpec=ValueSizeConstraint(20, 20)).setFixedLength(20)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigLESATMAddress.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigLESATMAddress.setDescription("The LAN Emulation Server which this client will use the next time it is started in manual configuration mode. When ibmOsaExpATMConfigMode is 'automatic', there is no need to set this address, Address) and no advantage to doing so. The client will use the LECS to find a LES, putting the auto-configured address in ibmOsaExpATMActualLESATMAddress while leaving ibmOsaExpATMConfigLESATMAddress alone. Corresponds to Initial State Parameter C9. In LAN Emulation MIB, the OCTET STRING has length 0 or 20. For OSA, the length shall be 20, with the value 0 defined to mean that ibmOsaExpATMConfigMode is 'automatic'.") ibmOsaExpATMActualLESATMAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 24), OctetString().subtype(subtypeSpec=ValueSizeConstraint(20, 20)).setFixedLength(20)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMActualLESATMAddress.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMActualLESATMAddress.setDescription("The LAN Emulation Server which this client will use the next time it is started in manual configuration mode. When lecConfigMode is 'automatic', there is no need to set this address, Address) and no advantage to doing so. The client will use the LECS to find a LES, putting the auto-configured address in ibmOsaExpATMActualLESATMAddress while leaving ibmOsaExpATMConfigLESATMAddress alone. Corresponds to Initial State Parameter C9. In LAN Emulation MIB, the OCTET STRING has length 0 or 20. 
For OSA, the length shall be 20, with the value 0 defined to mean that ibmOsaExpATMConfigMode is 'automatic'.") ibmOsaExpATMControlTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 300))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlTimeout.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlTimeout.setDescription('Control Time-out. Time out period used for timing out most request/response control frame interactions, as specified elsewhere in the LAN Emulation specification. This time value is expressed in seconds. Corresponds to Initial State Parameter C7.') ibmOsaExpATMMaxUnknownFrameCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMaxUnknownFrameCount.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMaxUnknownFrameCount.setDescription('Maximum Unknown Frame Count. See the description of ibmOsaExpATMMaxUnknownFrameTime below. Corresponds to Initial State Parameter C10.') ibmOsaExpATMMaxUnknownFrameTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMaxUnknownFrameTime.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMaxUnknownFrameTime.setDescription('Maximum Unknown Frame Time. Within the period of time defined by the Maximum Unknown Frame Time, a LE Client will send no more than Maximum Unknown Frame Count frames to the BUS for a given unicast LAN Destination, and it must also initiate the address resolution protocol to resolve that LAN Destination. This time value is expressed in seconds. 
Corresponds to Initial State Parameter C11.') ibmOsaExpATMVCCTimeoutPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 28), Integer32()).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMVCCTimeoutPeriod.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMVCCTimeoutPeriod.setDescription('VCC Time-out Period. A LE Client SHOULD release any Data Direct VCC that it has not used to transmit or receive any data frames for the length of the VCC Time-out Period. This parameter is only meaningful for SVC Data Direct VCCs. This time value is expressed in seconds. The default value is 20 minutes. A value of 0 seconds means that the timeout period is infinite. Negative values will be rejected by the agent. Corresponds to Initial State Parameter C12.') ibmOsaExpATMMaxRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 29), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMaxRetryCount.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMaxRetryCount.setDescription("Maximum Retry Count. A LE CLient MUST not retry a LE_ARP_REQUEST for a given frame's LAN destination more than Maximum Retry Count times, after the first LE_ARP_REQUEST for that same frame's LAN destination. Corresponds to Initial State Parameter C13.") ibmOsaExpATMAgingTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 300))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMAgingTime.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMAgingTime.setDescription('Aging Time. The maximum time that a LE Client will maintain an entry in its LE_ARP cache in the absence of a verification of that relationship. This time value is expressed in seconds. 
Corresponds to Initial State Parameter C17.') ibmOsaExpATMForwardDelayTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 31), Integer32().subtype(subtypeSpec=ValueRangeConstraint(4, 30))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMForwardDelayTime.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMForwardDelayTime.setDescription('Forward Delay Time. The maximum time that a LE Client will maintain an entry for a non-local MAC address in its LE_ARP cache in the absence of a verification of that relationship, as long as the Topology Change flag C19 is true. ibmOsaExpATMForwardDelayTime SHOULD BE less than ibmOsaExpATMAgingTIme. When it is not, ibmOsaExpATMAgingTime governs LE_ARP aging. This time value is expressed in seconds. Corresponds to Initial State Parameter C18.') ibmOsaExpATMExpectedARPRespTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 32), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMExpectedARPRespTime.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMExpectedARPRespTime.setDescription('Expected LE_ARP Reponse Time. The maximum time that the LEC expects an LE_ARP_REQUEST/LE_ARP_RESPONSE cycle to take. Used for retries and verifies. This time value is expressed in seconds. Corresponds to Initial State Parameter C20.') ibmOsaExpATMFlushTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 33), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMFlushTimeout.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMFlushTimeout.setDescription('Flush Time-out. Time limit to wait to receive a LE_FLUSH_RESPONSE after the LE_FLUSH_REQUEST has been sent before taking recovery action. This time value is expressed in seconds. 
Corresponds to Initial State Parameter C21.') ibmOsaExpATMPathSwitchingDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 34), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMPathSwitchingDelay.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMPathSwitchingDelay.setDescription('Path Switching Delay. The time since sending a frame to the BUS after which the LE Client may assume that the frame has been either discarded or delivered to the recipient. May be used to bypass the Flush protocol. This time value is expressed in seconds. Corresponds to Initial State Parameter C22.') ibmOsaExpATMLocalSegmentID = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 35), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLocalSegmentID.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLocalSegmentID.setDescription('Local Segment ID. The segment ID of the emulated LAN. This is only required for IEEE 802.5 clients that are Source Routing bridges. Corresponds to Initial State Parameter C23.') ibmOsaExpATMMltcstSendVCCType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 36), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("bestEffort", 1), ("variableBitRate", 2), ("constantBitRate", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMltcstSendVCCType.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMltcstSendVCCType.setDescription('Multicast Send VCC Type. Signalling parameter that SHOULD be used by the LE Client when establishing the Multicast Send VCC. This is the method to be used by the LE Client when specifying traffic parameters when it sets up the Multicast Send VCC for this emulated LAN. 
Corresponds to Initial State Parameter C24.') ibmOsaExpATMMltcstSendVCCAvgRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 37), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMltcstSendVCCAvgRate.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMltcstSendVCCAvgRate.setDescription('Multicast Send VCC AvgRate. Signalling parameter that SHOULD be used by the LE Client when estabishing the Multicast Send VCC. Forward and Backward Sustained Cell Rate to be requested by LE Client when setting up Multicast Send VCC, if using Variable bit rate codings. Corresponds to Initial State Parameter C25.') ibmOsaExpATMMcastSendVCCPeakRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 38), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMcastSendVCCPeakRate.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMcastSendVCCPeakRate.setDescription('Multicast Send VCC PeakRate. Signalling parameter that SHOULD be used by the LE Client when establishing the Multicast Send VCC. Forward and Backward Peak Cell Rate to be requested by LE Client when setting up the Multicast Send VCC when using either Variable or Constant bit rate codings. Corresponds to Initial State Parameter C26.') ibmOsaExpATMConnectCompleteTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 39), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConnectCompleteTimer.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConnectCompleteTimer.setDescription('Connection Complete Timer. Optional. In Connection Establish ment this is the time period in which data or a READY_IND message is expected from a Calling Party. This time value is expressed in seconds. 
Corresponds to Initial State Parameter C28.') ibmOsaExpATMClientATMAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 40), OctetString().subtype(subtypeSpec=ValueSizeConstraint(20, 20)).setFixedLength(20)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMClientATMAddress.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMClientATMAddress.setDescription("LE Client's ATM Addresses. The primary ATM address of this LAN Emulation Client. This address is used to establish the Control Direct and Multicast Send VCCs, and may also be used to set up Data Direct VCCs. A client may have additional ATM addresses for use with Data Direct VCCs. Corresponds to Initial State Parameter C1.") ibmOsaExpATMClientIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 41), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65279))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMClientIdentifier.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMClientIdentifier.setDescription("LE Client Identifier. Each LE Client requires a LE Client Identifier (LECID) assigned by the LE Server during the Join phase. The LECID is placed in control requests by the LE Client and MAY be used for echo suppression on multicast data frames sent by that LE Client. This value MUST NOT change without terminating the LE Client and returning to the Initial state. A valid LECID MUST be in the range X'0001' through X'FEFF'. The value of this object is only meaningful for a LEC that is connected to a LES. For a LEC which does not belong to an emulated LAN, the value of this object is defined to be 0. 
Corresponds to Initial State Parameter C14.") ibmOsaExpATMClientCurrentState = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 42), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("initialState", 1), ("lecsConnect", 2), ("configure", 3), ("join", 4), ("initialRegistration", 5), ("busConnect", 6), ("operational", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMClientCurrentState.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMClientCurrentState.setDescription("The current state of the LAN Emulation Client. Note that 'ifOperStatus' is defined to be 'up' when, and only when, this field is 'operational'.") ibmOsaExpATMLastFailureRespCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))).clone(namedValues=NamedValues(("none", 1), ("timeout", 2), ("undefinedError", 3), ("versionNotSupported", 4), ("invalidRequestParameters", 5), ("duplicateLanDestination", 6), ("duplicateAtmAddress", 7), ("insufficientResources", 8), ("accessDenied", 9), ("invalidRequesterId", 10), ("invalidLanDestination", 11), ("invalidAtmAddress", 12), ("noConfiguration", 13), ("leConfigureError", 14), ("insufficientInformation", 15)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLastFailureRespCode.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLastFailureRespCode.setDescription("Status code from the last failed Configure response or Join response. Failed responses are those for which the LE_CONFIGURE_RESPONSE / LE_JOIN_RESPONSE frame contains a non-zero code, or fails to arrive within a timeout period. If none of this client's requests have failed, this object has the value 'none'. If the failed response contained a STATUS code that is not defined in the LAN Emulation specification, this object has the value 'undefinedError'. 
The value 'timeout' is self explanatory. Other failure codes correspond to those defined in the specification, although they may have different numeric values.") ibmOsaExpATMLastFailureState = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("initialState", 1), ("lecsConnect", 2), ("configure", 3), ("join", 4), ("initialRegistration", 5), ("busConnect", 6), ("operational", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLastFailureState.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLastFailureState.setDescription("The state this client was in when it updated the 'ibmOsaExpATMLastFailureRespCode'. If 'ibmOsaExpATMLastFailureRespCode' is 'none', this object has the value initialState(1).") ibmOsaExpATMProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 45), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMProtocol.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMProtocol.setDescription('The LAN Emulation protocol which this client supports, and specifies in its LE_JOIN_REQUESTs.') ibmOsaExpATMLeVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 46), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLeVersion.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLeVersion.setDescription('The LAN Emulation protocol version which this client supports, and specifies in its LE_JOIN_REQUESTs.') ibmOsaExpATMTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 47), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMTopologyChange.setStatus('current') if 
mibBuilder.loadTexts: ibmOsaExpATMTopologyChange.setDescription("Topology Change. Boolean indication that the LE Client is using the Forward Delay Time C18, instead of the Aging Time C17, to age non-local entries in its LE_ARP cache C16. For a client which is not connected to the LES, this object is defined to have the value 'false'. Corresponds to Initial State Parameter C19.") ibmOsaExpATMConfigServerATMAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 48), OctetString().subtype(subtypeSpec=ValueSizeConstraint(20, 20)).setFixedLength(20)).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigServerATMAddr.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigServerATMAddr.setDescription('The ATM address of the LAN Emulation Configuration Server (if known) or 0 (otherwise). In LAN Emulation MIB, the OCTET STRING is either 0 length or 20 octets. For OSA-ATM, this Address has been changed to a constant 20 octets, with the value 0 equivalent to the 0 length OCTET STRING.') ibmOsaExpATMConfigSource = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 49), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("gotAddressViaIlmi", 1), ("usedWellKnownAddress", 2), ("usedLecsPvc", 3), ("didNotUseLecs", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigSource.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigSource.setDescription('Indicates whether this LAN Emulation Client used the LAN Emulation Configuration Server, and, if so, what method it used to establish the Configuration Direct VCC') ibmOsaExpATMProxyClient = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 50), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMProxyClient.setStatus('current') if mibBuilder.loadTexts: 
ibmOsaExpATMProxyClient.setDescription('Indicates whether this client is acting as a proxy. Proxy clients are allowed to represent unregistered MAC addresses, and receive copies of LE_ARP_REQUEST frames for such addresses. Corresponds to Initial State Parameter C4.') ibmOsaExpATMLePDUOctetsInbound = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 51), Counter64()).setUnits('octets').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLePDUOctetsInbound.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLePDUOctetsInbound.setDescription('The number of Le PDU Octets received') ibmOsaExpATMNonErrLePDUDiscIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 52), Counter32()).setUnits('octets').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMNonErrLePDUDiscIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMNonErrLePDUDiscIn.setDescription('The number of Non Error Le PDU Octets received') ibmOsaExpATMErrLePDUDiscIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 53), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMErrLePDUDiscIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMErrLePDUDiscIn.setDescription('The number of Errored Le PDU Discards received') ibmOsaExpATMLePDUOctetsOutbound = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 54), Counter64()).setUnits('octets').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLePDUOctetsOutbound.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLePDUOctetsOutbound.setDescription('The number of Le PDU Discards sent') ibmOsaExpATMNonErrLePDUDiscOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 55), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMNonErrLePDUDiscOut.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMNonErrLePDUDiscOut.setDescription('The number of Non Error Le PDU Discards sent') ibmOsaExpATMErrLePDUDiscOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, # column 56 of the ATM LE client table: ibmOsaExpATMErrLePDUDiscOut (Counter32, read-only) — OID continues on next line
188, 1, 7, 1, 56), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMErrLePDUDiscOut.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMErrLePDUDiscOut.setDescription('The number of Errored Le PDU Discards sent') ibmOsaExpATMLeARPRequestsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 57), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLeARPRequestsOut.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLeARPRequestsOut.setDescription('The number of LE ARP Requests sent') ibmOsaExpATMLeARPRequestsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 58), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLeARPRequestsIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLeARPRequestsIn.setDescription('The number of LE ARP Requests received over the LUNI by this LAN Emulation Client. Requests may arrive on the Control Direct VCC or on the Control Distribute VCC, depending upon how the LES is implemented and the chances it has had for learning. This counter covers both VCCs.') ibmOsaExpATMLeARPRepliesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 59), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLeARPRepliesOut.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLeARPRepliesOut.setDescription('The number of LE ARP Responses sent over the LUNI by this LAN Emulation Client.') ibmOsaExpATMLeARPRepliesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 60), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMLeARPRepliesIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMLeARPRepliesIn.setDescription('The number of LE ARP Responses received over the LUNI by this LAN Emulation Client. This count includes all such replies, whether solicited or not. Replies may arrive on the Control Direct VCC or on the Control Distribute VCC, depending upon how the LES is implemented. 
This counter covers both VCCs.') ibmOsaExpATMControlFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 61), Counter32()).setUnits('packets').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlFramesOut.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlFramesOut.setDescription('The total number of control packets sent by this LAN Emulation Client over the LUNI.') ibmOsaExpATMControlFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 62), Counter32()).setUnits('packets').setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlFramesIn.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlFramesIn.setDescription('The total number of control packets received by this LAN Emulation Client over the LUNI.') ibmOsaExpATMSVCFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 63), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMSVCFailures.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMSVCFailures.setDescription('The total number of outgoing LAN Emulation SVCs which this client tried, but failed, to open; incoming LAN Emulation SVCs which this client tried, but failed to establish; and incoming LAN Emulation SVCs which this client rejected for protocol or security reasons.') ibmOsaExpATMConfigDirectIntfc = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 64), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigDirectIntfc.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigDirectIntfc.setDescription('The interface associated with the Configuration Direct VCC. If no Configuration Direct VCC exists, this object has the value 0. 
Otherwise, the objects ( ibmOsaExpATMConfigDirectIntfc, ibmOsaExpATMConfigDirectVPI, ibmOsaExpATMConfigDirectVCI) identify the circuit.') ibmOsaExpATMConfigDirectVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 65), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigDirectVPI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigDirectVPI.setDescription('If the Configuration Direct VCC exists, this object contains the VPI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMConfigDirectVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 66), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMConfigDirectVCI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMConfigDirectVCI.setDescription('If the Configuration Direct VCC exists, this object contains the VCI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMControlDirectIntfc = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 67), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlDirectIntfc.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlDirectIntfc.setDescription('The interface associated with the Control Direct VCC. If no Control Direct VCC exists, this object has the value 0. 
Otherwise, the objects ( ibmOsaExpATMConfigDirectIntfc, ibmOsaExpATMConfigDirectVPI, ibmOsaExpATMConfigDirectVCI) identify the circuit.') ibmOsaExpATMControlDirectVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 68), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlDirectVPI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlDirectVPI.setDescription('If the Control Direct VCC exists, this object contains the VPI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMControlDirectVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 69), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlDirectVCI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlDirectVCI.setDescription('If the Control Direct VCC exists, this object contains the VCI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMControlDistIntfc = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 70), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlDistIntfc.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlDistIntfc.setDescription('The interface associated with the Control Distribute VCC. If no Control Distribute VCC has been set up to this client, this object has the value 0. Otherwise, the objects ( ibmOsaExpATMControlDistIntfc, ibmOsaExpATMControlDistributeVPI. 
ibmOsaExpATMControlDistributeVCI) identify the circuit.') ibmOsaExpATMControlDistributeVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 71), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlDistributeVPI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlDistributeVPI.setDescription('If the Control Distribute VCC exists, this object contains the VPI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMControlDistributeVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 72), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMControlDistributeVCI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMControlDistributeVCI.setDescription('If the Control Distribute VCC exists, this object contains the VCI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object contains the value 0.') ibmOsaExpATMMulticastSendIntfc = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 73), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMulticastSendIntfc.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMulticastSendIntfc.setDescription('The interface associated with the Multicast Send VCC. If no Multicast Send VCC exists, this object has the value 0. 
Otherwise, the objects ( ibmOsaExpATMMulticastSendIntfc, ibmOsaExpATMMulticastSendVPI, ibmOsaExpATMMulticastSendVCI) identify the circuit.') ibmOsaExpATMMulticastSendVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 74), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMulticastSendVPI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMulticastSendVPI.setDescription('If the Multicast Send VCC exists, this object contains the VPI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMMulticastSendVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 75), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMulticastSendVCI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMulticastSendVCI.setDescription('If the Multicast Send VCC exists, this object contains the VCI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMMulticastFwdIntfc = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 76), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMulticastFwdIntfc.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMulticastFwdIntfc.setDescription('The interface associated with the Multicast Forward VCC. If no Multicast Forward VCC has been set up to this client, this object has the value 0. 
Otherwise, the objects ( ibmOsaExpATMMulticastFwdIntfc, ibmOsaExpATMMulticastForwardVPI, ibmOsaExpATMMulticastForwardVCI) identify the circuit.') ibmOsaExpATMMulticastForwardVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 77), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMulticastForwardVPI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMulticastForwardVPI.setDescription('If the Multicast Forward VCC exists, this object contains the VPI which identifies that VCC at the point where it connects to this LE client. Otherwise, this object has the value 0.') ibmOsaExpATMMulticastForwardVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 188, 1, 7, 1, 78), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: ibmOsaExpATMMulticastForwardVCI.setStatus('current') if mibBuilder.loadTexts: ibmOsaExpATMMulticastForwardVCI.setDescription('If the Multicast Forward VCC exists, this object contains the VCI which identifies that VCC at the point where it connects to this LE client. 
Otherwise, this object has the value 0.') ibmOSAMibCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 1)) ibmOSAMibGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2)) ibmOSAMibCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 1, 1)).setObjects(("IBM-OSA-MIB", "ibmOSAExpChannelGroup"), ("IBM-OSA-MIB", "ibmOSAExpPerfGroup"), ("IBM-OSA-MIB", "ibmOSAExpPEGroup"), ("IBM-OSA-MIB", "ibmOSAExpEthGroup"), ("IBM-OSA-MIB", "ibmOSAExpTRGroup"), ("IBM-OSA-MIB", "ibmOSAExpATMGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAMibCompliance = ibmOSAMibCompliance.setStatus('current') if mibBuilder.loadTexts: ibmOSAMibCompliance.setDescription('The compliance statement for the OSA DIrect SNMP product.') ibmOSAExpChannelGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2, 1)).setObjects(("IBM-OSA-MIB", "ibmOSAExpChannelNumber"), ("IBM-OSA-MIB", "ibmOSAExpChannelType"), ("IBM-OSA-MIB", "ibmOSAExpChannelHdwLevel"), ("IBM-OSA-MIB", "ibmOSAExpChannelSubType"), ("IBM-OSA-MIB", "ibmOSAExpChannelShared"), ("IBM-OSA-MIB", "ibmOSAExpChannelNodeDesc"), ("IBM-OSA-MIB", "ibmOSAExpChannelProcCodeLevel"), ("IBM-OSA-MIB", "ibmOSAExpChannelPCIBusUtil1Min"), ("IBM-OSA-MIB", "ibmOSAExpChannelProcUtil1Min"), ("IBM-OSA-MIB", "ibmOSAExpChannelPCIBusUtil5Min"), ("IBM-OSA-MIB", "ibmOSAExpChannelProcUtil5Min"), ("IBM-OSA-MIB", "ibmOSAExpChannelPCIBusUtilHour"), ("IBM-OSA-MIB", "ibmOSAExpChannelProcUtilHour")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAExpChannelGroup = ibmOSAExpChannelGroup.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpChannelGroup.setDescription('This group comprises those objects that are related to OSA-Express Channel support.') ibmOSAExpPerfGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2, 2)).setObjects(("IBM-OSA-MIB", "ibmOSAExpPerfDataLP0"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP1"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP2"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP3"), ("IBM-OSA-MIB", # per-logical-partition performance objects LP0-LP15
"ibmOSAExpPerfDataLP4"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP5"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP6"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP7"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP8"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP9"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP10"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP11"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP12"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP13"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP14"), ("IBM-OSA-MIB", "ibmOSAExpPerfDataLP15")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAExpPerfGroup = ibmOSAExpPerfGroup.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPerfGroup.setDescription('This group comprises those objects that are related to OSA-Express Performance data support.') ibmOSAExpPEGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2, 3)).setObjects(("IBM-OSA-MIB", "ibmOSAExpPEMaxSizeArpCache"), ("IBM-OSA-MIB", "ibmOSAExpPEArpPendingEntries"), ("IBM-OSA-MIB", "ibmOSAExpPEArpActiveEntries"), ("IBM-OSA-MIB", "ibmOSAExpPEIPEntries"), ("IBM-OSA-MIB", "ibmOSAExpPEMulticastEntries"), ("IBM-OSA-MIB", "ibmOSAExpPEMulticastData")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAExpPEGroup = ibmOSAExpPEGroup.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpPEGroup.setDescription('This group comprises those objects that are related to OSA-Express PE data support.') ibmOSAExpEthGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2, 4)).setObjects(("IBM-OSA-MIB", "ibmOsaExpEthPortNumber"), ("IBM-OSA-MIB", "ibmOsaExpEthPortType"), ("IBM-OSA-MIB", "ibmOsaExpEthLanTrafficState"), ("IBM-OSA-MIB", "ibmOsaExpEthServiceMode"), ("IBM-OSA-MIB", "ibmOsaExpEthDisabledStatus"), ("IBM-OSA-MIB", "ibmOsaExpEthConfigName"), ("IBM-OSA-MIB", "ibmOsaExpEthConfigSpeedMode"), ("IBM-OSA-MIB", "ibmOsaExpEthActiveSpeedMode"), ("IBM-OSA-MIB", "ibmOsaExpEthMacAddrActive"), ("IBM-OSA-MIB", "ibmOsaExpEthMacAddrBurntIn"), ("IBM-OSA-MIB", "ibmOsaExpEthUserData"), ("IBM-OSA-MIB", "ibmOsaExpEthOutPackets"), # OSA-Express Ethernet port objects
("IBM-OSA-MIB", "ibmOsaExpEthInPackets"), ("IBM-OSA-MIB", "ibmOsaExpEthInGroupFrames"), ("IBM-OSA-MIB", "ibmOsaExpEthInBroadcastFrames"), ("IBM-OSA-MIB", "ibmOsaExpEthPortName"), ("IBM-OSA-MIB", "ibmOsaExpEthInUnknownIPFrames"), ("IBM-OSA-MIB", "ibmOsaExpEthGroupAddrTable")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAExpEthGroup = ibmOSAExpEthGroup.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpEthGroup.setDescription('This group comprises those objects that are related to OSA-Express Fast Ethernet and Gigabit features only') ibmOSAExpTRGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2, 5)).setObjects(("IBM-OSA-MIB", "ibmOsaExpTRPortNumber"), ("IBM-OSA-MIB", "ibmOsaExpTRPortType"), ("IBM-OSA-MIB", "ibmOsaExpTRLanTrafficState"), ("IBM-OSA-MIB", "ibmOsaExpTRServiceMode"), ("IBM-OSA-MIB", "ibmOsaExpTRDisabledStatus"), ("IBM-OSA-MIB", "ibmOsaExpTRConfigName"), ("IBM-OSA-MIB", "ibmOsaExpTRMacAddrActive"), ("IBM-OSA-MIB", "ibmOsaExpTRMacAddrBurntIn"), ("IBM-OSA-MIB", "ibmOsaExpTRConfigSpeedMode"), ("IBM-OSA-MIB", "ibmOsaExpTRActiveSpeedMode"), ("IBM-OSA-MIB", "ibmOsaExpTRUserData"), ("IBM-OSA-MIB", "ibmOsaExpTRPortName"), ("IBM-OSA-MIB", "ibmOsaExpTRGroupAddrTable"), ("IBM-OSA-MIB", "ibmOsaExpTRFunctionalAddr"), ("IBM-OSA-MIB", "ibmOsaExpTRRingStatus"), ("IBM-OSA-MIB", "ibmOsaExpTRAllowAccessPriority"), ("IBM-OSA-MIB", "ibmOsaExpTREarlyTokenRelease"), ("IBM-OSA-MIB", "ibmOsaExpTRBeaconingAddress"), ("IBM-OSA-MIB", "ibmOsaExpTRUpstreamNeighbor"), ("IBM-OSA-MIB", "ibmOsaExpTRRingState"), ("IBM-OSA-MIB", "ibmOsaExpTRRingOpenStatus"), ("IBM-OSA-MIB", "ibmOsaExpTRPacketsTransmitted"), ("IBM-OSA-MIB", "ibmOsaExpTRPacketsReceived"), ("IBM-OSA-MIB", "ibmOsaExpTRLineErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRBurstErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRACErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRAbortTransErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRInternalErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRLostFrameErrorCount"), ("IBM-OSA-MIB", # Token Ring port and error-counter objects
"ibmOsaExpTRRcvCongestionCount"), ("IBM-OSA-MIB", "ibmOsaExpTRFrameCopyErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRTokenErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRFullDuplexErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRSoftErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRHardErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRSignalLossErrorCount"), ("IBM-OSA-MIB", "ibmOsaExpTRTransmitBeaconCount"), ("IBM-OSA-MIB", "ibmOsaExpTRRecoveryCounter"), ("IBM-OSA-MIB", "ibmOsaExpTRLobeWireFaultCount"), ("IBM-OSA-MIB", "ibmOsaExpTRRemoveReceivedCount"), ("IBM-OSA-MIB", "ibmOsaExpTRSingleStationCount")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAExpTRGroup = ibmOSAExpTRGroup.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpTRGroup.setDescription('This group comprises those objects that are related to OSA-Express Token Ring feature only') ibmOSAExpATMGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2, 6, 188, 2, 2, 7)).setObjects(("IBM-OSA-MIB", "ibmOsaExpATMPortNumber"), ("IBM-OSA-MIB", "ibmOsaExpATMPortType"), ("IBM-OSA-MIB", "ibmOsaExpATMLanTrafficState"), ("IBM-OSA-MIB", "ibmOsaExpATMServiceMode"), ("IBM-OSA-MIB", "ibmOsaExpATMDisabledStatus"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigName"), ("IBM-OSA-MIB", "ibmOsaExpATMMacAddrActive"), ("IBM-OSA-MIB", "ibmOsaExpATMMacAddrBurntIn"), ("IBM-OSA-MIB", "ibmOsaExpATMUserData"), ("IBM-OSA-MIB", "ibmOsaExpATMPortName"), ("IBM-OSA-MIB", "ibmOsaExpATMGroupMacAddrTable"), ("IBM-OSA-MIB", "ibmOsaExpATMIBMEnhancedMode"), ("IBM-OSA-MIB", "ibmOsaExpATMBestEffortPeakRate"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigMode"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigLanType"), ("IBM-OSA-MIB", "ibmOsaExpATMActualLanType"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigMaxDataFrmSz"), ("IBM-OSA-MIB", "ibmOsaExpATMActualMaxDataFrmSz"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigELANName"), ("IBM-OSA-MIB", "ibmOsaExpATMActualELANName"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigLESATMAddress"), ("IBM-OSA-MIB", "ibmOsaExpATMActualLESATMAddress"), ("IBM-OSA-MIB", "ibmOsaExpATMControlTimeout"), # ATM LAN Emulation client objects
("IBM-OSA-MIB", "ibmOsaExpATMMaxUnknownFrameCount"), ("IBM-OSA-MIB", "ibmOsaExpATMMaxUnknownFrameTime"), ("IBM-OSA-MIB", "ibmOsaExpATMVCCTimeoutPeriod"), ("IBM-OSA-MIB", "ibmOsaExpATMMaxRetryCount"), ("IBM-OSA-MIB", "ibmOsaExpATMAgingTime"), ("IBM-OSA-MIB", "ibmOsaExpATMForwardDelayTime"), ("IBM-OSA-MIB", "ibmOsaExpATMExpectedARPRespTime"), ("IBM-OSA-MIB", "ibmOsaExpATMFlushTimeout"), ("IBM-OSA-MIB", "ibmOsaExpATMPathSwitchingDelay"), ("IBM-OSA-MIB", "ibmOsaExpATMLocalSegmentID"), ("IBM-OSA-MIB", "ibmOsaExpATMMltcstSendVCCType"), ("IBM-OSA-MIB", "ibmOsaExpATMMltcstSendVCCAvgRate"), ("IBM-OSA-MIB", "ibmOsaExpATMMcastSendVCCPeakRate"), ("IBM-OSA-MIB", "ibmOsaExpATMConnectCompleteTimer"), ("IBM-OSA-MIB", "ibmOsaExpATMClientATMAddress"), ("IBM-OSA-MIB", "ibmOsaExpATMClientIdentifier"), ("IBM-OSA-MIB", "ibmOsaExpATMClientCurrentState"), ("IBM-OSA-MIB", "ibmOsaExpATMLastFailureRespCode"), ("IBM-OSA-MIB", "ibmOsaExpATMLastFailureState"), ("IBM-OSA-MIB", "ibmOsaExpATMProtocol"), ("IBM-OSA-MIB", "ibmOsaExpATMLeVersion"), ("IBM-OSA-MIB", "ibmOsaExpATMTopologyChange"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigServerATMAddr"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigSource"), ("IBM-OSA-MIB", "ibmOsaExpATMProxyClient"), ("IBM-OSA-MIB", "ibmOsaExpATMLePDUOctetsInbound"), ("IBM-OSA-MIB", "ibmOsaExpATMNonErrLePDUDiscIn"), ("IBM-OSA-MIB", "ibmOsaExpATMErrLePDUDiscIn"), ("IBM-OSA-MIB", "ibmOsaExpATMLePDUOctetsOutbound"), ("IBM-OSA-MIB", "ibmOsaExpATMNonErrLePDUDiscOut"), ("IBM-OSA-MIB", "ibmOsaExpATMErrLePDUDiscOut"), ("IBM-OSA-MIB", "ibmOsaExpATMLeARPRequestsOut"), ("IBM-OSA-MIB", "ibmOsaExpATMLeARPRequestsIn"), ("IBM-OSA-MIB", "ibmOsaExpATMLeARPRepliesOut"), ("IBM-OSA-MIB", "ibmOsaExpATMLeARPRepliesIn"), ("IBM-OSA-MIB", "ibmOsaExpATMControlFramesOut"), ("IBM-OSA-MIB", "ibmOsaExpATMControlFramesIn"), ("IBM-OSA-MIB", "ibmOsaExpATMSVCFailures"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigDirectIntfc"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigDirectVPI"), ("IBM-OSA-MIB", "ibmOsaExpATMConfigDirectVCI"), # VCC identification columns
("IBM-OSA-MIB", "ibmOsaExpATMControlDirectIntfc"), ("IBM-OSA-MIB", "ibmOsaExpATMControlDirectVPI"), ("IBM-OSA-MIB", "ibmOsaExpATMControlDirectVCI"), ("IBM-OSA-MIB", "ibmOsaExpATMControlDistIntfc"), ("IBM-OSA-MIB", "ibmOsaExpATMControlDistributeVPI"), ("IBM-OSA-MIB", "ibmOsaExpATMControlDistributeVCI"), ("IBM-OSA-MIB", "ibmOsaExpATMMulticastSendIntfc"), ("IBM-OSA-MIB", "ibmOsaExpATMMulticastSendVPI"), ("IBM-OSA-MIB", "ibmOsaExpATMMulticastSendVCI"), ("IBM-OSA-MIB", "ibmOsaExpATMMulticastFwdIntfc"), ("IBM-OSA-MIB", "ibmOsaExpATMMulticastForwardVPI"), ("IBM-OSA-MIB", "ibmOsaExpATMMulticastForwardVCI")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ibmOSAExpATMGroup = ibmOSAExpATMGroup.setStatus('current') if mibBuilder.loadTexts: ibmOSAExpATMGroup.setDescription('This group comprises those objects that are related to OSA-Express ATM LAN Emulation feature only') mibBuilder.exportSymbols("IBM-OSA-MIB", ibmOsaExpATMControlFramesIn=ibmOsaExpATMControlFramesIn, ibmOSAExpChannelGroup=ibmOSAExpChannelGroup, ibmOsaExpTRConfigName=ibmOsaExpTRConfigName, ibmOsaExpATMNonErrLePDUDiscOut=ibmOsaExpATMNonErrLePDUDiscOut, ibmOsaExpTRSingleStationCount=ibmOsaExpTRSingleStationCount, ibmOSAExpPerfDataLP7=ibmOSAExpPerfDataLP7, ibmOsaExpTRTransmitBeaconCount=ibmOsaExpTRTransmitBeaconCount, ibmOsaExpTRUpstreamNeighbor=ibmOsaExpTRUpstreamNeighbor, ibmOsaExpATMControlTimeout=ibmOsaExpATMControlTimeout, ibmOSAMibConformance=ibmOSAMibConformance, ibmOsaExpEthOutPackets=ibmOsaExpEthOutPackets, ibmOsaExpEthConfigName=ibmOsaExpEthConfigName, ibmOsaExpTRInternalErrorCount=ibmOsaExpTRInternalErrorCount, ibmOsaExpATMMulticastSendVCI=ibmOsaExpATMMulticastSendVCI, ibmOsaExpATMControlDirectIntfc=ibmOsaExpATMControlDirectIntfc, ibm=ibm, ibmOsaExpEthUserData=ibmOsaExpEthUserData, ibmOsaExpATMGroupMacAddrTable=ibmOsaExpATMGroupMacAddrTable, ibmOSAExpPEArpPendingEntries=ibmOSAExpPEArpPendingEntries, ibmOsaExpTRUserData=ibmOsaExpTRUserData, ibmOSAExpPETable=ibmOSAExpPETable, 
ibmOsaExpATMMacAddrBurntIn=ibmOsaExpATMMacAddrBurntIn, ibmOsaExpATMPortName=ibmOsaExpATMPortName, ibmOsaExpATMMulticastForwardVCI=ibmOsaExpATMMulticastForwardVCI, ibmOsaExpTRPortName=ibmOsaExpTRPortName, ibmOsaExpTRPortNumber=ibmOsaExpTRPortNumber, ibmOsaExpTRTokenErrorCount=ibmOsaExpTRTokenErrorCount, ibmOsaExpATMControlDistributeVPI=ibmOsaExpATMControlDistributeVPI, ibmOSAExpPerfDataLP15=ibmOSAExpPerfDataLP15, ibmOsaExpATMMulticastFwdIntfc=ibmOsaExpATMMulticastFwdIntfc, ibmOsaExpTRGroupAddrTable=ibmOsaExpTRGroupAddrTable, ibmOsaExpATMProxyClient=ibmOsaExpATMProxyClient, ibmOSAExpPerfTable=ibmOSAExpPerfTable, ibmOSAExpChannelEntry=ibmOSAExpChannelEntry, ibmOSAExpTRGroup=ibmOSAExpTRGroup, ibmOsaExpEthMacAddrBurntIn=ibmOsaExpEthMacAddrBurntIn, ibmOSAExpChannelSubType=ibmOSAExpChannelSubType, ibmOsaExpATMControlDistIntfc=ibmOsaExpATMControlDistIntfc, ibmOsaExpATMAgingTime=ibmOsaExpATMAgingTime, ibmOsaExpTRHardErrorCount=ibmOsaExpTRHardErrorCount, ibmOsaExpTRAbortTransErrorCount=ibmOsaExpTRAbortTransErrorCount, ibmOsaExpTRActiveSpeedMode=ibmOsaExpTRActiveSpeedMode, ibmOSAExpChannelTable=ibmOSAExpChannelTable, ibmOsaExpATMConfigServerATMAddr=ibmOsaExpATMConfigServerATMAddr, ibmOSAExpTRPortEntry=ibmOSAExpTRPortEntry, ibmOsaExpTREarlyTokenRelease=ibmOsaExpTREarlyTokenRelease, ibmOSAExpPerfDataLP13=ibmOSAExpPerfDataLP13, ibmOsaExpATMTopologyChange=ibmOsaExpATMTopologyChange, ibmOsaExpEthLanTrafficState=ibmOsaExpEthLanTrafficState, ibmOSAExpPEMulticastEntries=ibmOSAExpPEMulticastEntries, ibmOsaExpEthPortNumber=ibmOsaExpEthPortNumber, ibmOsaExpTRMacAddrActive=ibmOsaExpTRMacAddrActive, ibmOSAExpPerfGroup=ibmOSAExpPerfGroup, ibmOsaExpTRSignalLossErrorCount=ibmOsaExpTRSignalLossErrorCount, ibmOSAExpPerfDataLP11=ibmOSAExpPerfDataLP11, ibmOsaExpATMVCCTimeoutPeriod=ibmOsaExpATMVCCTimeoutPeriod, ibmOSAExpPEMaxSizeArpCache=ibmOSAExpPEMaxSizeArpCache, ibmOsaExpTRRecoveryCounter=ibmOsaExpTRRecoveryCounter, ibmOsaExpTRPacketsTransmitted=ibmOsaExpTRPacketsTransmitted, 
ibmOsaExpATMLePDUOctetsOutbound=ibmOsaExpATMLePDUOctetsOutbound, ibmOsaExpATMConfigLanType=ibmOsaExpATMConfigLanType, ibmOsaExpTRConfigSpeedMode=ibmOsaExpTRConfigSpeedMode, ibmOSAExpPerfDataLP4=ibmOSAExpPerfDataLP4, ibmOSAMib=ibmOSAMib, ibmOsaExpEthInGroupFrames=ibmOsaExpEthInGroupFrames, ibmOSAExpPerfDataLP6=ibmOSAExpPerfDataLP6, ibmOSAExpChannelProcUtilHour=ibmOSAExpChannelProcUtilHour, ibmOsaExpATMPortNumber=ibmOsaExpATMPortNumber, ibmOSAExpChannelHdwLevel=ibmOSAExpChannelHdwLevel, ibmOsaExpEthInUnknownIPFrames=ibmOsaExpEthInUnknownIPFrames, ibmOSAExpChannelType=ibmOSAExpChannelType, ibmOsaExpATMUserData=ibmOsaExpATMUserData, ibmOsaExpATMForwardDelayTime=ibmOsaExpATMForwardDelayTime, ibmOsaExpEthActiveSpeedMode=ibmOsaExpEthActiveSpeedMode, ibmOSAExpPEGroup=ibmOSAExpPEGroup, ibmOSAExpPEEntry=ibmOSAExpPEEntry, ibmOSAExpTRPortTable=ibmOSAExpTRPortTable, ibmOsaExpATMActualELANName=ibmOsaExpATMActualELANName, ibmOsaExpATMMacAddrActive=ibmOsaExpATMMacAddrActive, ibmOsaExpEthConfigSpeedMode=ibmOsaExpEthConfigSpeedMode, ibmOsaExpTRBeaconingAddress=ibmOsaExpTRBeaconingAddress, ibmOsaExpTRRemoveReceivedCount=ibmOsaExpTRRemoveReceivedCount, ibmOsaExpATMProtocol=ibmOsaExpATMProtocol, ibmOsaExpATMNonErrLePDUDiscIn=ibmOsaExpATMNonErrLePDUDiscIn, ibmOsaExpATMErrLePDUDiscOut=ibmOsaExpATMErrLePDUDiscOut, ibmOsaExpATMControlFramesOut=ibmOsaExpATMControlFramesOut, ibmOsaExpTRACErrorCount=ibmOsaExpTRACErrorCount, ibmOsaExpTRMacAddrBurntIn=ibmOsaExpTRMacAddrBurntIn, ibmOsaExpATMMaxUnknownFrameTime=ibmOsaExpATMMaxUnknownFrameTime, ibmOsaExpTRDisabledStatus=ibmOsaExpTRDisabledStatus, ibmOSAExpChannelShared=ibmOSAExpChannelShared, ibmOsaExpEthDisabledStatus=ibmOsaExpEthDisabledStatus, ibmOsaExpEthMacAddrActive=ibmOsaExpEthMacAddrActive, ibmOSAExpEthPortTable=ibmOSAExpEthPortTable, ibmOsaExpTRLineErrorCount=ibmOsaExpTRLineErrorCount, ibmOSAExpEthPortEntry=ibmOSAExpEthPortEntry, ibmOsaExpEthInPackets=ibmOsaExpEthInPackets, ibmOSAExpPerfDataLP8=ibmOSAExpPerfDataLP8, 
ibmOsaExpTRRingOpenStatus=ibmOsaExpTRRingOpenStatus, ibmOsaExpEthPortName=ibmOsaExpEthPortName, ibmOsaExpATMExpectedARPRespTime=ibmOsaExpATMExpectedARPRespTime, ibmOsaExpTRFunctionalAddr=ibmOsaExpTRFunctionalAddr, ibmOsaExpATMLeARPRequestsIn=ibmOsaExpATMLeARPRequestsIn, ibmOsaExpATMConfigDirectIntfc=ibmOsaExpATMConfigDirectIntfc, ibmOSAExpPerfDataLP1=ibmOSAExpPerfDataLP1, ibmOsaExpATMConfigMaxDataFrmSz=ibmOsaExpATMConfigMaxDataFrmSz, ibmOsaExpATMConfigName=ibmOsaExpATMConfigName, ibmOsaExpATMLeARPRequestsOut=ibmOsaExpATMLeARPRequestsOut, ibmOsaExpATMMulticastForwardVPI=ibmOsaExpATMMulticastForwardVPI, ibmOSAExpChannelPCIBusUtilHour=ibmOSAExpChannelPCIBusUtilHour, ibmOsaExpATMConfigDirectVPI=ibmOsaExpATMConfigDirectVPI, ibmOsaExpTRLostFrameErrorCount=ibmOsaExpTRLostFrameErrorCount, ibmOsaExpTRRingState=ibmOsaExpTRRingState, ibmProd=ibmProd, ibmOsaExpTRLobeWireFaultCount=ibmOsaExpTRLobeWireFaultCount, ibmOSAExpPerfDataLP2=ibmOSAExpPerfDataLP2, ibmOSAExpPEMulticastData=ibmOSAExpPEMulticastData, ibmOsaExpATMLocalSegmentID=ibmOsaExpATMLocalSegmentID, ibmOsaExpATMMulticastSendVPI=ibmOsaExpATMMulticastSendVPI, ibmOSAExpATMGroup=ibmOSAExpATMGroup, ibmOsaExpEthInBroadcastFrames=ibmOsaExpEthInBroadcastFrames, ibmOsaExpATMConfigMode=ibmOsaExpATMConfigMode, ibmOsaExpATMMltcstSendVCCType=ibmOsaExpATMMltcstSendVCCType, ibmOSAExpEthGroup=ibmOSAExpEthGroup, ibmOsaExpATMConfigLESATMAddress=ibmOsaExpATMConfigLESATMAddress, ibmOsaExpTRRcvCongestionCount=ibmOsaExpTRRcvCongestionCount, ibmOsaExpTRPortType=ibmOsaExpTRPortType, ibmOsaExpTRSoftErrorCount=ibmOsaExpTRSoftErrorCount, PYSNMP_MODULE_ID=ibmOSAMib, ibmOsaExpTRRingStatus=ibmOsaExpTRRingStatus, ibmOsaExpATMControlDirectVPI=ibmOsaExpATMControlDirectVPI, ibmOsaExpATMSVCFailures=ibmOsaExpATMSVCFailures, ibmOsaExpATMPortType=ibmOsaExpATMPortType, ibmOsaExpATMMulticastSendIntfc=ibmOsaExpATMMulticastSendIntfc, ibmOsaExpTRBurstErrorCount=ibmOsaExpTRBurstErrorCount, ibmOsaExpATMClientCurrentState=ibmOsaExpATMClientCurrentState, 
ibmOSAExpPEArpActiveEntries=ibmOSAExpPEArpActiveEntries, ibmOSAExpPerfDataLP9=ibmOSAExpPerfDataLP9, ibmOSAExpATMPortTable=ibmOSAExpATMPortTable, ibmOsaExpATMMltcstSendVCCAvgRate=ibmOsaExpATMMltcstSendVCCAvgRate, ibmOsaExpATMLeARPRepliesOut=ibmOsaExpATMLeARPRepliesOut, ibmOsaExpATMServiceMode=ibmOsaExpATMServiceMode, ibmOSAExpChannelProcCodeLevel=ibmOSAExpChannelProcCodeLevel, ibmOsaExpTRLanTrafficState=ibmOsaExpTRLanTrafficState, ibmOsaExpATMMaxUnknownFrameCount=ibmOsaExpATMMaxUnknownFrameCount, ibmOSAExpChannelProcUtil5Min=ibmOSAExpChannelProcUtil5Min, ibmOsaExpATMBestEffortPeakRate=ibmOsaExpATMBestEffortPeakRate, ibmOSAExpPerfDataLP5=ibmOSAExpPerfDataLP5, ibmOSAExpPEIPEntries=ibmOSAExpPEIPEntries, ibmOsaExpATMLanTrafficState=ibmOsaExpATMLanTrafficState, ibmOsaExpTRFullDuplexErrorCount=ibmOsaExpTRFullDuplexErrorCount, ibmOSAExpChannelNumber=ibmOSAExpChannelNumber, ibmOsaExpATMLastFailureState=ibmOsaExpATMLastFailureState, ibmOsaExpATMClientIdentifier=ibmOsaExpATMClientIdentifier, ibmOsaExpTRServiceMode=ibmOsaExpTRServiceMode, ibmOsaExpEthPortType=ibmOsaExpEthPortType, ibmOsaExpATMIBMEnhancedMode=ibmOsaExpATMIBMEnhancedMode, ibmOSAExpPerfDataLP0=ibmOSAExpPerfDataLP0, ibmOsaExpTRFrameCopyErrorCount=ibmOsaExpTRFrameCopyErrorCount, ibmOsaExpATMControlDirectVCI=ibmOsaExpATMControlDirectVCI, ibmOsaExpTRAllowAccessPriority=ibmOsaExpTRAllowAccessPriority, ibmOsaExpATMFlushTimeout=ibmOsaExpATMFlushTimeout, ibmOsaExpATMConfigELANName=ibmOsaExpATMConfigELANName, ibmOSAExpPerfDataLP10=ibmOSAExpPerfDataLP10, ibmOsaExpEthServiceMode=ibmOsaExpEthServiceMode, ibmOSAMibCompliance=ibmOSAMibCompliance, ibmOSAExpChannelNodeDesc=ibmOSAExpChannelNodeDesc, ibmOSAExpPerfDataLP12=ibmOSAExpPerfDataLP12, ibmOsaExpTRPacketsReceived=ibmOsaExpTRPacketsReceived, ibmOsaExpATMMaxRetryCount=ibmOsaExpATMMaxRetryCount, ibmOsaExpATMActualLESATMAddress=ibmOsaExpATMActualLESATMAddress, ibmOSAMibCompliances=ibmOSAMibCompliances, ibmOSAExpChannelProcUtil1Min=ibmOSAExpChannelProcUtil1Min, 
ibmOsaExpATMActualMaxDataFrmSz=ibmOsaExpATMActualMaxDataFrmSz, ibmOsaExpEthGroupAddrTable=ibmOsaExpEthGroupAddrTable, ibmOsaExpATMLeVersion=ibmOsaExpATMLeVersion, ibmOsaExpATMConfigDirectVCI=ibmOsaExpATMConfigDirectVCI, ibmOSAExpATMPortEntry=ibmOSAExpATMPortEntry, ibmOsaExpATMControlDistributeVCI=ibmOsaExpATMControlDistributeVCI, ibmOsaExpATMConnectCompleteTimer=ibmOsaExpATMConnectCompleteTimer, ibmOsaExpATMDisabledStatus=ibmOsaExpATMDisabledStatus, ibmOSAExpChannelPCIBusUtil5Min=ibmOSAExpChannelPCIBusUtil5Min, ibmOsaExpATMClientATMAddress=ibmOsaExpATMClientATMAddress, ibmOsaExpATMLePDUOctetsInbound=ibmOsaExpATMLePDUOctetsInbound, ibmOsaExpATMConfigSource=ibmOsaExpATMConfigSource, ibmOsaExpATMMcastSendVCCPeakRate=ibmOsaExpATMMcastSendVCCPeakRate, ibmOsaExpATMErrLePDUDiscIn=ibmOsaExpATMErrLePDUDiscIn, ibmOsaExpATMLastFailureRespCode=ibmOsaExpATMLastFailureRespCode, ibmOSAExpPerfDataLP3=ibmOSAExpPerfDataLP3, ibmOSAExpChannelPCIBusUtil1Min=ibmOSAExpChannelPCIBusUtil1Min, ibmOSAExpPerfEntry=ibmOSAExpPerfEntry, ibmOsaExpATMLeARPRepliesIn=ibmOsaExpATMLeARPRepliesIn, ibmOsaExpATMActualLanType=ibmOsaExpATMActualLanType, ibmOsaExpATMPathSwitchingDelay=ibmOsaExpATMPathSwitchingDelay, ibmOSAMibObjects=ibmOSAMibObjects, ibmOSAExpPerfDataLP14=ibmOSAExpPerfDataLP14, ibmOSAMibGroups=ibmOSAMibGroups)
195.429043
10,333
0.792949
13,923
118,430
6.742728
0.089349
0.048062
0.084108
0.008394
0.51968
0.420339
0.381843
0.366525
0.344422
0.322916
0
0.046319
0.09289
118,430
605
10,334
195.752066
0.827551
0.002668
0
0.013423
0
0.20302
0.386512
0.051657
0
0
0.000051
0
0
1
0
false
0.001678
0.011745
0
0.011745
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
20e2f6675f768e26b7817f0a0c86bd8f61cf7220
995
py
Python
farmers/api/market/models.py
BuildForSDG/Farmers-Edge-backend
4924c7f73f3e84698fde6a3d8a893c1ca282ed88
[ "MIT" ]
2
2020-05-17T18:20:50.000Z
2021-04-20T21:42:43.000Z
farmers/api/market/models.py
BuildForSDG/Farmers-Edge-backend
4924c7f73f3e84698fde6a3d8a893c1ca282ed88
[ "MIT" ]
19
2020-05-14T14:36:31.000Z
2022-03-12T00:34:40.000Z
farmers/api/market/models.py
BuildForSDG/Farmers-Edge-backend
4924c7f73f3e84698fde6a3d8a893c1ca282ed88
[ "MIT" ]
1
2020-05-20T20:09:35.000Z
2020-05-20T20:09:35.000Z
from django.db import models from django.utils import timezone from django.conf import settings from django.contrib.auth.models import User class Product(models.Model): # user = models.ForeignKey(settings.AUTH_USER_MODEL, default=None, on_delete=models.CASCADE) retailerEmail = models.EmailField(max_length=254) product = models.CharField(max_length=200) quantity = models.CharField(max_length=200, null=True) totalCost = models.CharField(max_length=100, null=True) ready = models.BooleanField(default=False) def __str__(self): return self.product class Order(models.Model): # user = models.ForeignKey(settings.AUTH_USER_MODEL, default=None, on_delete=models.CASCADE) productName = models.CharField(max_length=100, null=True) totalCost = models.CharField(max_length=200) quantity = models.CharField(max_length=100, null=True) waitTime = models.CharField(max_length=100, null=True) def __str__(self): return self.productName
38.269231
96
0.756784
131
995
5.580153
0.328244
0.098495
0.172367
0.229822
0.644323
0.585499
0.585499
0.391245
0.391245
0.391245
0
0.028169
0.143719
995
25
97
39.8
0.829812
0.18191
0
0.105263
0
0
0
0
0
0
0
0
0
1
0.105263
false
0
0.210526
0.105263
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
3
20e3799a103413d1ebae2de8264413c623950634
17,497
py
Python
tsconto.py
chkastorm/tsconto
fd705fc5484a3c6ee3766b0e341c18ddc70bd089
[ "BSD-2-Clause" ]
null
null
null
tsconto.py
chkastorm/tsconto
fd705fc5484a3c6ee3766b0e341c18ddc70bd089
[ "BSD-2-Clause" ]
null
null
null
tsconto.py
chkastorm/tsconto
fd705fc5484a3c6ee3766b0e341c18ddc70bd089
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/python3 ################################################################################ ### ### ### Script : Telnet-Simulated-CONsole shortcut TO consoles of guests in GNS3 ### ### Abbreviation : tsconto ### ### Author : Kastor M. ### ### Modified : Kastor M. ### ### Version : 1.0.3 ### ### Date : Sun 10 Apr 2022 03:20:16 PM EDT ### ### ### ################################################################################ import os import sys import json import datetime import subprocess from pprint import pprint db_name_append_datetime = datetime.datetime.now().strftime("%Y%m%d%H%M%S") db_folder_at_home_directory = os.path.expanduser("~/.rhpn-gns3-stsc/") db_at_user_home_directory = os.path.expanduser("~/.rhpn-gns3-stsc/dict-GNS3-NEs-list.json") def message_user_manual(): print("\n#############\n### Usage ###\n#############\n") print("=============================================================== Connect to console ===============================================================\n") print("tsconto connect < Registered Hostname > ::: Connect to the Telnet-Simulated Console of a node in GNS3") print("\n") print("============================================================== Database Operations ===============================================================\n") print("tsconto database list ::: List out contents of the existing Database") print("tsconto database list checkpoint < Filename > ::: List out contents of the Database at a Specific Checkpoint") print("tsconto database checkpoint ::: List out ALL Checkpoints of the Database") print("tsconto database revert from < Filename > ::: Revert the Database status back to the Reference Point") print("tsconto database clean [ --all ] ::: Clean ALL Database Checkpoints [ Deep Clean including the in-force Database ]") print("tsconto database scan ::: Automatically scan over a specific .gns3 file for a project and import ALL nodes to the DB") print("tsconto database add < Hostname > < Port Number > ::: To add a 
node manually") print("tsconto database delete < Hostname > ::: To delete a node manually from the DB\n") def message_database_empty(): print("\nIn-Force Database Empty.\nPlease register at least one node to the in-force Database.\n\nFormat : tsconto database add < Hostname > < Port Number >\n///OR/// tsconto database scan\n") def message_database_checkpoint_empty(): print("\nCheckpoint Empty. Checkpoint will be generated automatically when registering node(s).\n\nFormat : tsconto database add < Hostname > < Port Number >\n///OR/// tsconto database scan\n") def message_database_checkpoint_not_found(): print("\nCheckpoint < Filename > CANNOT be found from the Registry. Please check and try again.\n\nFormat : tsconto database list checkpoint < Filename >\n e.g. tsconto database list checkpoint dict-GNS3-NEs-list.json.20220116222106\n") def message_hostname_not_found(): print("\nThe input < Hostname > CANNOT be found from the Registry. Please check and try again. Thank you.\n") def message_unknown_cmd(): print("\n[ Unknown command ] Please check and try again. Thank you.\n") if (len(sys.argv) >=3 and sys.argv[1] == "database" and os.path.isfile(db_at_user_home_directory) == True): if len(sys.argv) == 3: if sys.argv[2] == "list": with open(db_at_user_home_directory, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) print("") pprint(list_registered_nodes) print("") elif sys.argv[2] == "checkpoint": print("") os.system('ls -trl ' + db_folder_at_home_directory) print("") elif sys.argv[2] == "clean": double_confirm_clean_db_backup = input("Do you really want to CLEAR ALL Backup of the Database? (yes/no) : ") if double_confirm_clean_db_backup == "yes": os.system('rm -rf ' + db_at_user_home_directory + '.*') print("\nALL Checkpoints cleared.\nONLY the in-force DB remains.\n") elif double_confirm_clean_db_backup == "no": print("\nGood Choice ^^\" !!\n") else: print("\nPlease ONLY Enter \"yes\" or \"no\". 
Thank you.\n") elif sys.argv[2] == "scan": scan_target_project_id = input("Please Enter GNS3 Project ID : ") scan_target_project_name = input("Please Enter GNS3 Project Name : ") scan_target_gns3_file_path = "/opt/gns3/projects/" + scan_target_project_id + "/" + scan_target_project_name + ".gns3" with open(scan_target_gns3_file_path, "r") as gns3_file: list_all_gns3_parameters = json.loads(gns3_file.read()) list_all_gns3_nodes = ((list_all_gns3_parameters["topology"])["nodes"]) list_registered_nodes_duplicate_hostname_checked = [] list_registered_nodes_duplicate_checked = [] with open(db_at_user_home_directory, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) for each_node in list_all_gns3_nodes: for entry_as_dict_hostname_check in list_registered_nodes: for entry_of_list_registered_nodes_duplicate_hostname_checked in list_registered_nodes_duplicate_hostname_checked: if (entry_as_dict_hostname_check["Hostname"] != each_node["name"] and entry_as_dict_hostname_check["Hostname"] != entry_of_list_registered_nodes_duplicate_hostname_checked["Hostname"]): list_registered_nodes_duplicate_hostname_checked.append({"Hostname": entry_as_dict_hostname_check["Hostname"], "Port": entry_as_dict_hostname_check["Port"]}) for each_node in list_all_gns3_nodes: for entry_as_dict in list_registered_nodes_duplicate_hostname_checked: for entry_of_list_registered_nodes_duplicate_checked in list_registered_nodes_duplicate_checked: if (entry_as_dict["Port"] != each_node["console"] and entry_as_dict["Port"] != entry_of_list_registered_nodes_duplicate_checked["Port"]): list_registered_nodes_duplicate_checked.append({"Hostname": entry_as_dict["Hostname"], "Port": entry_as_dict["Port"]}) for each_node in list_all_gns3_nodes: list_registered_nodes_duplicate_checked.append({"Hostname": each_node["name"], "Port": each_node["console"]}) os.system('mv ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' 
+ db_name_append_datetime) with open(db_at_user_home_directory, "w") as list_nodes: list_nodes.write(json.dumps(list_registered_nodes_duplicate_checked)) print("\nBackup Complete.\nNew Database Created.\n") else: message_user_manual() elif len(sys.argv) == 4: if sys.argv[2] == "clean" and sys.argv[3] == "--all": double_confirm_clean_db_backup = input("Do you really want to CLEAR the Entire Database? (yes/no) : ") if double_confirm_clean_db_backup == "yes": os.system('rm -rf ' + db_at_user_home_directory + '*') print("\nDatabase Deep clean finished.\nDatabase is now Empty.\n") elif double_confirm_clean_db_backup == "no": print("\nGood Choice ^^\" !!\n") elif sys.argv[2] == "delete": input_node_hostname = sys.argv[3] with open(db_at_user_home_directory, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) is_deletable_hostname = [] for deletable_hostname_entry in list_registered_nodes: if deletable_hostname_entry["Hostname"] == input_node_hostname: is_deletable_hostname.append(deletable_hostname_entry) if not is_deletable_hostname: print("\nNo matched entry in the in-force DB. Please check as below,\n\nFormat : tsconto database list\n") else: reduced_list_registered_nodes = [] for entry_as_dict in list_registered_nodes: if not entry_as_dict["Hostname"] == input_node_hostname: reduced_list_registered_nodes.append(entry_as_dict) else: pass os.system('mv ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' 
+ db_name_append_datetime) with open(db_at_user_home_directory, "w") as list_nodes: list_nodes.write(json.dumps(reduced_list_registered_nodes)) if not reduced_list_registered_nodes: os.system('rm -rf ' + db_at_user_home_directory) else: pass print("\nEntry Deleted.\n") else: message_user_manual() elif len(sys.argv) == 5: if (sys.argv[2] == "list" and sys.argv[3] == "checkpoint"): try: input_checkpoint_filename = sys.argv[4] specific_checkpoint_file = (db_folder_at_home_directory + input_checkpoint_filename) with open(specific_checkpoint_file, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) print("") pprint(list_registered_nodes) print("") except: message_database_checkpoint_not_found() sys.exit(1) elif (sys.argv[2] == "revert" and sys.argv[3] == "from"): input_checkpoint_filename = sys.argv[4] os.system('cp ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime) revert_file = db_folder_at_home_directory + input_checkpoint_filename revert_file_output = subprocess.run(['cp', revert_file, db_at_user_home_directory], capture_output = True, text = True) if revert_file_output.returncode == 0: print("\nReverted to " + sys.argv[4] + "\n") else: message_database_checkpoint_not_found() elif sys.argv[2] == "add": input_node_hostname = sys.argv[3] input_node_port_number = sys.argv[4] with open(db_at_user_home_directory, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) list_registered_nodes_duplicate_hostname_checked = [] for entry_as_dict_hostname_check in list_registered_nodes: if entry_as_dict_hostname_check["Hostname"] != input_node_hostname: list_registered_nodes_duplicate_hostname_checked.append({"Hostname": entry_as_dict_hostname_check["Hostname"], "Port": entry_as_dict_hostname_check["Port"]}) list_registered_nodes_duplicate_checked = [] for entry_as_dict in list_registered_nodes_duplicate_hostname_checked: if entry_as_dict["Port"] != input_node_port_number: 
list_registered_nodes_duplicate_checked.append({"Hostname": entry_as_dict["Hostname"], "Port": entry_as_dict["Port"]}) list_registered_nodes_duplicate_checked.append({"Hostname": input_node_hostname, "Port": input_node_port_number}) os.system('mv ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime) with open(db_at_user_home_directory, "w") as list_nodes: list_nodes.write(json.dumps(list_registered_nodes_duplicate_checked)) print("\nBackup Complete.\nNew Database Created.\n") else: message_user_manual() else: message_user_manual() elif len(sys.argv) == 2: message_user_manual() elif (len(sys.argv) == 3 and sys.argv[1] == "database"): if sys.argv[2] == "checkpoint": if os.path.isdir(db_folder_at_home_directory): if not len(os.listdir(db_folder_at_home_directory)) == 0: print("") os.system('ls -trl ' + db_folder_at_home_directory) print("") else: message_database_checkpoint_empty() else: message_database_checkpoint_empty() elif sys.argv[2] == "scan": os.system('mkdir -p ' + db_folder_at_home_directory) scan_target_project_id = input("Please Enter GNS3 Project ID : ") scan_target_project_name = input("Please Enter GNS3 Project Name : ") scan_target_gns3_file_path = "/opt/gns3/projects/" + scan_target_project_id + "/" + scan_target_project_name + ".gns3" with open(scan_target_gns3_file_path, "r") as gns3_file: list_all_gns3_parameters = json.loads(gns3_file.read()) list_all_gns3_nodes = ((list_all_gns3_parameters["topology"])["nodes"]) list_registered_nodes = [] for each_node in list_all_gns3_nodes: list_registered_nodes.append({"Hostname": each_node["name"], "Port": each_node["console"]}) with open(db_at_user_home_directory, "w") as list_nodes: list_nodes.write(json.dumps(list_registered_nodes)) print("\nCreated new Database.\n") elif (sys.argv[2] == "list" or sys.argv[2] == "clean"): message_database_empty() else: message_user_manual() elif (len(sys.argv) == 4 and sys.argv[1] == "database"): if (sys.argv[2] == "clean" and 
sys.argv[3] =="--all"): double_confirm_clean_db_backup = input("Do you really want to CLEAR the Entire Database? (yes/no) : ") if double_confirm_clean_db_backup == "yes": os.system('rm -rf ' + db_at_user_home_directory + '*') print("\nDatabase Deep clean finished.\nDatabase is now Empty.\n") elif double_confirm_clean_db_backup == "no": print("\nGood Choice ^^\" !!\n") elif sys.argv[2] == "delete": message_database_empty() else: message_user_manual() elif (len(sys.argv) == 5 and sys.argv[1] == "database"): if (sys.argv[2] == "list" and sys.argv[3] == "checkpoint"): try: input_checkpoint_filename = sys.argv[4] specific_checkpoint_file = (db_folder_at_home_directory + input_checkpoint_filename) with open(specific_checkpoint_file, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) print("") pprint(list_registered_nodes) print("") except: message_database_checkpoint_not_found() sys.exit(1) elif (sys.argv[2] == "revert" and sys.argv[3] == "from"): if not len(os.listdir(db_folder_at_home_directory)) == 0: input_checkpoint_filename = sys.argv[4] os.system('cp ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' 
+ db_name_append_datetime) revert_file = db_folder_at_home_directory + input_checkpoint_filename revert_file_output = subprocess.run(['cp', revert_file, db_at_user_home_directory], capture_output = True, text = True) if revert_file_output.returncode == 0: print("\nReverted to " + sys.argv[4] + "\n") else: message_database_checkpoint_not_found() else: message_database_checkpoint_empty() elif sys.argv[2] == "add": input_node_hostname = sys.argv[3] input_node_port_number = sys.argv[4] list_registered_nodes = [{"Hostname": input_node_hostname, "Port": input_node_port_number}] os.system('mkdir -p ' + db_folder_at_home_directory + ' && touch ' + db_at_user_home_directory) with open(db_at_user_home_directory, "a") as list_nodes: list_nodes.write(json.dumps(list_registered_nodes)) print("\nCreated new Database.\n") else: message_user_manual() elif (len(sys.argv) == 3 and sys.argv[1] == "connect"): if os.path.isfile(db_at_user_home_directory) == True: check_if_hostname_in_list_registered_nodes_exist = False db_at_user_home_directory = os.path.expanduser("~/.rhpn-gns3-stsc/dict-GNS3-NEs-list.json") with open(db_at_user_home_directory, "r") as list_nodes: list_registered_nodes = json.loads(list_nodes.read()) for entry_as_dict in list_registered_nodes: if entry_as_dict["Hostname"] == sys.argv[2]: os.system('telnet 127.0.0.1 ' + str(entry_as_dict["Port"])) check_if_hostname_in_list_registered_nodes_exist = True else: pass if check_if_hostname_in_list_registered_nodes_exist == False: message_hostname_not_found() else: pass elif os.path.isfile(db_at_user_home_directory) == False: message_database_empty() else: message_user_manual() else: message_user_manual()
54.170279
243
0.607818
2,112
17,497
4.692235
0.107481
0.035318
0.090111
0.038749
0.795964
0.748739
0.727548
0.709284
0.666095
0.595964
0
0.009854
0.263417
17,497
322
244
54.338509
0.759078
0.028576
0
0.664122
0
0.026718
0.222595
0.022607
0
0
0
0
0
1
0.022901
false
0.015267
0.026718
0
0.049618
0.179389
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
457733343cca642cbffbb70a0e251eea64564f12
112
py
Python
tcsdk/api/performance/__init__.py
tsbxmw/tcsdk
fb4042cdece83f681a9057ff36ca09954988a31d
[ "MIT" ]
1
2019-12-04T09:30:06.000Z
2019-12-04T09:30:06.000Z
tcsdk/api/performance/__init__.py
tsbxmw/tcsdk
fb4042cdece83f681a9057ff36ca09954988a31d
[ "MIT" ]
null
null
null
tcsdk/api/performance/__init__.py
tsbxmw/tcsdk
fb4042cdece83f681a9057ff36ca09954988a31d
[ "MIT" ]
null
null
null
from .performance import PerformanceApi, Task, Label PerformanceApi = PerformanceApi Task = Task Label = Label
18.666667
52
0.803571
12
112
7.5
0.5
0.4
0
0
0
0
0
0
0
0
0
0
0.142857
112
5
53
22.4
0.9375
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
4594d5e9c6171b900540cc9f0d141171ef10e9d4
23,620
py
Python
tupan/lib/extensions.py
ggf84/tupan
67d3aa103d77248a04e8f112930ba7bdb55024b2
[ "MIT" ]
1
2016-06-12T19:43:51.000Z
2016-06-12T19:43:51.000Z
tupan/lib/extensions.py
ggf84/tupan
67d3aa103d77248a04e8f112930ba7bdb55024b2
[ "MIT" ]
1
2021-09-24T13:28:57.000Z
2021-09-24T13:28:57.000Z
tupan/lib/extensions.py
ggf84/tupan
67d3aa103d77248a04e8f112930ba7bdb55024b2
[ "MIT" ]
3
2015-11-03T15:35:31.000Z
2021-03-02T17:41:27.000Z
# -*- coding: utf-8 -*- # """This module implements highlevel interfaces for C/CL-extensions. """ from __future__ import print_function, division import sys import logging from .utils import ctype from .utils.timing import decallmethods, timings __all__ = ["Phi", "phi", "Acc", "acc", "AccJerk", "acc_jerk", "SnapCrackle", "snap_crackle", "Tstep", "tstep", "PNAcc", "pnacc", "Sakura", "sakura", "NREG_X", "nreg_x", "NREG_V", "nreg_v", ] logger = logging.getLogger(__name__) @decallmethods(timings) class Clight(object): """This class holds the values of the PN-order, the speed of light and some of its inverse powers. """ def __init__(self): self._pn_order = 0 self._clight = None @property def pn_order(self): return self._pn_order @pn_order.setter def pn_order(self, value): self._pn_order = int(value) @property def clight(self): return self._clight @clight.setter def clight(self, value): self._clight = float(value) self.inv1 = 1.0/self._clight self.inv2 = self.inv1**2 self.inv3 = self.inv1**3 self.inv4 = self.inv1**4 self.inv5 = self.inv1**5 self.inv6 = self.inv1**6 self.inv7 = self.inv1**7 @timings def get_kernel(name, backend, prec): if backend == "C": from .cffi_backend import CKernel as Kernel elif backend == "CL": from .opencl_backend import CLKernel as Kernel else: msg = "Inappropriate 'backend': {}. 
Supported values: ['C', 'CL']" raise ValueError(msg.format(backend)) logger.debug( "Using '%s' from %s precision %s extension module.", name, prec, backend ) return Kernel(prec, name) @timings def prepare_args(args, argtypes): return [argtype(arg) for (arg, argtype) in zip(args, argtypes)] class AbstractExtension(object): def set_args(self, ips, jps, *args): raise NotImplemented def run(self): self.kernel.run() def get_result(self): return self.kernel.map_buffers(self._outargs, self.outargs) def calc(self, ips, jps, *args): self.set_args(ips, jps, *args) self.run() return self.get_result() @decallmethods(timings) class Phi(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("phi_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) restypes = (cty.c_real_p,) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "phi" in ips.__dict__: ips.register_auxiliary_attribute("phi", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2) self._outargs = (ips.phi,) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) def _pycalc(self, ips, jps): # Never use this method for production runs. It is very slow # and is here only for performance comparisons. It is also # likely that only the classes Acc and Phi will have an # implementation of this method. 
import numpy as np ni = ips.n if not "phi" in ips.__dict__: ips.register_auxiliary_attribute("phi", "real") for i in range(ni): rx = ips.rx[i] - jps.rx ry = ips.ry[i] - jps.ry rz = ips.rz[i] - jps.rz e2 = ips.eps2[i] + jps.eps2 r2 = rx * rx + ry * ry + rz * rz mask = r2 > 0 inv_r2 = 1 / (r2 + e2) inv_r = np.sqrt(inv_r2) ips.phi[i] = -(jps.mass * inv_r)[mask].sum() return (ips.phi,) # calc = _pycalc @decallmethods(timings) class Acc(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("acc_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "ax" in ips.__dict__: ips.register_auxiliary_attribute("ax", "real") if not "ay" in ips.__dict__: ips.register_auxiliary_attribute("ay", "real") if not "az" in ips.__dict__: ips.register_auxiliary_attribute("az", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2) self._outargs = (ips.ax, ips.ay, ips.az) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) def _pycalc(self, ips, jps): # Never use this method for production runs. It is very slow # and is here only for performance comparisons. It is also # likely that only the classes Acc and Phi will have an # implementation of this method. 
import numpy as np ni = ips.n if not "ax" in ips.__dict__: ips.register_auxiliary_attribute("ax", "real") if not "ay" in ips.__dict__: ips.register_auxiliary_attribute("ay", "real") if not "az" in ips.__dict__: ips.register_auxiliary_attribute("az", "real") for i in range(ni): rx = ips.rx[i] - jps.rx ry = ips.ry[i] - jps.ry rz = ips.rz[i] - jps.rz e2 = ips.eps2[i] + jps.eps2 r2 = rx * rx + ry * ry + rz * rz mask = r2 > 0 inv_r2 = 1 / (r2 + e2) inv_r = np.sqrt(inv_r2) inv_r3 = inv_r * inv_r2 inv_r3 *= jps.mass ips.ax[i] = -(inv_r3 * rx)[mask].sum() ips.ay[i] = -(inv_r3 * ry)[mask].sum() ips.az[i] = -(inv_r3 * rz)[mask].sum() return (ips.ax, ips.ay, ips.az) # calc = _pycalc @decallmethods(timings) class AccJerk(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("acc_jerk_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "ax" in ips.__dict__: ips.register_auxiliary_attribute("ax", "real") if not "ay" in ips.__dict__: ips.register_auxiliary_attribute("ay", "real") if not "az" in ips.__dict__: ips.register_auxiliary_attribute("az", "real") if not "jx" in ips.__dict__: ips.register_auxiliary_attribute("jx", "real") if not "jy" in ips.__dict__: ips.register_auxiliary_attribute("jy", "real") if not "jz" in ips.__dict__: ips.register_auxiliary_attribute("jz", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2, jps.vx, jps.vy, jps.vz) self._outargs = (ips.ax, ips.ay, 
ips.az, ips.jx, ips.jy, ips.jz) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) @decallmethods(timings) class SnapCrackle(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("snap_crackle_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "sx" in ips.__dict__: ips.register_auxiliary_attribute("sx", "real") if not "sy" in ips.__dict__: ips.register_auxiliary_attribute("sy", "real") if not "sz" in ips.__dict__: ips.register_auxiliary_attribute("sz", "real") if not "cx" in ips.__dict__: ips.register_auxiliary_attribute("cx", "real") if not "cy" in ips.__dict__: ips.register_auxiliary_attribute("cy", "real") if not "cz" in ips.__dict__: ips.register_auxiliary_attribute("cz", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, ips.ax, ips.ay, ips.az, ips.jx, ips.jy, ips.jz, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2, jps.vx, jps.vy, jps.vz, jps.ax, jps.ay, jps.az, jps.jx, jps.jy, jps.jz) self._outargs = (ips.sx, ips.sy, ips.sz, ips.cx, ips.cy, ips.cz) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) 
@decallmethods(timings) class Tstep(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("tstep_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real) restypes = (cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps, eta): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "tstep" in ips.__dict__: ips.register_auxiliary_attribute("tstep", "real") if not "tstepij" in ips.__dict__: ips.register_auxiliary_attribute("tstepij", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2, jps.vx, jps.vy, jps.vz, eta) self._outargs = (ips.tstep, ips.tstepij) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) @decallmethods(timings) class PNAcc(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("pnacc_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real, cty.c_real, cty.c_real, cty.c_real, cty.c_real, cty.c_real, cty.c_real) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "pnax" in ips.__dict__: ips.register_auxiliary_attribute("pnax", "real") if not "pnay" in ips.__dict__: 
ips.register_auxiliary_attribute("pnay", "real") if not "pnaz" in ips.__dict__: ips.register_auxiliary_attribute("pnaz", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2, jps.vx, jps.vy, jps.vz, clight.pn_order, clight.inv1, clight.inv2, clight.inv3, clight.inv4, clight.inv5, clight.inv6, clight.inv7) self._outargs = (ips.pnax, ips.pnay, ips.pnaz) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) @decallmethods(timings) class Sakura(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("sakura_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real, cty.c_int) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps, dt, flag): ni = ips.n nj = jps.n if hasattr(self.kernel, "max_lsize"): vw = 1 wpt = self.kernel.work_per_thread max_lsize = self.kernel.max_lsize import math gs = (ni + wpt * vw - 1) // (wpt * vw) ls = 2**int(math.log(gs, 2)) lsize = min(ls, max_lsize) gsize = ((gs + lsize - 1) // lsize) * lsize self.kernel.global_size = (gsize, 1, 1) self.kernel.local_size = (lsize, 1, 1) # self.kernel.set_gsize(ni, nj) if not "drx" in ips.__dict__: ips.register_auxiliary_attribute("drx", "real") if not "dry" in ips.__dict__: ips.register_auxiliary_attribute("dry", "real") if not "drz" in ips.__dict__: ips.register_auxiliary_attribute("drz", "real") if not "dvx" in ips.__dict__: ips.register_auxiliary_attribute("dvx", "real") if not "dvy" in ips.__dict__: 
ips.register_auxiliary_attribute("dvy", "real") if not "dvz" in ips.__dict__: ips.register_auxiliary_attribute("dvz", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2, jps.vx, jps.vy, jps.vz, dt, flag) self._outargs = (ips.drx, ips.dry, ips.drz, ips.dvx, ips.dvy, ips.dvz) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) @decallmethods(timings) class NREG_X(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("nreg_Xkernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps, dt): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "mrx" in ips.__dict__: ips.register_auxiliary_attribute("mrx", "real") if not "mry" in ips.__dict__: ips.register_auxiliary_attribute("mry", "real") if not "mrz" in ips.__dict__: ips.register_auxiliary_attribute("mrz", "real") if not "ax" in ips.__dict__: ips.register_auxiliary_attribute("ax", "real") if not "ay" in ips.__dict__: ips.register_auxiliary_attribute("ay", "real") if not "az" in ips.__dict__: ips.register_auxiliary_attribute("az", "real") if not "u" in ips.__dict__: ips.register_auxiliary_attribute("u", "real") self._inargs = (ni, ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, nj, jps.mass, jps.rx, jps.ry, jps.rz, jps.eps2, jps.vx, jps.vy, jps.vz, dt) self._outargs = (ips.mrx, ips.mry, ips.mrz, ips.ax, ips.ay, ips.az, 
ips.u) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) @decallmethods(timings) class NREG_V(AbstractExtension): """ """ def __init__(self, backend, prec): self.kernel = get_kernel("nreg_Vkernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_uint, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps, dt): ni = ips.n nj = jps.n self.kernel.set_gsize(ni, nj) if not "mvx" in ips.__dict__: ips.register_auxiliary_attribute("mvx", "real") if not "mvy" in ips.__dict__: ips.register_auxiliary_attribute("mvy", "real") if not "mvz" in ips.__dict__: ips.register_auxiliary_attribute("mvz", "real") if not "mk" in ips.__dict__: ips.register_auxiliary_attribute("mk", "real") self._inargs = (ni, ips.mass, ips.vx, ips.vy, ips.vz, ips.ax, ips.ay, ips.az, nj, jps.mass, jps.vx, jps.vy, jps.vz, jps.ax, jps.ay, jps.az, dt) self._outargs = (ips.mvx, ips.mvy, ips.mvz, ips.mk) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) @decallmethods(timings) class Kepler(AbstractExtension): """ """ def __init__(self, backend, prec): if backend == "CL": # No need for CL support. backend = "C" # C is fast enough! 
self.kernel = get_kernel("kepler_solver_kernel", backend, prec) cty = self.kernel.cty argtypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real) restypes = (cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p, cty.c_real_p) self.argtypes = argtypes self.restypes = restypes def set_args(self, ips, jps, dt): ni = ips.n nj = jps.n self._inargs = (ips.mass, ips.rx, ips.ry, ips.rz, ips.eps2, ips.vx, ips.vy, ips.vz, dt) self._outargs = (ips.rx, ips.ry, ips.rz, ips.vx, ips.vy, ips.vz) self.inargs = prepare_args(self._inargs, self.argtypes) self.outargs = prepare_args(self._outargs, self.restypes) self.kernel.set_args(self.inargs + self.outargs) backend = "CL" if "--use_cl" in sys.argv else "C" clight = Clight() phi = Phi(backend, ctype.prec) acc = Acc(backend, ctype.prec) acc_jerk = AccJerk(backend, ctype.prec) snap_crackle = SnapCrackle(backend, ctype.prec) tstep = Tstep(backend, ctype.prec) pnacc = PNAcc(backend, ctype.prec) sakura = Sakura(backend, ctype.prec) nreg_x = NREG_X(backend, ctype.prec) nreg_v = NREG_V(backend, ctype.prec) kepler = Kepler(backend, ctype.prec) ########## end of file ##########
35.253731
75
0.545766
3,261
23,620
3.675253
0.07421
0.075428
0.137505
0.145682
0.759032
0.750605
0.742428
0.648728
0.644556
0.644556
0
0.005355
0.335902
23,620
669
76
35.306428
0.758702
0.031837
0
0.589942
0
0
0.03361
0
0
0
0
0
0
1
0.06383
false
0
0.019342
0.007737
0.121857
0.001934
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
4596166c7adf25cdd4d41a87929b5820e68f0550
191
py
Python
tests/services/test_cli.py
rscohn2/sensepy
1c00cd6d0c1920057cc732d6c5b6a791b7e79e36
[ "MIT" ]
null
null
null
tests/services/test_cli.py
rscohn2/sensepy
1c00cd6d0c1920057cc732d6c5b6a791b7e79e36
[ "MIT" ]
1
2021-02-20T17:26:15.000Z
2021-02-20T17:26:15.000Z
tests/services/test_cli.py
zignalz/zignalz
1c00cd6d0c1920057cc732d6c5b6a791b7e79e36
[ "MIT" ]
null
null
null
# SPDX-FileCopyrightText: 2020 Robert Cohn # # SPDX-License-Identifier: MIT import pytest from zignalz import cli @pytest.mark.skip def test_db_start(): cli.db.up() cli.db.down()
13.642857
42
0.717277
28
191
4.821429
0.75
0.074074
0
0
0
0
0
0
0
0
0
0.025157
0.167539
191
13
43
14.692308
0.823899
0.361257
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
true
0
0.333333
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
45a15006ad087869f30300bca8e4712962c1e859
286
py
Python
ExamPrep/Assignments 2014-15/A2/Py2Q3.py
FHomewood/ScientificComputing
bc3477b4607b25a700f2d89ca4f01cb3ea0998c4
[ "IJG" ]
null
null
null
ExamPrep/Assignments 2014-15/A2/Py2Q3.py
FHomewood/ScientificComputing
bc3477b4607b25a700f2d89ca4f01cb3ea0998c4
[ "IJG" ]
null
null
null
ExamPrep/Assignments 2014-15/A2/Py2Q3.py
FHomewood/ScientificComputing
bc3477b4607b25a700f2d89ca4f01cb3ea0998c4
[ "IJG" ]
null
null
null
print 'Question 3b' import math as m #Variables h1=0.5 h2=0.1 m.tanh(1+h1) def G(h): x=1.0 return (((4*m.tanh(x+(h/2)))-(4*m.tanh(x-(h/2))))/(3*h))-((m.tanh(x+h)-m.tanh(x-h))/(6*h)) print "at x=1 and h=0.5" print "G(0.5) =",G(h1) print "at x=1 and h=0.1" print "G(0.1) =",G(h2)
14.3
91
0.541958
77
286
2.012987
0.324675
0.16129
0.154839
0.180645
0.393548
0.296774
0.180645
0
0
0
0
0.117886
0.13986
286
19
92
15.052632
0.512195
0.031469
0
0
0
0
0.214545
0
0
0
0
0
0
0
null
null
0
0.083333
null
null
0.416667
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
3
45a8da4b58e200c454ed9369441c872d55bcbeff
187
py
Python
stackstore/apps.py
salexkidd/django-stackstore-model
fb0bb6431dd772a80b8c9d6d2b625eae69562fa9
[ "MIT" ]
5
2020-05-28T07:04:25.000Z
2020-09-26T05:29:46.000Z
stackstore/apps.py
salexkidd/django-stackstore-model
fb0bb6431dd772a80b8c9d6d2b625eae69562fa9
[ "MIT" ]
1
2020-09-26T05:34:19.000Z
2020-09-26T05:34:19.000Z
stackstore/apps.py
salexkidd/django-stackstore-model
fb0bb6431dd772a80b8c9d6d2b625eae69562fa9
[ "MIT" ]
null
null
null
from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class StackstoreConfig(AppConfig): name = 'stackstore' verbose_name = _("Stack store")
20.777778
54
0.764706
22
187
6.318182
0.772727
0.143885
0
0
0
0
0
0
0
0
0
0
0.160428
187
8
55
23.375
0.88535
0
0
0
0
0
0.112903
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
45d65891a06d722f73ec82d918dc0869a9d1d47f
141
py
Python
src/image_uploader/urls.py
Hazzari/backend-api-uploader
38702abe72ccc85c2bf83951ff16c71ce510f46d
[ "MIT" ]
null
null
null
src/image_uploader/urls.py
Hazzari/backend-api-uploader
38702abe72ccc85c2bf83951ff16c71ce510f46d
[ "MIT" ]
null
null
null
src/image_uploader/urls.py
Hazzari/backend-api-uploader
38702abe72ccc85c2bf83951ff16c71ce510f46d
[ "MIT" ]
null
null
null
from django.urls import path from . import views app_name = 'authentication' urlpatterns = [ path('', views.ImageViewSet.as_view()), ]
15.666667
43
0.70922
17
141
5.764706
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.163121
141
8
44
17.625
0.830508
0
0
0
0
0
0.099291
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
45d83074a6a311e52e10397d84bfed014e9507fd
272
py
Python
src/io/__init__.py
jonathan-scholbach/jargon
1bc49efb4002b4f348b1449ca5bad32bc416dab7
[ "MIT" ]
null
null
null
src/io/__init__.py
jonathan-scholbach/jargon
1bc49efb4002b4f348b1449ca5bad32bc416dab7
[ "MIT" ]
null
null
null
src/io/__init__.py
jonathan-scholbach/jargon
1bc49efb4002b4f348b1449ca5bad32bc416dab7
[ "MIT" ]
null
null
null
"""Module for all i/o functionality.""" from .string_manipulation import title_from_path, pluralize, mask from .clear import clear from .colorprint import cprint from .argument_parser import argument_parser from .print_table import Table from .date_diff import date_diff
30.222222
65
0.823529
40
272
5.4
0.575
0.12963
0
0
0
0
0
0
0
0
0
0
0.117647
272
8
66
34
0.9
0.121324
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
afeaa00c02e4ffc8d5fd4b255c6a2dfaae61fdbf
5,955
py
Python
transform/views/test_views.py
ONSdigital/sdx-transform-cora
4f4536a6a4f9f12e6e1c5667d16e9746bd4d85db
[ "MIT" ]
null
null
null
transform/views/test_views.py
ONSdigital/sdx-transform-cora
4f4536a6a4f9f12e6e1c5667d16e9746bd4d85db
[ "MIT" ]
42
2017-03-15T11:46:44.000Z
2018-11-13T11:52:10.000Z
transform/views/test_views.py
ONSdigital/sdx-transform-cora
4f4536a6a4f9f12e6e1c5667d16e9746bd4d85db
[ "MIT" ]
1
2021-04-11T08:11:49.000Z
2021-04-11T08:11:49.000Z
from transform.transformers.pdf_transformer import PDFTransformer from transform.transformers.pdf_transformer_style_cora import CoraPdfTransformerStyle from transform.transformers.image_transformer import ImageTransformer from transform import app from jinja2 import Environment, PackageLoader from flask import make_response, send_file import logging from structlog import wrap_logger import json logger = wrap_logger(logging.getLogger(__name__)) env = Environment(loader=PackageLoader('transform', 'templates')) test_message = """{ "type": "uk.gov.ons.edc.eq:surveyresponse", "origin": "uk.gov.ons.edc.eq", "survey_id": "144", "version": "0.0.1", "collection": { "exercise_sid": "hfjdskf", "instrument_id": "0001", "period": "201605" }, "submitted_at": "2016-03-12T10:39:40Z", "metadata": { "user_id": "789473423", "ru_ref": "12345678901A" }, "data": { "0210": "1", "0220": "0", "0230": "1", "0240": "0", "0410": "1", "0420": "0", "0430": "1", "0440": "0", "2310": "1", "2320": "0", "2330": "1", "2340": "0", "1310": "0", "2675": "0", "2676": "1", "2677": "0", "1410": "123456", "1320": "1", "1420": "123456", "1331": "0", "1332": "1", "1333": "0", "1430": "123456", "1340": "1", "1440": "123456", "1350": "0", "1450": "123456", "1360": "1", "1460": "123456", "1371": "0", "1372": "1", "1373": "0", "1374": "1", "1470": "123456", "0510": "yes", "0610": "0", "0620": "1", "0630": "0", "0520": "yes", "0601": "0", "0602": "1", "0603": "0", "0710": "0", "0720": "1", "0810": "123", "0820": "010", "0830": "789", "0840": "963", "0900": "yes", "1010": "0", "1020": "1", "1030": "0", "1100": "yes", "1510": "1", "1530": "0", "1520": "1", "2657": "0100", "2658": "1000", "2659": "0001", "2660": "0010", "2661": "0100", "2662": "1000", "2663": "0001", "2664": "0010", "2665": "0100", "2666": "1000", "2667": "0001", "2011": "1", "2020": "0", "2030": "1", "2040": "0", "1210": "0100", "1211": "1000", "1220": "0001", "1230": "0010", "1240": "0100", "1250": "1000", "1290": "0001", "1260": "0010", "1270": 
"0100", "1212": "1000", "1213": "0001", "1280": "0010", "1601": "0001", "1620": "0010", "1631": "0100", "1632": "1000", "1640": "0001", "1650": "0010", "1660": "0100", "1670": "1000", "1680": "0001", "1610": "0010", "1611": "0100", "1690": "1000", "1811": "0", "1812": "1", "1813": "0", "1814": "1", "1821": "1", "1822": "0", "1823": "1", "1824": "0", "1881": "0", "1882": "1", "1883": "0", "1884": "1", "1891": "1", "1892": "0", "1893": "1", "1894": "0", "1841": "0", "1842": "1", "1843": "0", "1844": "1", "1851": "1", "1852": "0", "1853": "1", "1854": "0", "1861": "0", "1862": "1", "1863": "0", "1864": "1", "1871": "1", "1872": "0", "1873": "1", "1874": "0", "2650": "0001", "2651": "0010", "2652": "0100", "2653": "1000", "2654": "0001", "2655": "0010", "2656": "0100", "2668": "0", "2669": "1", "2670": "0", "2671": "1", "2672": "0", "2673": "1", "2674": "0", "2410": "123456", "2420": "123456", "2440": "123456", "2510": "4567890", "2520": "1234567", "2610": "963", "2620": "123", "2631": "1", "2632": "0", "2633": "1", "2634": "0", "2635": "1", "2636": "0", "2678": "0", "2700": "1", "2800": "000", "2801": "10", "2900": "no" } }""" @app.route('/images-test', methods=['GET']) def images_test(): survey_response = json.loads(test_message) form_id = survey_response['collection']['instrument_id'] with open("./transform/surveys/%s.%s.json" % (survey_response['survey_id'], form_id)) as json_file: survey = json.load(json_file) itransformer = ImageTransformer(logger, survey, survey_response, CoraPdfTransformerStyle()) itransformer.get_zipped_images() return send_file(itransformer.zip, mimetype='application/zip') @app.route('/pdf-test', methods=['GET']) def pdf_test(): survey_response = json.loads(test_message) form_id = survey_response['collection']['instrument_id'] with open("./transform/surveys/%s.%s.json" % (survey_response['survey_id'], form_id)) as json_file: survey = json.load(json_file) pdf = PDFTransformer(survey, survey_response, CoraPdfTransformerStyle()) rendered_pdf = 
pdf.render() response = make_response(rendered_pdf) response.mimetype = 'application/pdf' return response @app.route('/html-test', methods=['GET']) def html_test(): response = json.loads(test_message) template = env.get_template('html.tmpl') form_id = response['collection']['instrument_id'] with open("./transform/surveys/%s.%s.json" % (response['survey_id'], form_id)) as json_file: survey = json.load(json_file) return template.render(response=response, survey=survey)
25.126582
103
0.451889
603
5,955
4.364842
0.441128
0.042553
0.028495
0.019377
0.222644
0.172492
0.172492
0.172492
0.172492
0.172492
0
0.262048
0.330982
5,955
236
104
25.233051
0.398594
0
0
0.041667
0
0
0.708312
0.024853
0
0
0
0
0
1
0.013889
false
0
0.041667
0
0.069444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
b300af201b5b6ca40f07dc6e4d46529eeb960016
35,585
py
Python
pyaz/vmss/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
null
null
null
pyaz/vmss/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
null
null
null
pyaz/vmss/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
1
2022-02-03T09:12:01.000Z
2022-02-03T09:12:01.000Z
''' Manage groupings of virtual machines in an Azure Virtual Machine Scale Set (VMSS). ''' from .. pyaz_utils import _call_az from . import application, diagnostics, disk, encryption, extension, identity, nic, rolling_upgrade, run_command def create(name, resource_group, accelerated_networking=None, admin_password=None, admin_username=None, app_gateway=None, app_gateway_capacity=None, app_gateway_sku=None, app_gateway_subnet_address_prefix=None, asgs=None, assign_identity=None, authentication_type=None, automatic_repairs_grace_period=None, backend_pool_name=None, backend_port=None, capacity_reservation_group=None, computer_name_prefix=None, custom_data=None, data_disk_caching=None, data_disk_encryption_sets=None, data_disk_iops=None, data_disk_mbps=None, data_disk_sizes_gb=None, disable_overprovision=None, dns_servers=None, edge_zone=None, enable_agent=None, enable_auto_update=None, enable_cross_zone_upgrade=None, enable_spot_restore=None, encryption_at_host=None, ephemeral_os_disk=None, ephemeral_os_disk_placement=None, eviction_policy=None, generate_ssh_keys=None, health_probe=None, host_group=None, image=None, instance_count=None, lb_nat_pool_name=None, lb_sku=None, license_type=None, load_balancer=None, location=None, max_batch_instance_percent=None, max_price=None, max_unhealthy_instance_percent=None, max_unhealthy_upgraded_instance_percent=None, network_api_version=None, no_wait=None, nsg=None, orchestration_mode=None, os_disk_caching=None, os_disk_encryption_set=None, os_disk_name=None, os_disk_size_gb=None, os_type=None, patch_mode=None, pause_time_between_batches=None, plan_name=None, plan_product=None, plan_promotion_code=None, plan_publisher=None, platform_fault_domain_count=None, ppg=None, prioritize_unhealthy_instances=None, priority=None, public_ip_address=None, public_ip_address_allocation=None, public_ip_address_dns_name=None, public_ip_per_vm=None, role=None, scale_in_policy=None, scope=None, secrets=None, single_placement_group=None, 
specialized=None, spot_restore_timeout=None, ssh_dest_key_path=None, ssh_key_values=None, storage_container_name=None, storage_sku=None, subnet=None, subnet_address_prefix=None, tags=None, terminate_notification_time=None, ultra_ssd_enabled=None, upgrade_policy_mode=None, use_unmanaged_disk=None, user_data=None, validate=None, vm_domain_name=None, vm_sku=None, vnet_address_prefix=None, vnet_name=None, zones=None): ''' Create an Azure Virtual Machine Scale Set. Required Parameters: - name -- Name of the virtual machine scale set. - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - accelerated_networking -- enable accelerated networking. Unless specified, CLI will enable it based on machine image and size - admin_password -- Password for the VM if authentication type is 'Password'. - admin_username -- Username for the VM. Default value is current username of OS. If the default value is system reserved, then default value will be set to azureuser. Please refer to https://docs.microsoft.com/rest/api/compute/virtualmachines/createorupdate#osprofile to get a full list of reserved values. - app_gateway -- Name to use when creating a new application gateway (default) or referencing an existing one. Can also reference an existing application gateway by ID or specify "" for none. - app_gateway_capacity -- The number of instances to use when creating a new application gateway. - app_gateway_sku -- SKU when creating a new application gateway. - app_gateway_subnet_address_prefix -- The subnet IP address prefix to use when creating a new application gateway in CIDR format. - asgs -- Space-separated list of existing application security groups to associate with the VM. - assign_identity -- accept system or user assigned identities separated by spaces. Use '[system]' to refer system assigned identity, or a resource id to refer user assigned identity. 
Check out help for more examples - authentication_type -- Type of authentication to use with the VM. Defaults to password for Windows and SSH public key for Linux. "all" enables both ssh and password authentication. - automatic_repairs_grace_period -- The amount of time (in minutes, between 30 and 90) for which automatic repairs are suspended due to a state change on VM. - backend_pool_name -- Name to use for the backend pool when creating a new load balancer or application gateway. - backend_port -- When creating a new load balancer, backend port to open with NAT rules (Defaults to 22 on Linux and 3389 on Windows). When creating an application gateway, the backend port to use for the backend HTTP settings. - capacity_reservation_group -- The ID or name of the capacity reservation group that is used to allocate. Pass in "None" to disassociate the capacity reservation group. Please note that if you want to delete a VM/VMSS that has been associated with capacity reservation group, you need to disassociate the capacity reservation group first. - computer_name_prefix -- Computer name prefix for all of the virtual machines in the scale set. Computer name prefixes must be 1 to 15 characters long - custom_data -- Custom init script file or text (cloud-init, cloud-config, etc..) - data_disk_caching -- storage caching type for data disk(s), including 'None', 'ReadOnly', 'ReadWrite', etc. Use a singular value to apply on all disks, or use `<lun>=<vaule1> <lun>=<value2>` to configure individual disk - data_disk_encryption_sets -- Names or IDs (space delimited) of disk encryption sets for data disks. - data_disk_iops -- Specify the Read-Write IOPS (space delimited) for the managed disk. Should be used only when StorageAccountType is UltraSSD_LRS. If not specified, a default value would be assigned based on diskSizeGB. - data_disk_mbps -- Specify the bandwidth in MB per second (space delimited) for the managed disk. Should be used only when StorageAccountType is UltraSSD_LRS. 
If not specified, a default value would be assigned based on diskSizeGB. - data_disk_sizes_gb -- space-separated empty managed data disk sizes in GB to create - disable_overprovision -- Overprovision option (see https://azure.microsoft.com/documentation/articles/virtual-machine-scale-sets-overview/ for details). - dns_servers -- space-separated IP addresses of DNS servers, e.g. 10.0.0.5 10.0.0.6 - edge_zone -- The name of edge zone. - enable_agent -- Indicate whether virtual machine agent should be provisioned on the virtual machine. When this property is not specified, default behavior is to set it to true. This will ensure that VM Agent is installed on the VM so that extensions can be added to the VM later - enable_auto_update -- Indicate whether Automatic Updates is enabled for the Windows virtual machine - enable_cross_zone_upgrade -- Set this Boolean property will allow VMSS to ignore AZ boundaries when constructing upgrade batches, and only consider Update Domain and maxBatchInstancePercent to determine the batch size - enable_spot_restore -- Enable the Spot-Try-Restore feature where evicted VMSS SPOT instances will be tried to be restored opportunistically based on capacity availability and pricing constraints - encryption_at_host -- Enable Host Encryption for the VM or VMSS. This will enable the encryption for all the disks including Resource/Temp disk at host itself. - ephemeral_os_disk -- Allows you to create an OS disk directly on the host node, providing local disk performance and faster VM/VMSS reimage time. - ephemeral_os_disk_placement -- Only applicable when used with `--ephemeral-os-disk`. Allows you to choose the Ephemeral OS disk provisioning location. - eviction_policy -- The eviction policy for virtual machines in a Spot priority scale set. Default eviction policy is Deallocate for a Spot priority scale set - generate_ssh_keys -- Generate SSH public and private key files if missing. 
The keys will be stored in the ~/.ssh directory - health_probe -- Probe name from the existing load balancer, mainly used for rolling upgrade or automatic repairs - host_group -- Name or ID of dedicated host group that the virtual machine scale set resides in - image -- None - instance_count -- Number of VMs in the scale set. - lb_nat_pool_name -- Name to use for the NAT pool when creating a new load balancer. - lb_sku -- Sku of the Load Balancer to create. Default to 'Standard' when single placement group is turned off; otherwise, default to 'Basic'. The public IP is supported to be created on edge zone only when it is 'Standard' - license_type -- Specifies that the Windows image or disk was licensed on-premises. To enable Azure Hybrid Benefit for Windows Server, use 'Windows_Server'. To enable Multitenant Hosting Rights for Windows 10, use 'Windows_Client'. For more information see the Azure Windows VM online docs. - load_balancer -- Name to use when creating a new load balancer (default) or referencing an existing one. Can also reference an existing load balancer by ID or specify "" for none. - location -- Location in which to create VM and related resources. If default location is not configured, will default to the resource group's location - max_batch_instance_percent -- The maximum percent of total virtual machine instances that will be upgraded simultaneously by the rolling upgrade in one batch. Default: 20% - max_price -- The maximum price (in US Dollars) you are willing to pay for a Spot VM/VMSS. -1 indicates that the Spot VM/VMSS should not be evicted for price reasons - max_unhealthy_instance_percent -- The maximum percentage of the total virtual machine instances in the scale set that can be simultaneously unhealthy. Default: 20% - max_unhealthy_upgraded_instance_percent -- The maximum percentage of upgraded virtual machine instances that can be found to be in an unhealthy state. 
Default: 20% - network_api_version -- Specify the Microsoft.Network API version used when creating networking resources in the Network Interface Configurations for Virtual Machine Scale Set with orchestration mode 'Flexible'. Default value is 2020-11-01. - no_wait -- Do not wait for the long-running operation to finish. - nsg -- Name or ID of an existing Network Security Group. - orchestration_mode -- Choose how virtual machines are managed by the scale set. In Uniform mode, you define a virtual machine model and Azure will generate identical instances based on that model. In Flexible mode, you manually create and add a virtual machine of any configuration to the scale set or generate identical instances based on virtual machine model defined for the scale set. - os_disk_caching -- Storage caching type for the VM OS disk. Default: ReadWrite - os_disk_encryption_set -- Name or ID of disk encryption set for OS disk. - os_disk_name -- The name of the new VM OS disk. - os_disk_size_gb -- OS disk size in GB to create. - os_type -- Type of OS installed on a custom VHD. Do not use when specifying an URN or URN alias. - patch_mode -- Mode of in-guest patching to IaaS virtual machine. Allowed values for Windows VM: AutomaticByOS, AutomaticByPlatform, Manual. Allowed values for Linux VM: AutomaticByPlatform, ImageDefault. Manual - You control the application of patches to a virtual machine. You do this by applying patches manually inside the VM. In this mode, automatic updates are disabled; the paramater --enable-auto-update must be false. AutomaticByOS - The virtual machine will automatically be updated by the OS. The parameter --enable-auto-update must be true. AutomaticByPlatform - the virtual machine will automatically updated by the OS. ImageDefault - The virtual machine's default patching configuration is used. 
The parameter --enable-agent and --enable-auto-update must be true - pause_time_between_batches -- The wait time between completing the update for all virtual machines in one batch and starting the next batch. Default: 0 seconds - plan_name -- plan name - plan_product -- plan product - plan_promotion_code -- plan promotion code - plan_publisher -- plan publisher - platform_fault_domain_count -- Fault Domain count for each placement group in the availability zone - ppg -- The name or ID of the proximity placement group the VMSS should be associated with. - prioritize_unhealthy_instances -- Set this Boolean property will lead to all unhealthy instances in a scale set getting upgraded before any healthy instances - priority -- Priority. Use 'Spot' to run short-lived workloads in a cost-effective way. 'Low' enum will be deprecated in the future. Please use 'Spot' to deploy Azure spot VM and/or VMSS. Default to Regular. - public_ip_address -- Name of the public IP address when creating one (default) or referencing an existing one. Can also reference an existing public IP by ID or specify "" for None ('""' in Azure CLI using PowerShell or --% operator). - public_ip_address_allocation -- None - public_ip_address_dns_name -- Globally unique DNS name for a newly created public IP. - public_ip_per_vm -- Each VM instance will have a public ip. For security, you can use '--nsg' to apply appropriate rules - role -- Role name or id the system assigned identity will have - scale_in_policy -- Specify the scale-in policy (space delimited) that decides which virtual machines are chosen for removal when a Virtual Machine Scale Set is scaled-in. 
- scope -- Scope that the system assigned identity can access - secrets -- One or many Key Vault secrets as JSON strings or files via `@{path}` containing `[{ "sourceVault": { "id": "value" }, "vaultCertificates": [{ "certificateUrl": "value", "certificateStore": "cert store name (only on windows)"}] }]` - single_placement_group -- Limit the scale set to a single placement group. See https://docs.microsoft.com/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-placement-groups for details. - specialized -- Indicate whether the source image is specialized. - spot_restore_timeout -- Timeout value expressed as an ISO 8601 time duration after which the platform will not try to restore the VMSS SPOT instances - ssh_dest_key_path -- Destination file path on the VM for the SSH key. If the file already exists, the specified key(s) are appended to the file. Destination path for SSH public keys is currently limited to its default value "/home/username/.ssh/authorized_keys" due to a known issue in Linux provisioning agent. - ssh_key_values -- None - storage_container_name -- Only applicable when used with `--use-unmanaged-disk`. Name of the storage container for the VM OS disk. Default: vhds - storage_sku -- The SKU of the storage account with which to persist VM. Use a singular sku that would be applied across all disks, or specify individual disks. Usage: [--storage-sku SKU | --storage-sku ID=SKU ID=SKU ID=SKU...], where each ID is "os" or a 0-indexed lun. Allowed values: Standard_LRS, Premium_LRS, StandardSSD_LRS, UltraSSD_LRS, Premium_ZRS, StandardSSD_ZRS. - subnet -- The name of the subnet when creating a new VNet or referencing an existing one. Can also reference an existing subnet by ID. If both vnet-name and subnet are omitted, an appropriate VNet and subnet will be selected automatically, or a new one will be created. - subnet_address_prefix -- The subnet IP address prefix to use when creating a new VNet in CIDR format. 
- tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags. - terminate_notification_time -- Length of time (in minutes, between 5 and 15) a notification to be sent to the VM on the instance metadata server till the VM gets deleted - ultra_ssd_enabled -- Enables or disables the capability to have 1 or more managed data disks with UltraSSD_LRS storage account - upgrade_policy_mode -- None - use_unmanaged_disk -- Do not use managed disk to persist VM - user_data -- UserData for the virtual machines in the scale set. It can be passed in as file or string. - validate -- Generate and validate the ARM template without creating any resources. - vm_domain_name -- domain name of VM instances, once configured, the FQDN is `vm<vm-index>.<vm-domain-name>.<..rest..>` - vm_sku -- Size of VMs in the scale set. Default to "Standard_DS1_v2". See https://azure.microsoft.com/pricing/details/virtual-machines/ for size info. - vnet_address_prefix -- The IP address prefix to use when creating a new VNet in CIDR format. - vnet_name -- Name of the virtual network when creating a new one or referencing an existing one. - zones -- Space-separated list of availability zones into which to provision the resource. ''' return _call_az("az vmss create", locals()) def deallocate(name, resource_group, instance_ids=None, no_wait=None): ''' Deallocate VMs within a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - instance_ids -- Space-separated list of IDs (ex: 1 2 3 ...) or * for all instances. If not provided, the action will be applied on the scaleset itself - no_wait -- Do not wait for the long-running operation to finish. 
''' return _call_az("az vmss deallocate", locals()) def delete(name, resource_group, force_deletion=None, no_wait=None): ''' Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - force_deletion -- Optional parameter to force delete a VM scale set. (Feature in Preview). - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss delete", locals()) def delete_instances(instance_ids, name, resource_group, no_wait=None): ''' Delete VMs within a VMSS. Required Parameters: - instance_ids -- Space-separated list of IDs (ex: 1 2 3 ...) or * for all instances. - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss delete-instances", locals()) def get_instance_view(name, resource_group, instance_id=None): ''' View an instance of a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - instance_id -- None ''' return _call_az("az vmss get-instance-view", locals()) def list(resource_group=None): ''' List VMSS. Optional Parameters: - resource_group -- Name of resource group. 
You can configure the default group using `az configure --defaults group=<name>` ''' return _call_az("az vmss list", locals()) def list_instances(name, resource_group, expand=None, filter=None, select=None): ''' Get a list of all virtual machines in a VM scale sets. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - expand -- The expand expression to apply to the operation. Allowed values are 'instanceView'. - filter -- The filter to apply to the operation. Allowed values are 'startswith(instanceView/statuses/code, 'PowerState') eq true', 'properties/latestModelApplied eq true', 'properties/latestModelApplied eq false'. - select -- The list parameters. Allowed values are 'instanceView', 'instanceView/statuses'. ''' return _call_az("az vmss list-instances", locals()) def list_instance_connection_info(name, resource_group): ''' Get the IP address and port number used to connect to individual VM instances within a set. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` ''' return _call_az("az vmss list-instance-connection-info", locals()) def list_instance_public_ips(name, resource_group): ''' List public IP addresses of VM instances within a set. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` ''' return _call_az("az vmss list-instance-public-ips", locals()) def list_skus(name, resource_group): ''' Required Parameters: - name -- Scale set name. 
You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` ''' return _call_az("az vmss list-skus", locals()) def reimage(name, resource_group, instance_id=None, no_wait=None): ''' Reimage VMs within a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - instance_id -- None - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss reimage", locals()) def perform_maintenance(name, resource_group, vm_instance_i_ds=None): ''' Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - vm_instance_i_ds -- A list of virtual machine instance IDs from the VM scale set. ''' return _call_az("az vmss perform-maintenance", locals()) def restart(name, resource_group, instance_ids=None, no_wait=None): ''' Restart VMs within a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - instance_ids -- Space-separated list of IDs (ex: 1 2 3 ...) or * for all instances. If not provided, the action will be applied on the scaleset itself - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss restart", locals()) def scale(name, new_capacity, resource_group, no_wait=None): ''' Change the number of VMs within a VMSS. 
Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - new_capacity -- None - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss scale", locals()) def show(name, resource_group, include_user_data=None, instance_id=None): ''' Get details on VMs within a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - include_user_data -- Include the user data properties in the query result. - instance_id -- None ''' return _call_az("az vmss show", locals()) def simulate_eviction(instance_id, name, resource_group): ''' Simulate the eviction of a Spot virtual machine in a VM scale set. Required Parameters: - instance_id -- The instance ID of the virtual machine. - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` ''' return _call_az("az vmss simulate-eviction", locals()) def start(name, resource_group, instance_ids=None, no_wait=None): ''' Start VMs within a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - instance_ids -- Space-separated list of IDs (ex: 1 2 3 ...) or * for all instances. 
If not provided, the action will be applied on the scaleset itself - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss start", locals()) def stop(name, resource_group, instance_ids=None, no_wait=None, skip_shutdown=None): ''' Power off (stop) VMs within a VMSS. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - instance_ids -- Space-separated list of IDs (ex: 1 2 3 ...) or * for all instances. If not provided, the action will be applied on the scaleset itself - no_wait -- Do not wait for the long-running operation to finish. - skip_shutdown -- Skip shutdown and power-off immediately. ''' return _call_az("az vmss stop", locals()) def update(name, resource_group, add=None, automatic_repairs_grace_period=None, capacity_reservation_group=None, enable_automatic_repairs=None, enable_cross_zone_upgrade=None, enable_spot_restore=None, enable_terminate_notification=None, ephemeral_os_disk_placement=None, force_string=None, instance_id=None, license_type=None, max_batch_instance_percent=None, max_price=None, max_unhealthy_instance_percent=None, max_unhealthy_upgraded_instance_percent=None, no_wait=None, pause_time_between_batches=None, ppg=None, prioritize_unhealthy_instances=None, priority=None, protect_from_scale_in=None, protect_from_scale_set_actions=None, remove=None, scale_in_policy=None, set=None, spot_restore_timeout=None, terminate_notification_time=None, ultra_ssd_enabled=None, user_data=None, vm_sku=None): ''' Update a VMSS. Run 'az vmss update-instances' command to roll out the changes to VMs if you have not configured upgrade policy. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. 
You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - add -- Add an object to a list of objects by specifying a path and key value pairs. Example: --add property.listProperty <key=value, string or JSON string> - automatic_repairs_grace_period -- The amount of time (in minutes, between 30 and 90) for which automatic repairs are suspended due to a state change on VM. - capacity_reservation_group -- The ID or name of the capacity reservation group that is used to allocate. Pass in "None" to disassociate the capacity reservation group. Please note that if you want to delete a VM/VMSS that has been associated with capacity reservation group, you need to disassociate the capacity reservation group first. - enable_automatic_repairs -- Enable automatic repairs - enable_cross_zone_upgrade -- Set this Boolean property will allow VMSS to ignore AZ boundaries when constructing upgrade batches, and only consider Update Domain and maxBatchInstancePercent to determine the batch size - enable_spot_restore -- Enable the Spot-Try-Restore feature where evicted VMSS SPOT instances will be tried to be restored opportunistically based on capacity availability and pricing constraints - enable_terminate_notification -- Enable terminate notification - ephemeral_os_disk_placement -- Only applicable when used with `--vm-sku`. Allows you to choose the Ephemeral OS disk provisioning location. - force_string -- When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON. - instance_id -- Update the VM instance with this ID. If missing, Update the VMSS. - license_type -- Specifies that the Windows image or disk was licensed on-premises. To enable Azure Hybrid Benefit for Windows Server, use 'Windows_Server'. To enable Multitenant Hosting Rights for Windows 10, use 'Windows_Client'. For more information see the Azure Windows VM online docs. 
- max_batch_instance_percent -- The maximum percent of total virtual machine instances that will be upgraded simultaneously by the rolling upgrade in one batch. Default: 20% - max_price -- The maximum price (in US Dollars) you are willing to pay for a Spot VM/VMSS. -1 indicates that the Spot VM/VMSS should not be evicted for price reasons - max_unhealthy_instance_percent -- The maximum percentage of the total virtual machine instances in the scale set that can be simultaneously unhealthy. Default: 20% - max_unhealthy_upgraded_instance_percent -- The maximum percentage of upgraded virtual machine instances that can be found to be in an unhealthy state. Default: 20% - no_wait -- Do not wait for the long-running operation to finish. - pause_time_between_batches -- The wait time between completing the update for all virtual machines in one batch and starting the next batch. Default: 0 seconds - ppg -- The name or ID of the proximity placement group the VMSS should be associated with. - prioritize_unhealthy_instances -- Set this Boolean property will lead to all unhealthy instances in a scale set getting upgraded before any healthy instances - priority -- Priority. Use 'Spot' to run short-lived workloads in a cost-effective way. 'Low' enum will be deprecated in the future. Please use 'Spot' to deploy Azure spot VM and/or VMSS. Default to Regular. - protect_from_scale_in -- Protect the VM instance from scale-in operations. - protect_from_scale_set_actions -- Protect the VM instance from scale set actions (including scale-in). - remove -- Remove a property or an element from a list. Example: --remove property.list <indexToRemove> OR --remove propertyToRemove - scale_in_policy -- Specify the scale-in policy (space delimited) that decides which virtual machines are chosen for removal when a Virtual Machine Scale Set is scaled-in. - set -- Update an object by specifying a property path and value to set. 
Example: --set property1.property2=<value> - spot_restore_timeout -- Timeout value expressed as an ISO 8601 time duration after which the platform will not try to restore the VMSS SPOT instances - terminate_notification_time -- Length of time (in minutes, between 5 and 15) a notification to be sent to the VM on the instance metadata server till the VM gets deleted - ultra_ssd_enabled -- Enables or disables the capability to have 1 or more managed data disks with UltraSSD_LRS storage account - user_data -- UserData for the virtual machines in the scale set. It can be passed in as file or string. If empty string is passed in, the existing value will be deleted. - vm_sku -- The new size of the virtual machine instances in the scale set. Default to "Standard_DS1_v2". See https://azure.microsoft.com/pricing/details/virtual-machines/ for size info. ''' return _call_az("az vmss update", locals()) def update_instances(instance_ids, name, resource_group, no_wait=None): ''' Upgrade VMs within a VMSS. Required Parameters: - instance_ids -- Space-separated list of IDs (ex: 1 2 3 ...) or * for all instances. - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss update-instances", locals()) def wait(name, resource_group, created=None, custom=None, deleted=None, exists=None, instance_id=None, interval=None, timeout=None, updated=None): ''' Place the CLI in a waiting state until a condition of a scale set is met. Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. 
You can configure the default group using `az configure --defaults group=<name>` Optional Parameters: - created -- wait until created with 'provisioningState' at 'Succeeded' - custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running'] - deleted -- wait until deleted - exists -- wait until the resource exists - instance_id -- Wait on the VM instance with this ID. If missing, Wait on the VMSS. - interval -- polling interval in seconds - timeout -- maximum wait in seconds - updated -- wait until updated with provisioningState at 'Succeeded' ''' return _call_az("az vmss wait", locals()) def get_os_upgrade_history(name, resource_group): ''' Required Parameters: - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` ''' return _call_az("az vmss get-os-upgrade-history", locals()) def set_orchestration_service_state(action, name, resource_group, service_name, no_wait=None): ''' Change ServiceState property for a given service within a VMSS. Required Parameters: - action -- The action to be performed. - name -- Scale set name. You can configure the default using `az configure --defaults vmss=<name>` - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>` - service_name -- The name of the orchestration service. Optional Parameters: - no_wait -- Do not wait for the long-running operation to finish. ''' return _call_az("az vmss set-orchestration-service-state", locals())
78.208791
2,164
0.753295
5,279
35,585
4.96344
0.121235
0.03473
0.025189
0.030227
0.593581
0.559957
0.531448
0.51851
0.50561
0.483627
0
0.003336
0.17454
35,585
454
2,165
78.381057
0.888677
0.794492
0
0
0
0
0.078149
0.017776
0
0
0
0
0
1
0.479167
false
0.020833
0.041667
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
b315a7ab57cde2354dac3f1288b4d8c4b34ce5b5
329
py
Python
app/application.py
safo-bora/MobileTestingWithAppium
135fc898222dc82b0179f594f93117cdea0b61e7
[ "MIT" ]
3
2020-05-24T17:48:43.000Z
2022-02-04T22:03:28.000Z
app/application.py
safo-bora/MobileTestingWithAppium
135fc898222dc82b0179f594f93117cdea0b61e7
[ "MIT" ]
null
null
null
app/application.py
safo-bora/MobileTestingWithAppium
135fc898222dc82b0179f594f93117cdea0b61e7
[ "MIT" ]
4
2020-05-13T06:32:17.000Z
2021-11-15T15:33:03.000Z
from features.pages.launch_page import LaunchPage from features.pages.login_page import LoginPage from features.pages.main_page import MainPage class Application: def __init__(self, driver): self.launch_page = LaunchPage(driver) self.login_page = LoginPage(driver) self.main_page = MainPage(driver)
29.909091
49
0.759878
42
329
5.714286
0.404762
0.15
0.2125
0
0
0
0
0
0
0
0
0
0.170213
329
10
50
32.9
0.879121
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.625
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
b317781a0f7ed0d7d20dba0b2eb5b4bfc7c2fa2b
181
py
Python
Codeforces/C_The_Child_and_Toy.py
anubhab-code/Competitive-Programming
de28cb7d44044b9e7d8bdb475da61e37c018ac35
[ "MIT" ]
null
null
null
Codeforces/C_The_Child_and_Toy.py
anubhab-code/Competitive-Programming
de28cb7d44044b9e7d8bdb475da61e37c018ac35
[ "MIT" ]
null
null
null
Codeforces/C_The_Child_and_Toy.py
anubhab-code/Competitive-Programming
de28cb7d44044b9e7d8bdb475da61e37c018ac35
[ "MIT" ]
null
null
null
n,m = list(map(int,input().split())) cost = list(map(int,input().split())) ans=0 for i in range(m): f,s = list(map(int,input().split())) ans += min(cost[f-1],cost[s-1]) print(ans)
25.857143
37
0.607735
37
181
2.972973
0.513514
0.190909
0.272727
0.409091
0.6
0.418182
0
0
0
0
0
0.018519
0.104972
181
7
38
25.857143
0.660494
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.142857
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
b3757dbfeed37e72ecd596a3d9716e0a9bbc3d3e
57
py
Python
pyVenv/Lib/site-packages/bpython/_version.py
thephilosophicaljijutsumaster/InventoryManagement
7c57fcc435976c39b249106642ee848da2eea201
[ "MIT" ]
null
null
null
pyVenv/Lib/site-packages/bpython/_version.py
thephilosophicaljijutsumaster/InventoryManagement
7c57fcc435976c39b249106642ee848da2eea201
[ "MIT" ]
12
2020-07-05T14:30:46.000Z
2020-08-06T21:06:00.000Z
pyVenv/Lib/site-packages/bpython/_version.py
thephilosophicaljijutsumaster/InventoryManagement
7c57fcc435976c39b249106642ee848da2eea201
[ "MIT" ]
null
null
null
# Auto-generated file, do not edit! __version__ = '0.19'
19
35
0.701754
9
57
4
1
0
0
0
0
0
0
0
0
0
0
0.0625
0.157895
57
2
36
28.5
0.6875
0.578947
0
0
1
0
0.181818
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
2fa1084a1feeed4394f0865ef6038f23e9ec3185
570
py
Python
src/webdev/server-side/hello/application.py
tangaw/cs50
1ed09b3199f6a366c43d3b0ecd89481b1650af17
[ "MIT" ]
null
null
null
src/webdev/server-side/hello/application.py
tangaw/cs50
1ed09b3199f6a366c43d3b0ecd89481b1650af17
[ "MIT" ]
6
2021-02-07T21:47:03.000Z
2021-03-04T20:56:34.000Z
src/webdev/server-side/hello/application.py
tangaw/cs50
1ed09b3199f6a366c43d3b0ecd89481b1650af17
[ "MIT" ]
null
null
null
from flask import Flask, render_template, request app = Flask(__name__) # Turns current file into an application # Specifying URL path/route @app.route("/") # `@` is a specialy Python declarator def index(): # Render HTML template # Use the `request` library to parse URL input "name" and store in variable `name` # Set "world" as default value for `name` return render_template("index.html") # Point to alternate URL path @app.route("/greet", methods=['POST']) def greet(): return render_template("greet.html", name=request.form.get("name", "world"))
31.666667
84
0.712281
81
570
4.925926
0.617284
0.105263
0.100251
0
0
0
0
0
0
0
0
0
0.159649
570
18
85
31.666667
0.832985
0.473684
0
0
0
0
0.136519
0
0
0
0
0
0
1
0.25
false
0
0.125
0.25
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
2faab8a47f14e1e80ef9778047cb061953ec9fa6
357
py
Python
Meus_dessafios/Exercicios2021/ex109/testes.py
DiegoSilvaHoffmann/Curso-de-Python
62824bbd3ed42b256fda77acd49536ec7cf23b29
[ "MIT" ]
null
null
null
Meus_dessafios/Exercicios2021/ex109/testes.py
DiegoSilvaHoffmann/Curso-de-Python
62824bbd3ed42b256fda77acd49536ec7cf23b29
[ "MIT" ]
null
null
null
Meus_dessafios/Exercicios2021/ex109/testes.py
DiegoSilvaHoffmann/Curso-de-Python
62824bbd3ed42b256fda77acd49536ec7cf23b29
[ "MIT" ]
null
null
null
from ex109 import moeda valor = float(input('Informe um valor: R$')) print(f'A metade de {moeda.moeda(valor)}, é {moeda.metade(valor, True)}.') print(f'O dobro de {moeda.moeda(valor)}, é {moeda.dobro(valor, True)}.') print(f'Com aumento de 10%, fica {moeda.aumentar(valor, 10, True)}.') print(f'Com 15% de desconto, fica {moeda.diminuir(valor, 15, True)}')
51
74
0.691877
61
357
4.04918
0.442623
0.097166
0.121457
0.137652
0.186235
0.186235
0
0
0
0
0
0.0347
0.112045
357
6
75
59.5
0.74448
0
0
0
0
0
0.739496
0.240896
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.666667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
2fb88e1929d9b8ffe0a84fc88c2c6ff310a6ba11
228
py
Python
config.py
gtracy/APODEmail
328dbb610963c23af4e1ff9285cd7bf3dcce2b6c
[ "MIT" ]
1
2021-09-18T11:28:16.000Z
2021-09-18T11:28:16.000Z
config.py
gtracy/APODEmail
328dbb610963c23af4e1ff9285cd7bf3dcce2b6c
[ "MIT" ]
1
2020-01-11T22:27:00.000Z
2020-01-12T03:38:07.000Z
config.py
gtracy/APODEmail
328dbb610963c23af4e1ff9285cd7bf3dcce2b6c
[ "MIT" ]
null
null
null
API_SECRET = 'fluffernutterpie' # google reCAPTCHA sign-up # https://www.google.com/recaptcha/admin RECAPTCHA_KEY = '6LedVBUUAAAAANb2vWVUSByvl66ob3k9r-zSruCu' RECAPTCHA_PRIVATE_KEY = '6LedVBUUAAAAALWe4MQ2VJQhI9rKi6GJTukS6Hrl'
28.5
66
0.837719
21
228
8.904762
0.761905
0
0
0
0
0
0
0
0
0
0
0.056872
0.074561
228
7
67
32.571429
0.829384
0.276316
0
0
0
0
0.596273
0.496894
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
2fcaa216027e124c0296f7d075047c948f06e37e
100
py
Python
tests/runbook/roles/slow_output/main.py
acoomans/prvsn
af6b313c2e779ae4e3a9cdba0b1c3a1f4b4c085e
[ "BSD-2-Clause" ]
null
null
null
tests/runbook/roles/slow_output/main.py
acoomans/prvsn
af6b313c2e779ae4e3a9cdba0b1c3a1f4b4c085e
[ "BSD-2-Clause" ]
null
null
null
tests/runbook/roles/slow_output/main.py
acoomans/prvsn
af6b313c2e779ae4e3a9cdba0b1c3a1f4b4c085e
[ "BSD-2-Clause" ]
null
null
null
bash(''' echo "hello" ''') bash(''' for i in $(seq 1 10); do echo $i sleep 2 done ''')
6.25
21
0.46
16
100
2.875
0.8125
0
0
0
0
0
0
0
0
0
0
0.057971
0.31
100
15
22
6.666667
0.608696
0
0
0.4
0
0
0.727273
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
2febe828324595b72252acdfcd42dc8ab022af2d
1,156
py
Python
scraper/storage_spiders/huyhoangvn.py
chongiadung/choinho
d2a216fe7a5064d73cdee3e928a7beef7f511fd1
[ "MIT" ]
null
null
null
scraper/storage_spiders/huyhoangvn.py
chongiadung/choinho
d2a216fe7a5064d73cdee3e928a7beef7f511fd1
[ "MIT" ]
10
2020-02-11T23:34:28.000Z
2022-03-11T23:16:12.000Z
scraper/storage_spiders/huyhoangvn.py
chongiadung/choinho
d2a216fe7a5064d73cdee3e928a7beef7f511fd1
[ "MIT" ]
3
2018-08-05T14:54:25.000Z
2021-06-07T01:49:59.000Z
# Auto generated by generator.py. Delete this line if you make modification. from scrapy.spiders import Rule from scrapy.linkextractors import LinkExtractor XPATH = { 'name' : "//div[@class='main-full-content']/div[@class='content-center fl']/div[@class='content-main']/h1", 'price' : "//div[@class='info-detail']/div[@class='su-pi']/div/span[@class='price-new']|//div[@class='spritespin-stage']//img/@src", 'category' : "//div[@class='content-center fl']/div[@class='content-main']/div[@class='tree-url']/a", 'description' : "//div[@class='content-main']/div[@id='tab-content']/div[@class='multi-tab-content']/div[@class='tong-quan block']", 'images' : "//meta[@property='og:image']/@content", 'canonical' : "", 'base_url' : "//base/@href", 'brand' : "" } name = 'huyhoang.vn' allowed_domains = ['huyhoang.vn'] start_urls = ['http://huyhoang.vn'] tracking_url = '' sitemap_urls = [''] sitemap_rules = [('', 'parse_item')] sitemap_follow = [''] rules = [ #Rule(LinkExtractor(), 'parse_item'), #Rule(LinkExtractor(), 'parse'), Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+($|\?p=\d+$)']), 'parse_item_and_links'), ]
42.814815
136
0.636678
148
1,156
4.891892
0.547297
0.132597
0.103591
0.078729
0.150552
0.116022
0.116022
0.116022
0.116022
0
0
0.002918
0.110727
1,156
26
137
44.461538
0.701362
0.121972
0
0
1
0.181818
0.60633
0.459941
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
2ff23e263f8eda218cf1246796602e91343eaac8
7,611
py
Python
pymbt/constants/molecular_bio.py
klavinslab/pymbt-legacy
d638aecd954664e416ed28b30cdbbcfcb176bae5
[ "ECL-2.0", "Apache-2.0" ]
34
2015-12-26T22:13:51.000Z
2021-11-17T11:46:37.000Z
pymbt/constants/molecular_bio.py
klavinslab/pymbt-legacy
d638aecd954664e416ed28b30cdbbcfcb176bae5
[ "ECL-2.0", "Apache-2.0" ]
13
2015-09-11T23:27:51.000Z
2018-06-25T20:44:28.000Z
pymbt/constants/molecular_bio.py
klavinslab/pymbt-legacy
d638aecd954664e416ed28b30cdbbcfcb176bae5
[ "ECL-2.0", "Apache-2.0" ]
14
2015-10-08T17:08:48.000Z
2022-02-22T04:25:54.000Z
'''Defines data and parameters in an easily resuable format.''' # Common sequence alphabets. ALPHABETS = { 'dna': 'ATGCNatgcn-', 'rna': 'AUGCNaugcn', 'peptide': 'ACDEFGHIKLMNPQRSTVWYXacdefghiklmnpqrstvwyx'} COMPLEMENTS = { 'dna': {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G', 'N': 'N', 'a': 't', 't': 'a', 'g': 'c', 'c': 'g', 'n': 'n', '-': '-'}, 'rna': {'A': 'U', 'U': 'A', 'G': 'C', 'C': 'G', 'N': 'N', 'a': 'u', 'u': 'a', 'g': 'c', 'c': 'g', 'n': 'n'}} # The standard codon table. CODON_TABLE = { 'A': ['GCG', 'GCA', 'GCU', 'GCC'], 'R': ['AGG', 'AGA', 'CGG', 'CGA', 'CGU', 'CGC'], 'N': ['AAU', 'AAC'], 'D': ['GAU', 'GAC'], 'C': ['UGU', 'UGC'], '*': ['UGA', 'UAG', 'UAA'], 'Q': ['CAG', 'CAA'], 'E': ['GAG', 'GAA'], 'G': ['GGG', 'GGA', 'GGU', 'GGC'], 'H': ['CAU', 'CAC'], 'I': ['AUA', 'AUU', 'AUC'], 'L': ['UUG', 'UUA', 'CUG', 'CUA', 'CUU', 'CUC'], 'K': ['AAG', 'AAA'], 'M': ['AUG'], 'F': ['UUU', 'UUC'], 'P': ['CCG', 'CCA', 'CCU', 'CCC'], 'S': ['AGU', 'AGC', 'UCG', 'UCA', 'UCU', 'UCC'], 'T': ['ACG', 'ACA', 'ACU', 'ACC'], 'W': ['UGG'], 'Y': ['UAU', 'UAC'], 'V': ['GUG', 'GUA', 'GUU', 'GUC']} # Saccharomyces cerevisiae # source: http://www.kazusa.or.jp/codon/ # (which cites GenBank, i.e. 
yeast genome project CDS database) CODON_FREQ = { 'sc': { 'GCG': 0.109972396541529, 'GCA': 0.288596474496094, 'GCU': 0.377014739102356, 'GCC': 0.224416389860021, 'AGG': 0.208564104515562, 'AGA': 0.481137590939125, 'CGG': 0.0392677130215486, 'CGA': 0.0676728924436203, 'CGU': 0.144572019635586, 'CGC': 0.0587856794445578, 'AAU': 0.589705127199784, 'AAC': 0.410294872800217, 'GAU': 0.65037901553924, 'GAC': 0.34962098446076, 'UGU': 0.629812614586062, 'UGC': 0.370187385413938, 'UGA': 0.303094329334787, 'UAG': 0.225736095965104, 'UAA': 0.471169574700109, 'CAG': 0.307418833439535, 'CAA': 0.692581166560465, 'GAG': 0.296739610207218, 'GAA': 0.703260389792782, 'GGG': 0.119057918187951, 'GGA': 0.215422869017838, 'GGU': 0.472217600813099, 'GGC': 0.193301611981112, 'CAU': 0.636710255236351, 'CAC': 0.363289744763649, 'AUA': 0.273331091899568, 'AUU': 0.462925823433014, 'AUC': 0.263743084667417, 'UUG': 0.286319859527146, 'UUA': 0.275534472444779, 'CUG': 0.110440170850593, 'CUA': 0.141277445174148, 'CUU': 0.129115062940288, 'CUC': 0.0573129890630467, 'AAG': 0.423936637198697, 'AAA': 0.576063362801303, 'AUG': 1, 'UUU': 0.586126603840976, 'UUC': 0.413873396159024, 'CCG': 0.120626895854398, 'CCA': 0.417143753704543, 'CCU': 0.307740315888567, 'CCC': 0.154489034552491, 'AGU': 0.159245398699046, 'AGC': 0.109749229743856, 'UCG': 0.0963590866114069, 'UCA': 0.210157220085731, 'UCU': 0.264456618519558, 'UCC': 0.160032446340401, 'ACG': 0.135583991997041, 'ACA': 0.302413913478422, 'ACU': 0.345237040780705, 'ACC': 0.216765053743832, 'UGG': 1, 'UAU': 0.559573963633711, 'UAC': 0.440426036366289, 'GUG': 0.190897642582249, 'GUA': 0.208783185960798, 'GUU': 0.391481704636128, 'GUC': 0.208837466820824}} # Codon usage organized by organism, then amino acid CODON_FREQ_BY_AA = { 'sc': { 'A': {'GCG': 0.109972396541529, 'GCA': 0.288596474496094, 'GCU': 0.377014739102356, 'GCC': 0.224416389860021}, 'R': {'AGG': 0.208564104515562, 'AGA': 0.481137590939125, 'CGG': 0.0392677130215486, 'CGA': 0.0676728924436203, 
'CGU': 0.144572019635586, 'CGC': 0.0587856794445578}, 'N': {'AAU': 0.589705127199784, 'AAC': 0.410294872800217}, 'D': {'GAU': 0.65037901553924, 'GAC': 0.34962098446076}, 'C': {'UGU': 0.629812614586062, 'UGC': 0.370187385413938}, '*': {'UGA': 0.303094329334787, 'UAG': 0.225736095965104, 'UAA': 0.471169574700109}, 'Q': {'CAG': 0.307418833439535, 'CAA': 0.692581166560465}, 'E': {'GAG': 0.296739610207218, 'GAA': 0.703260389792782}, 'G': {'GGG': 0.119057918187951, 'GGA': 0.215422869017838, 'GGU': 0.472217600813099, 'GGC': 0.193301611981112}, 'H': {'CAU': 0.636710255236351, 'CAC': 0.363289744763649}, 'I': {'AUA': 0.273331091899568, 'AUU': 0.462925823433014, 'AUC': 0.263743084667417}, 'L': {'UUG': 0.286319859527146, 'UUA': 0.275534472444779, 'CUG': 0.110440170850593, 'CUA': 0.141277445174148, 'CUU': 0.129115062940288, 'CUC': 0.0573129890630467}, 'K': {'AAG': 0.423936637198697, 'AAA': 0.576063362801303}, 'M': {'AUG': 1}, 'F': {'UUU': 0.586126603840976, 'UUC': 0.413873396159024}, 'P': {'CCG': 0.120626895854398, 'CCA': 0.417143753704543, 'CCU': 0.307740315888567, 'CCC': 0.154489034552491}, 'S': {'AGU': 0.159245398699046, 'AGC': 0.109749229743856, 'UCG': 0.0963590866114069, 'UCA': 0.210157220085731, 'UCU': 0.264456618519558, 'UCC': 0.160032446340401}, 'T': {'ACG': 0.135583991997041, 'ACA': 0.302413913478422, 'ACU': 0.345237040780705, 'ACC': 0.216765053743832}, 'W': {'UGG': 1}, 'Y': {'UAU': 0.559573963633711, 'UAC': 0.440426036366289}, 'V': {'GUG': 0.190897642582249, 'GUA': 0.208783185960798, 'GUU': 0.391481704636128, 'GUC': 0.208837466820824}}} # Complete list of codons. 
CODONS = {'AAA': 'K', 'AAC': 'N', 'AAG': 'K', 'AAU': 'N', 'ACA': 'T', 'ACC': 'T', 'ACG': 'T', 'ACU': 'T', 'AGA': 'R', 'AGC': 'S', 'AGG': 'R', 'AGU': 'S', 'AUA': 'I', 'AUC': 'I', 'AUG': 'M', 'AUU': 'I', 'CAA': 'Q', 'CAC': 'H', 'CAG': 'Q', 'CAU': 'H', 'CCA': 'P', 'CCC': 'P', 'CCG': 'P', 'CCU': 'P', 'CGA': 'R', 'CGC': 'R', 'CGG': 'R', 'CGU': 'R', 'CUA': 'L', 'CUC': 'L', 'CUG': 'L', 'CUU': 'L', 'GAA': 'E', 'GAC': 'D', 'GAG': 'E', 'GAU': 'D', 'GCA': 'A', 'GCC': 'A', 'GCG': 'A', 'GCU': 'A', 'GGA': 'G', 'GGC': 'G', 'GGG': 'G', 'GGU': 'G', 'GUA': 'V', 'GUC': 'V', 'GUG': 'V', 'GUU': 'V', 'UAA': '*', 'UAC': 'Y', 'UAG': '*', 'UAU': 'Y', 'UCA': 'S', 'UCC': 'S', 'UCG': 'S', 'UCU': 'S', 'UGA': '*', 'UGC': 'C', 'UGG': 'W', 'UGU': 'C', 'UUA': 'L', 'UUC': 'F', 'UUG': 'L', 'UUU': 'F'}
30.689516
71
0.434109
717
7,611
4.601116
0.281729
0.002425
0.003637
0.00485
0.728099
0.728099
0.545014
0.545014
0.545014
0.510458
0
0.40348
0.350677
7,611
247
72
30.813765
0.264063
0.041125
0
0.220264
0
0
0.137524
0.005764
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
2ff5830b670322ea798889ed6512f739eb75aafd
1,378
py
Python
pyy1/.pycharm_helpers/python_stubs/-1550516950/_dbus_bindings/MethodCallMessage.py
pyy1988/pyy_test1
6bea878409e658aa87441384419be51aaab061e7
[ "Apache-2.0" ]
null
null
null
pyy1/.pycharm_helpers/python_stubs/-1550516950/_dbus_bindings/MethodCallMessage.py
pyy1988/pyy_test1
6bea878409e658aa87441384419be51aaab061e7
[ "Apache-2.0" ]
null
null
null
pyy1/.pycharm_helpers/python_stubs/-1550516950/_dbus_bindings/MethodCallMessage.py
pyy1988/pyy_test1
6bea878409e658aa87441384419be51aaab061e7
[ "Apache-2.0" ]
null
null
null
# encoding: utf-8 # module _dbus_bindings # from /usr/lib/python3/dist-packages/_dbus_bindings.cpython-35m-x86_64-linux-gnu.so # by generator 1.145 """ Low-level Python bindings for libdbus. Don't use this module directly - the public API is provided by the `dbus`, `dbus.service`, `dbus.mainloop` and `dbus.mainloop.glib` modules, with a lower-level API provided by the `dbus.lowlevel` module. """ # imports import dbus.lowlevel as __dbus_lowlevel class MethodCallMessage(__dbus_lowlevel.Message): """ A method-call message. Constructor:: dbus.lowlevel.MethodCallMessage(destination: str or None, path: str, interface: str or None, method: str) ``destination`` is the destination bus name, or None to send the message directly to the peer (usually the bus daemon). ``path`` is the object-path of the object whose method is to be called. ``interface`` is the interface qualifying the method name, or None to omit the interface from the message header. ``method`` is the method name (member name). """ def __init__(self, destination_or_None, path, interface_or_None, method): # real signature unknown; restored from __doc__ pass def __repr__(self, *args, **kwargs): # real signature unknown """ Return repr(self). """ pass
32.046512
125
0.677068
186
1,378
4.870968
0.489247
0.039735
0.028698
0.037528
0
0
0
0
0
0
0
0.011289
0.228592
1,378
42
126
32.809524
0.841016
0.740203
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0.166667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
64075624df733eb1202dba9a9f571b9cd81d7495
719
py
Python
jogo/objetos/objeto.py
luizklitzke1/SpaceInvaders-PyArcade
82f3e594b05e4362dcb7092dff0ab50699b8cb3b
[ "MIT" ]
6
2020-01-10T00:35:53.000Z
2020-05-11T23:40:17.000Z
jogo/objetos/objeto.py
luizklitzke1/SpaceInvader-PyArcade
82f3e594b05e4362dcb7092dff0ab50699b8cb3b
[ "MIT" ]
null
null
null
jogo/objetos/objeto.py
luizklitzke1/SpaceInvader-PyArcade
82f3e594b05e4362dcb7092dff0ab50699b8cb3b
[ "MIT" ]
null
null
null
import arcade class Objeto(arcade.Sprite): def __init__(self, imagem, dimensao_imagem, center_x=0, center_y=0): super().__init__(imagem, dimensao_imagem) self.center_x = center_x self.center_y = center_y def set_center_x(self, novo_x): self.center_x = novo_x def get_center_x(self): return self.center_x def set_center_y(self, novo_y): self.center_y = novo_y def get_center_y(self): return self.center_y #Remove o objeto da tela caso passe dos limites: def remover_se_sair(self, width, height, bottom, top, left, right): if bottom < 50 or top > height - 70 or right < 0 or left > width: self.kill()
27.653846
73
0.642559
110
719
3.890909
0.372727
0.114486
0.077103
0.093458
0
0
0
0
0
0
0
0.013333
0.269819
719
25
74
28.76
0.801905
0.065369
0
0
0
0
0
0
0
0
0
0
0
1
0.352941
false
0
0.058824
0.117647
0.588235
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
640a1e9c0021e877850d77dfcd8d80863636459f
2,232
py
Python
aliyun-python-sdk-cms/aliyunsdkcms/request/v20180308/CreateTaskRequest.py
liusc27/aliyun-openapi-python-sdk
5e3db3535dd21de987dc5981e71151327d5a884f
[ "Apache-2.0" ]
1
2019-12-23T12:36:43.000Z
2019-12-23T12:36:43.000Z
aliyun-python-sdk-cms/aliyunsdkcms/request/v20180308/CreateTaskRequest.py
liusc27/aliyun-openapi-python-sdk
5e3db3535dd21de987dc5981e71151327d5a884f
[ "Apache-2.0" ]
null
null
null
aliyun-python-sdk-cms/aliyunsdkcms/request/v20180308/CreateTaskRequest.py
liusc27/aliyun-openapi-python-sdk
5e3db3535dd21de987dc5981e71151327d5a884f
[ "Apache-2.0" ]
null
null
null
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from aliyunsdkcore.request import RpcRequest class CreateTaskRequest(RpcRequest): def __init__(self): RpcRequest.__init__(self, 'Cms', '2018-03-08', 'CreateTask','cms') def get_Address(self): return self.get_query_params().get('Address') def set_Address(self,Address): self.add_query_param('Address',Address) def get_TaskType(self): return self.get_query_params().get('TaskType') def set_TaskType(self,TaskType): self.add_query_param('TaskType',TaskType) def get_IspCity(self): return self.get_query_params().get('IspCity') def set_IspCity(self,IspCity): self.add_query_param('IspCity',IspCity) def get_AlertIds(self): return self.get_query_params().get('AlertIds') def set_AlertIds(self,AlertIds): self.add_query_param('AlertIds',AlertIds) def get_Options(self): return self.get_query_params().get('Options') def set_Options(self,Options): self.add_query_param('Options',Options) def get_TaskName(self): return self.get_query_params().get('TaskName') def set_TaskName(self,TaskName): self.add_query_param('TaskName',TaskName) def get_Interval(self): return self.get_query_params().get('Interval') def set_Interval(self,Interval): self.add_query_param('Interval',Interval) def get_AlertRule(self): return 
self.get_query_params().get('AlertRule') def set_AlertRule(self,AlertRule): self.add_query_param('AlertRule',AlertRule)
31
69
0.749104
316
2,232
5.113924
0.322785
0.029703
0.069307
0.084158
0.153465
0.153465
0.153465
0
0
0
0
0.006283
0.144265
2,232
72
70
31
0.839791
0.337814
0
0
0
0
0.106232
0
0
0
0
0
0
1
0.472222
false
0
0.027778
0.222222
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
64159686010cda84f115c8c65c316794d2fa1f14
350
py
Python
tests/test_checks.py
PeterJCLaw/flake8-multiline-containers
490fab7dfe0cbe306841c179d9d55abc0a2ed2b2
[ "MIT" ]
null
null
null
tests/test_checks.py
PeterJCLaw/flake8-multiline-containers
490fab7dfe0cbe306841c179d9d55abc0a2ed2b2
[ "MIT" ]
null
null
null
tests/test_checks.py
PeterJCLaw/flake8-multiline-containers
490fab7dfe0cbe306841c179d9d55abc0a2ed2b2
[ "MIT" ]
null
null
null
from flake8_multiline_containers import ErrorCodes def test_check_opening_contains_error(linter): linter._check_opening('{', '}', 0, "foo={a\n", ErrorCodes.JS101) assert 1 == len(linter.errors) def test_check_opening_no_error(linter): linter._check_opening('{', '}', 0, "foo={\n", ErrorCodes.JS101) assert 0 == len(linter.errors)
29.166667
68
0.714286
47
350
5.021277
0.489362
0.20339
0.101695
0.161017
0.279661
0.279661
0.279661
0
0
0
0
0.036184
0.131429
350
11
69
31.818182
0.740132
0
0
0
0
0
0.054286
0
0
0
0
0
0.285714
1
0.285714
false
0
0.142857
0
0.428571
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
641e55ec8c37dd8e31dcede4ac9789370a659741
1,840
py
Python
student_records/migrations/0001_initial.py
HillaryOkello/CIT-blockchain-DB
0b931e196da43a87bf842cee0e130f57557169bd
[ "MIT" ]
1
2021-12-10T13:54:38.000Z
2021-12-10T13:54:38.000Z
student_records/migrations/0001_initial.py
HillaryOkello/CIT-blockchain-DB
0b931e196da43a87bf842cee0e130f57557169bd
[ "MIT" ]
null
null
null
student_records/migrations/0001_initial.py
HillaryOkello/CIT-blockchain-DB
0b931e196da43a87bf842cee0e130f57557169bd
[ "MIT" ]
null
null
null
# Generated by Django 3.1.1 on 2021-11-29 09:44 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='StudentRecord', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('first_name', models.CharField(max_length=50)), ('last_name', models.CharField(max_length=50)), ('dob', models.CharField(max_length=50)), ('address', models.CharField(max_length=100)), ('phone', models.CharField(max_length=50)), ('email', models.CharField(max_length=100)), ('date_of_enrollment', models.DateField(auto_now_add=True)), ('quiz_score', models.IntegerField(default=0)), ('python_entry_level', models.CharField(choices=[('pass', 'PASS'), ('fail', 'FAIL')], default='pass', max_length=4)), ('aws_practitioner_exam', models.CharField(choices=[('pass', 'PASS'), ('fail', 'FAIL')], default='pass', max_length=4)), ('python_associate_exam', models.CharField(choices=[('pass', 'PASS'), ('fail', 'FAIL')], default='pass', max_length=4)), ('blockchain_exam', models.CharField(choices=[('pass', 'PASS'), ('fail', 'FAIL')], default='pass', max_length=4)), ('aws_ml_exam', models.CharField(choices=[('pass', 'PASS'), ('fail', 'FAIL')], default='pass', max_length=4)), ('dissertation', models.CharField(default='pass', max_length=50)), ('date_of_graduation', models.DateField(auto_now_add=True)), ('job_placement', models.CharField(max_length=100)), ], ), ]
49.72973
136
0.580435
196
1,840
5.260204
0.377551
0.189137
0.122211
0.162949
0.550921
0.42192
0.307468
0.307468
0.307468
0.307468
0
0.028653
0.241304
1,840
36
137
51.111111
0.709885
0.024457
0
0
1
0
0.176799
0.023424
0
0
0
0
0
1
0
false
0.206897
0.034483
0
0.172414
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
64364d57fbf77402a81c753d147e6d8619dee81f
828
py
Python
expose.py
mzal/pysweeper
175ae570c001760d4f7a5122a5eb0b50ef36e046
[ "MIT" ]
null
null
null
expose.py
mzal/pysweeper
175ae570c001760d4f7a5122a5eb0b50ef36e046
[ "MIT" ]
null
null
null
expose.py
mzal/pysweeper
175ae570c001760d4f7a5122a5eb0b50ef36e046
[ "MIT" ]
null
null
null
def expose(board, x, y):
    """Expose the cell at (x, y) and flood-fill outward from zero-neighbour cells.

    ``board`` is a 2-D grid (rows indexed by y) of cell objects carrying boolean
    ``flagged``, ``exposed``, ``mine`` attributes and an integer ``neighbours``
    count.  Flagged cells are never exposed.  The fill stops at the outer border
    of the grid, at mines, and at cells with a non-zero neighbour count.
    Always returns 0 (kept for compatibility with the original interface).
    """
    cell = board[y][x]
    if not cell.flagged:
        cell.exposed = True
    # Fixed: removed a leftover debug print(x, y, sep='\t') that polluted stdout.
    # Border cells and mines terminate the fill immediately.
    if x == 0 or x == len(board[0]) - 1 or y == 0 or y == len(board) - 1 or cell.mine:
        return 0
    if cell.neighbours == 0:
        # Recurse into each of the 8 neighbours that is not yet exposed, in the
        # same row-major order as the original hand-unrolled calls.  The border
        # check above guarantees x±1 / y±1 are in range.
        for dy in (-1, 0, 1):
            for dx in (-1, 0, 1):
                if dx == 0 and dy == 0:
                    continue
                if board[y + dy][x + dx].exposed is False:
                    expose(board, x + dx, y + dy)
    return 0
43.578947
93
0.556763
166
828
2.777108
0.138554
0.156182
0.234273
0.347072
0.67679
0.67679
0.67679
0.67679
0.67679
0.67679
0
0.05153
0.25
828
18
94
46
0.690821
0.033816
0
0.133333
0
0
0
0
0
0
0
0
0
1
0.066667
false
0
0
0
0.2
0
0
0
0
null
0
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
ff306b64d40f76058b91d06bd6c1cdf8dfdec22e
578
py
Python
exiftool/exiftool/commands.py
GuillaumeCouchard/snake-scales
1b4c84130295dd3ba2c47b884db73dc9c68bfec5
[ "BSD-3-Clause" ]
5
2018-03-15T21:02:19.000Z
2021-07-25T11:34:22.000Z
exiftool/exiftool/commands.py
GuillaumeCouchard/snake-scales
1b4c84130295dd3ba2c47b884db73dc9c68bfec5
[ "BSD-3-Clause" ]
7
2019-05-08T09:33:27.000Z
2021-02-24T02:10:40.000Z
exiftool/exiftool/commands.py
GuillaumeCouchard/snake-scales
1b4c84130295dd3ba2c47b884db73dc9c68bfec5
[ "BSD-3-Clause" ]
8
2018-04-21T17:55:34.000Z
2021-09-02T12:54:51.000Z
import shutil
import subprocess

from snake import error
from snake import scale


class Commands(scale.Commands):
    """Exiftool scale: exposes one command that dumps a file's EXIF metadata."""

    def check(self):
        # Fail fast when the exiftool binary is not on PATH.
        if shutil.which("exiftool") is None:
            raise error.CommandError("binary 'exiftool' not found")

    @scale.command({
        'info': 'parse exif data of the file passed'
    })
    def exiftool(self, args, file, opts):
        # Run exiftool on the sample and decode its raw output as latin-1.
        raw_output = subprocess.check_output(['exiftool', file.file_path])
        return {'exiftool': str(raw_output, encoding='latin-1')}

    def exiftool_plaintext(self, json):
        # The plaintext view is simply the stored exiftool dump.
        return json['exiftool']
26.272727
107
0.6609
70
578
5.414286
0.571429
0.047493
0.079156
0
0
0
0
0
0
0
0
0.002227
0.223183
578
21
108
27.52381
0.841871
0
0
0
0
0
0.179931
0
0
0
0
0
0
1
0.1875
false
0.0625
0.25
0.125
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
1
1
0
0
3
ff4ae0fb493ff3a426fea0f383a07b7ed27fd160
2,984
py
Python
init.py
vherrin/MatrixNTFScan
0f9fb0a8f8f262786456726a00dde9ba31ba54fa
[ "MIT" ]
null
null
null
init.py
vherrin/MatrixNTFScan
0f9fb0a8f8f262786456726a00dde9ba31ba54fa
[ "MIT" ]
null
null
null
init.py
vherrin/MatrixNTFScan
0f9fb0a8f8f262786456726a00dde9ba31ba54fa
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# coding: utf-8

# Exported from a Jupyter notebook: loads per-contract attribute CSV chunks
# from ./data/ and reports how many rows carry valid attribute data.

import pandas as pd
import glob
import numpy as np
import cloudscraper
import re
import math
import shutil
import os
import sys
import cfscrape
import time
import requests
import IPython
import webbrowser
import json
import logging
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from datetime import date
from IPython.display import HTML
from IPython.display import display
from IPython.display import clear_output
from urllib.request import urlopen
from bs4 import BeautifulSoup

pd.set_option("display.max_rows", None)

dataDir = './data/'
contractURLs = ['matrix-red-contract/', 'matrix-blue-contract/']

attributeList = {}
dfDict = {}
contractColumnDict = {}

for contractURL in contractURLs:
    contractPath = dataDir + contractURL

    # Column names are cached on disk the first time a contract is scanned.
    if os.path.exists(contractPath + 'retrievedColumnsList.txt'):
        with open(contractPath + 'retrievedColumnsList.txt', 'r') as filehandle:
            contractColumnDict[contractURL] = json.load(filehandle)

    # The attribute dump is chunked into twenty CSVs of 5000 token IDs each:
    # Attributes_1_5000.csv, Attributes_5001_10000.csv, ..., Attributes_95001_100000.csv.
    # Generate the names from that pattern instead of hand-writing all twenty
    # (the original hard-coded list was error-prone to edit).
    attributeList[contractURL] = [
        '{}Attributes_{}_{}.csv'.format(contractPath, i * 5000 + 1, (i + 1) * 5000)
        for i in range(20)
    ]

    dfDict[contractURL] = pd.concat(map(pd.read_csv, attributeList[contractURL]))

    # Prepend the implicit ID column so headers line up with the CSV layout.
    mci = contractColumnDict[contractURL].copy()
    mci.insert(0, 'ID')
    dfDict[contractURL].columns = mci.copy()
    print('Length of ' + contractURL + ' attributes added ', len(dfDict[contractURL].index))

    # A row counts as valid when its 'Background' attribute is present (non-NaN).
    dfTemp = dfDict[contractURL]
    validCount = len(dfTemp.index) - len(np.where(pd.isnull(dfTemp['Background']))[0])
    print('Number of valid rows with ' + contractURL + ' attributes ', validCount)
    print('\n')
28.692308
93
0.732909
325
2,984
6.593846
0.436923
0.20532
0.221652
0.033598
0.043864
0.043864
0.043864
0
0
0
0
0.093539
0.154491
2,984
103
94
28.970874
0.755846
0.122319
0
0
0
0
0.272098
0.224443
0
0
0
0
0
1
0
false
0
0.375
0
0.375
0.046875
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
ff4b19bb820d8551888322ab2be543dee4241e4b
101
py
Python
CoV19/base/urls.py
just-ary27/CovBot-revamp
31af847237c4c5e7d5086a78950d06ecfd81318f
[ "MIT" ]
1
2021-05-12T18:44:30.000Z
2021-05-12T18:44:30.000Z
CoV19/needs/urls.py
just-ary27/CovBot-revamp
31af847237c4c5e7d5086a78950d06ecfd81318f
[ "MIT" ]
2
2021-09-22T18:41:37.000Z
2022-02-10T09:28:52.000Z
CoV19/needs/urls.py
just-ary27/CovBot-revamp
31af847237c4c5e7d5086a78950d06ecfd81318f
[ "MIT" ]
null
null
null
from django.urls import path, include

from . import views

# Route the app root ('') to the index view.
urlpatterns = [
    path('', views.index),
]
16.833333
36
0.70297
13
101
5.461538
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.168317
101
6
37
16.833333
0.845238
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
ff5ee06aeacb21a9acd7fe5496e8ffa95107915e
1,569
py
Python
src/backend/tests/test_sort_record_by_date.py
sico/recordexpungPDX
c2f18322014add7c78b27736bde2d29d1d086aa8
[ "MIT" ]
38
2019-05-09T03:13:43.000Z
2022-03-16T22:59:25.000Z
src/backend/tests/test_sort_record_by_date.py
sico/recordexpungPDX
c2f18322014add7c78b27736bde2d29d1d086aa8
[ "MIT" ]
938
2019-05-02T15:13:21.000Z
2022-02-27T20:59:00.000Z
src/backend/tests/test_sort_record_by_date.py
kenichi/recordexpungPDX
100d9249473a01953451b83a72ec1b74574acc43
[ "MIT" ]
65
2019-05-09T03:28:12.000Z
2022-03-21T00:06:39.000Z
from expungeservice.record_creator import RecordCreator
from expungeservice.models.record import Record
from tests.factories.case_factory import CaseFactory


def _assert_case_order(record, expected_numbers):
    """Assert that record.cases appear in the given case_number order."""
    for position, number in enumerate(expected_numbers):
        assert record.cases[position].summary.case_number == number


def test_sort_by_case_date():
    case1 = CaseFactory.create(case_number="1", date_location=["1/1/2018", "Multnomah"])
    case2 = CaseFactory.create(case_number="2", date_location=["1/1/2019", "Multnomah"])
    case3 = CaseFactory.create(case_number="3", date_location=["1/1/2020", "Multnomah"])
    record = Record(tuple([case1, case2, case3]))
    _assert_case_order(record, ["1", "2", "3"])

    # Sorting must put the most recent case first.
    sorted_record = RecordCreator.sort_record_by_case_date(record)
    _assert_case_order(sorted_record, ["3", "2", "1"])


def test_sort_if_all_dates_are_same():
    case1 = CaseFactory.create(case_number="1")
    case2 = CaseFactory.create(case_number="2")
    case3 = CaseFactory.create(case_number="3")
    record = Record(tuple([case1, case2, case3]))
    _assert_case_order(record, ["1", "2", "3"])

    # With identical dates the original order must be preserved.
    sorted_record = RecordCreator.sort_record_by_case_date(record)
    _assert_case_order(sorted_record, ["1", "2", "3"])
43.583333
88
0.732314
219
1,569
5.022831
0.187215
0.163636
0.185455
0.147273
0.754545
0.754545
0.574545
0.573636
0.572727
0.572727
0
0.043828
0.12747
1,569
35
89
44.828571
0.759679
0
0
0.444444
0
0
0.043977
0
0
0
0
0
0.444444
1
0.074074
false
0
0.111111
0
0.185185
0
0
0
0
null
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
3
ff761144fdb405559153f7ec1ffedb9d9e57923b
102
py
Python
declaraciones/translator_gdl/apps.py
rafaelhn2021/proyecto
97d01a0524df782985342bf07671ab60e318657f
[ "MIT" ]
null
null
null
declaraciones/translator_gdl/apps.py
rafaelhn2021/proyecto
97d01a0524df782985342bf07671ab60e318657f
[ "MIT" ]
null
null
null
declaraciones/translator_gdl/apps.py
rafaelhn2021/proyecto
97d01a0524df782985342bf07671ab60e318657f
[ "MIT" ]
2
2021-07-15T17:20:10.000Z
2022-03-18T10:26:38.000Z
from django.apps import AppConfig


class TranslatorGdlConfig(AppConfig):
    """App configuration for the translator_gdl Django application."""

    # Label under which Django's app registry knows this application.
    name = 'translator_gdl'
17
37
0.784314
11
102
7.181818
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.147059
102
5
38
20.4
0.908046
0
0
0
0
0
0.137255
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
ffbb48584e60d6cc61869e5a032d3006921f8054
298
py
Python
_TEMPLATE/tests/unit_tests/test_version.py
realead/pyproj_maker
2846a02855a0b612fda4973544221dcbcb02e4ac
[ "MIT" ]
1
2020-01-12T07:15:52.000Z
2020-01-12T07:15:52.000Z
_TEMPLATE/tests/unit_tests/test_version.py
realead/pyproj_maker
2846a02855a0b612fda4973544221dcbcb02e4ac
[ "MIT" ]
null
null
null
_TEMPLATE/tests/unit_tests/test_version.py
realead/pyproj_maker
2846a02855a0b612fda4973544221dcbcb02e4ac
[ "MIT" ]
null
null
null
# NOTE: this file is a project TEMPLATE, not runnable Python as-is — the
# literal ``{name}`` placeholders are substituted with the generated package
# name before use (which is why the import below is not valid syntax here).
import unittest

import {name}


class VersionTester(unittest.TestCase):
    # Checks that the generated package starts life at version 0.1.0.
    # NOTE(review): assumes __version__ is an indexable sequence of ints
    # (e.g. a tuple), not the string "0.1.0" — confirm against the template's
    # version module.

    def test_major(self):
        self.assertEqual({name}.__version__[0], 0)

    def test_minor(self):
        self.assertEqual({name}.__version__[1], 1)

    def test_last(self):
        self.assertEqual({name}.__version__[2], 0)
18.625
48
0.684564
38
298
4.973684
0.447368
0.111111
0.301587
0.365079
0.47619
0
0
0
0
0
0
0.02439
0.174497
298
15
49
19.866667
0.743902
0
0
0
0
0
0
0
0
0
0
0
0.333333
0
null
null
0
0.222222
null
null
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
44063d0df5ac65a630b90f2f301d0584e9a77f1a
593
py
Python
tests/conftest.py
marcoaaguiar/channels-ws-auth
a9b4e87f3be81097c5078803dead9fbd8dd7be1f
[ "MIT" ]
1
2020-05-04T11:13:22.000Z
2020-05-04T11:13:22.000Z
tests/conftest.py
marcoaaguiar/channels-ws-auth
a9b4e87f3be81097c5078803dead9fbd8dd7be1f
[ "MIT" ]
21
2020-05-20T23:29:51.000Z
2021-06-25T15:38:45.000Z
tests/conftest.py
marcoaaguiar/channels-ws-auth
a9b4e87f3be81097c5078803dead9fbd8dd7be1f
[ "MIT" ]
null
null
null
import pytest

from rest_framework.test import APIClient

from channels_ws_auth.models import WSAuthTicket


# NOTE(review): per pytest's documentation, marks applied to fixtures
# (including ``pytest.mark.django_db``) have no effect — DB access here
# presumably comes from the ``django_user_model`` / ``db`` fixtures instead.
# Confirm and consider dropping the marks.
@pytest.fixture
@pytest.mark.django_db
def user(django_user_model):
    # A plain user created through the project's configured user model.
    return django_user_model.objects.create_user(username="user")


@pytest.fixture
@pytest.mark.django_db
def ticket(user):
    # A WSAuthTicket owned by the ``user`` fixture.
    return WSAuthTicket.objects.create(user=user)


@pytest.fixture
def api_client():
    # Unauthenticated DRF test client.
    return APIClient()


@pytest.fixture
def api_client_with_credentials(db, user, api_client):
    # Yields an APIClient authenticated as ``user``; teardown logs it out
    # so authentication does not leak across tests.
    api_client.force_authenticate(user=user)
    yield api_client
    api_client.force_authenticate(user=None)
21.178571
65
0.797639
83
593
5.457831
0.385542
0.119205
0.083885
0.101545
0.432671
0.322296
0.322296
0
0
0
0
0
0.114671
593
27
66
21.962963
0.862857
0
0
0.315789
0
0
0.006745
0
0
0
0
0
0
1
0.210526
false
0
0.157895
0.157895
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
4419ff8b97d4973d38305f9d1db86688eb0ee98f
158
py
Python
positive.py
rahul-samal/positive-no-in-a-range
214e9bc7c31c1e3a72fc25f895914a14842d0f69
[ "MIT" ]
null
null
null
positive.py
rahul-samal/positive-no-in-a-range
214e9bc7c31c1e3a72fc25f895914a14842d0f69
[ "MIT" ]
null
null
null
positive.py
rahul-samal/positive-no-in-a-range
214e9bc7c31c1e3a72fc25f895914a14842d0f69
[ "MIT" ]
null
null
null
# Print the non-negative entries of each list, space-separated on one line.

list1 = [12, -7, 5, 64, -14]
for value in list1:
    if value < 0:
        continue
    print(value, end=" ")

list2 = [12, 14, -95, 3]
for value in list2:
    if value < 0:
        continue
    print(value, end=" ")
17.555556
24
0.537975
31
158
2.741935
0.516129
0.141176
0.188235
0.258824
0.4
0.4
0
0
0
0
0
0.173554
0.234177
158
8
25
19.75
0.528926
0
0
0.5
0
0
0.012658
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
442bacb79b01df544306890f19f6c0819c5a5929
1,467
py
Python
content/migrations/0011_auto_20200224_1318.py
Revibe-Music/core-services
6b11cf16ad2c35d948f3a5c0e7a161e5b7cfc1b2
[ "MIT" ]
2
2022-01-24T23:30:18.000Z
2022-01-26T00:21:22.000Z
content/migrations/0011_auto_20200224_1318.py
Revibe-Music/core-services
6b11cf16ad2c35d948f3a5c0e7a161e5b7cfc1b2
[ "MIT" ]
null
null
null
content/migrations/0011_auto_20200224_1318.py
Revibe-Music/core-services
6b11cf16ad2c35d948f3a5c0e7a161e5b7cfc1b2
[ "MIT" ]
null
null
null
# Generated by Django 3.0 on 2020-02-24 19:18 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('content', '0010_auto_20200218_1124'), ] operations = [ migrations.AddField( model_name='song', name='streams_last_month', field=models.IntegerField(default=0, help_text='Number of streams recorded in DynamoDB in the last 30 days. Will be updated automatically.', verbose_name='streams in the last 30 days'), ), migrations.AddField( model_name='song', name='streams_last_week', field=models.IntegerField(blank=True, default=0, help_text='Number of streams recorded in DynamoDB in the last 7 days. Will be updated automatically.', verbose_name='streams in the last 7 days'), ), migrations.AddField( model_name='song', name='streams_this_year', field=models.IntegerField(default=0, help_text='Number of streams recorded in DynamoDB during this calendar year. Will be updated automatically.', verbose_name='streams this calendar year'), ), migrations.AddField( model_name='song', name='streams_yesterday', field=models.IntegerField(blank=True, default=0, help_text='Number of streams recorded in DynamoDB yesterday. Will be updated automatically.', verbose_name='streams yesterday'), ), ]
43.147059
207
0.660532
176
1,467
5.380682
0.335227
0.092925
0.097149
0.114044
0.756072
0.749736
0.749736
0.61246
0.462513
0.462513
0
0.036036
0.243354
1,467
33
208
44.454545
0.817117
0.029312
0
0.444444
1
0
0.398031
0.016174
0
0
0
0
0
1
0
false
0
0.037037
0
0.148148
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
443364b93cce01dcca1b820f92bf7a1836a52bac
318
py
Python
python/en/archive/dropbox/miscellaneous_python_files/test.py
aimldl/coding
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
[ "MIT" ]
null
null
null
python/en/archive/dropbox/miscellaneous_python_files/test.py
aimldl/coding
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
[ "MIT" ]
null
null
null
python/en/archive/dropbox/miscellaneous_python_files/test.py
aimldl/coding
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 17 19:39:45 2019

@author: aimldl

Small demo of numpy.random.choice: sampling without replacement from an
integer range and from a list.
"""

import numpy as np

# 3 distinct values drawn from range(5).
print( np.random.choice(5, 3, replace=False ) )

a = ['pooh', 'rabbit', 'piglet', 'Christopher']
# 3 distinct names drawn from the list.
print( np.random.choice(a, 3, replace=False ) )

# BUG FIX: the original call was np.random.choice(8, 32, replace=False),
# which raises ValueError — 32 distinct samples cannot be drawn from a
# population of 8 without replacement.  The arguments were evidently
# transposed; draw 8 distinct values from range(32) instead.
print( np.random.choice(32, 8, replace=False ) )
18.705882
49
0.603774
47
318
4.085106
0.659574
0.109375
0.203125
0.296875
0
0
0
0
0
0
0
0.075397
0.207547
318
16
50
19.875
0.686508
0.235849
0
0
0
0
0.123853
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0.6
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
44348e69bd00c61dbc6f5a2fa68fa0ad6767c0a1
1,371
py
Python
argminCVX.py
rameezw/BanditDynamicPricing
352d5ccadb40a25601f12000b4df2e4703980bbb
[ "MIT" ]
null
null
null
argminCVX.py
rameezw/BanditDynamicPricing
352d5ccadb40a25601f12000b4df2e4703980bbb
[ "MIT" ]
null
null
null
argminCVX.py
rameezw/BanditDynamicPricing
352d5ccadb40a25601f12000b4df2e4703980bbb
[ "MIT" ]
null
null
null
"""implementation of argmin step""" from scipy import optimize import numpy as np from BanditPricing import randUnitVector def argmin(eta, s_radius, barrier, g_bar_aggr_t, g_tilde, d, max_iter = 1e4): #implement argmin_ball(eta * (g_bar_1:t + g_tilde_t+1)^T x + barrier(x) #argmin is over ball with radius r TOL = 1e-10 # numerical error allowed #g_bar_aggr_t = complex_to_real(g_bar_aggr_t) #g_tilde = complex_to_real(g_tilde) #init_pt = complex_to_real(randUnitVector(d)*s_radius/2) cons = {'type': 'ineq', 'fun': lambda x: s_radius - np.linalg.norm(x), 'jac': lambda x: x / np.linalg.norm(x) if np.linalg.norm(x) > TOL else np.zeros(x.shape)} res = optimize.minimize(fun=obj, x0=randUnitVector(d)*s_radius/2, args=(eta, barrier, g_bar_aggr_t, g_tilde), constraints=cons, options={'disp': False, 'maxiter': max_iter}) return res['x'] def obj(x, eta, barrier, g_bar_aggr_t, g_tilde): return eta * np.dot(g_bar_aggr_t + g_tilde, x) + barrier(x) def real_to_complex(z): # real vector of length 2n -> complex of length n return z[:len(z)//2] + 1j * z[len(z)//2:] def complex_to_real(z): # complex vector of length n -> real of length 2n return np.concatenate((np.real(z), np.imag(z)))
44.225806
101
0.628738
222
1,371
3.675676
0.36036
0.034314
0.058824
0.066176
0.181373
0.125
0.088235
0.061275
0
0
0
0.014451
0.242888
1,371
31
102
44.225806
0.771676
0.281546
0
0
0
0
0.026721
0
0
0
0
0
0
1
0.210526
false
0
0.157895
0.157895
0.578947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
443697ff5e4be043883b688450f81a4e8c093f19
268
py
Python
PythonExecicios/ex045.2.py
lucasohara98/Python_CursoemVideo
e5266abaf67ef3e27fe8928458b3860feb0ed46d
[ "MIT" ]
null
null
null
PythonExecicios/ex045.2.py
lucasohara98/Python_CursoemVideo
e5266abaf67ef3e27fe8928458b3860feb0ed46d
[ "MIT" ]
null
null
null
PythonExecicios/ex045.2.py
lucasohara98/Python_CursoemVideo
e5266abaf67ef3e27fe8928458b3860feb0ed46d
[ "MIT" ]
null
null
null
# Rock-paper-scissors exercise (pt-BR: Pedra / Papel / Tesoura): the computer
# picks a random move, then the player types theirs as an index.
from random import randint

itens = ('Pedra', 'Papel', 'Tesoura')
computador = randint(0,2)
print(f'O computador escolheu {itens[computador]}')
# Player's move is read as an integer index into ``itens``.
jogador=(int(input('Qual é a sua jogada:')))
# NOTE(review): this prints a fixed label without the player's actual move —
# presumably it was meant to include {itens[jogador]} like the line below;
# confirm intent before changing the output.
print('jogador jogou:')
print(f'o computador jogou:{itens[computador]}')
33.5
52
0.701493
37
268
5.081081
0.648649
0.06383
0.074468
0.180851
0
0
0
0
0
0
0
0.008511
0.123134
268
8
53
33.5
0.791489
0
0
0
0
0
0.496183
0.09542
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0.428571
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
44591c961571bc4e7e09eba3facd17594e8b7ef7
298
py
Python
moe/bandit/bla/__init__.py
dstoeckel/MOE
5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c
[ "Apache-2.0" ]
966
2015-01-10T05:27:30.000Z
2022-03-26T21:04:36.000Z
moe/bandit/bla/__init__.py
dstoeckel/MOE
5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c
[ "Apache-2.0" ]
46
2015-01-16T22:33:08.000Z
2019-09-04T16:33:27.000Z
moe/bandit/bla/__init__.py
dstoeckel/MOE
5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c
[ "Apache-2.0" ]
143
2015-01-07T03:57:19.000Z
2022-02-28T01:10:45.000Z
# -*- coding: utf-8 -*- """Bandit directory containing multi-armed bandit implementations of BLA policies in python. **Files in this package** * :mod:`moe.bandit.bla.bla`: :class:`~moe.bandit.bla.bla.BLA` object for allocating bandit arms and choosing the winning arm based on BLA policy. """
29.8
92
0.721477
44
298
4.886364
0.727273
0.083721
0.111628
0.139535
0
0
0
0
0
0
0
0.003922
0.144295
298
9
93
33.111111
0.839216
0.966443
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
4459615ac22e8065f2b66d59b282e9c4800dc54a
90
py
Python
tests/test_api/test_authentication/test_oauth/protocols/__init__.py
maxzhenzhera/my_vocab_backend
2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1
[ "MIT" ]
null
null
null
tests/test_api/test_authentication/test_oauth/protocols/__init__.py
maxzhenzhera/my_vocab_backend
2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1
[ "MIT" ]
null
null
null
tests/test_api/test_authentication/test_oauth/protocols/__init__.py
maxzhenzhera/my_vocab_backend
2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1
[ "MIT" ]
null
null
null
from .used_meta_user import HasUsedMetaUserFixture __all__ = ['HasUsedMetaUserFixture']
18
50
0.833333
8
90
8.625
0.875
0
0
0
0
0
0
0
0
0
0
0
0.1
90
4
51
22.5
0.851852
0
0
0
0
0
0.244444
0.244444
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
445fc9c2fbcc52252f1dece336d9634dde1a0676
32,563
py
Python
pysnmp_mibs/IPMROUTE-STD-MIB.py
jackjack821/pysnmp-mibs
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
[ "BSD-2-Clause" ]
6
2017-04-21T13:48:08.000Z
2022-01-06T19:42:52.000Z
pysnmp_mibs/IPMROUTE-STD-MIB.py
jackjack821/pysnmp-mibs
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
[ "BSD-2-Clause" ]
1
2020-05-05T16:42:25.000Z
2020-05-05T16:42:25.000Z
pysnmp_mibs/IPMROUTE-STD-MIB.py
jackjack821/pysnmp-mibs
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
[ "BSD-2-Clause" ]
6
2020-02-08T20:28:49.000Z
2021-09-14T13:36:46.000Z
# # PySNMP MIB module IPMROUTE-STD-MIB (http://pysnmp.sf.net) # ASN.1 source http://mibs.snmplabs.com:80/asn1/IPMROUTE-STD-MIB # Produced by pysmi-0.0.7 at Sun Feb 14 00:18:06 2016 # On host bldfarm platform Linux version 4.1.13-100.fc21.x86_64 by user goose # Using Python version 3.5.0 (default, Jan 5 2016, 17:11:52) # ( OctetString, Integer, ObjectIdentifier, ) = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier") ( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ( ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection") ( IANAipMRouteProtocol, IANAipRouteProtocol, ) = mibBuilder.importSymbols("IANA-RTPROTO-MIB", "IANAipMRouteProtocol", "IANAipRouteProtocol") ( InterfaceIndexOrZero, InterfaceIndex, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero", "InterfaceIndex") ( SnmpAdminString, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString") ( ObjectGroup, ModuleCompliance, NotificationGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup") ( mib_2, TimeTicks, ModuleIdentity, Integer32, Counter32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Unsigned32, MibIdentifier, Gauge32, Counter64, Bits, NotificationType, IpAddress, ) = mibBuilder.importSymbols("SNMPv2-SMI", "mib-2", "TimeTicks", "ModuleIdentity", "Integer32", "Counter32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Unsigned32", "MibIdentifier", "Gauge32", "Counter64", "Bits", "NotificationType", "IpAddress") ( RowStatus, DisplayString, TruthValue, TextualConvention, ) = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TruthValue", "TextualConvention") ipMRouteStdMIB = 
ModuleIdentity((1, 3, 6, 1, 2, 1, 83)).setRevisions(("2000-09-22 00:00",)) if mibBuilder.loadTexts: ipMRouteStdMIB.setLastUpdated('200009220000Z') if mibBuilder.loadTexts: ipMRouteStdMIB.setOrganization('IETF IDMR Working Group') if mibBuilder.loadTexts: ipMRouteStdMIB.setContactInfo(' Dave Thaler\n Microsoft Corporation\n One Microsoft Way\n Redmond, WA 98052-6399\n US\n\n Phone: +1 425 703 8835\n EMail: dthaler@microsoft.com') if mibBuilder.loadTexts: ipMRouteStdMIB.setDescription('The MIB module for management of IP Multicast routing, but\n independent of the specific multicast routing protocol in\n use.') class LanguageTag(OctetString, TextualConvention): displayHint = '100a' subtypeSpec = OctetString.subtypeSpec+ValueSizeConstraint(1,100) ipMRouteMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 83, 1)) ipMRoute = MibIdentifier((1, 3, 6, 1, 2, 1, 83, 1, 1)) ipMRouteEnable = MibScalar((1, 3, 6, 1, 2, 1, 83, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2,))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2),))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ipMRouteEnable.setDescription('The enabled status of IP Multicast routing on this router.') ipMRouteEntryCount = MibScalar((1, 3, 6, 1, 2, 1, 83, 1, 1, 7), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteEntryCount.setDescription('The number of rows in the ipMRouteTable. 
This can be used\n to monitor the multicast routing table size.') ipMRouteTable = MibTable((1, 3, 6, 1, 2, 1, 83, 1, 1, 2), ) if mibBuilder.loadTexts: ipMRouteTable.setDescription('The (conceptual) table containing multicast routing\n information for IP datagrams sent by particular sources to\n the IP multicast groups known to this router.') ipMRouteEntry = MibTableRow((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1), ).setIndexNames((0, "IPMROUTE-STD-MIB", "ipMRouteGroup"), (0, "IPMROUTE-STD-MIB", "ipMRouteSource"), (0, "IPMROUTE-STD-MIB", "ipMRouteSourceMask")) if mibBuilder.loadTexts: ipMRouteEntry.setDescription('An entry (conceptual row) containing the multicast routing\n information for IP datagrams from a particular source and\n addressed to a particular IP multicast group address.\n Discontinuities in counters in this entry can be detected by\n observing the value of ipMRouteUpTime.') ipMRouteGroup = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 1), IpAddress()) if mibBuilder.loadTexts: ipMRouteGroup.setDescription('The IP multicast group address for which this entry\n contains multicast routing information.') ipMRouteSource = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 2), IpAddress()) if mibBuilder.loadTexts: ipMRouteSource.setDescription('The network address which when combined with the\n corresponding value of ipMRouteSourceMask identifies the\n sources for which this entry contains multicast routing\n information.') ipMRouteSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 3), IpAddress()) if mibBuilder.loadTexts: ipMRouteSourceMask.setDescription('The network mask which when combined with the corresponding\n value of ipMRouteSource identifies the sources for which\n this entry contains multicast routing information.') ipMRouteUpstreamNeighbor = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 4), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteUpstreamNeighbor.setDescription('The address of the upstream neighbor (e.g., 
RPF neighbor)\n from which IP datagrams from these sources to this multicast\n address are received, or 0.0.0.0 if the upstream neighbor is\n unknown (e.g., in CBT).') ipMRouteInIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 5), InterfaceIndexOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteInIfIndex.setDescription('The value of ifIndex for the interface on which IP\n datagrams sent by these sources to this multicast address\n are received. A value of 0 indicates that datagrams are not\n subject to an incoming interface check, but may be accepted\n on multiple interfaces (e.g., in CBT).') ipMRouteUpTime = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 6), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteUpTime.setDescription('The time since the multicast routing information\n represented by this entry was learned by the router.') ipMRouteExpiryTime = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 7), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteExpiryTime.setDescription('The minimum amount of time remaining before this entry will\n be aged out. The value 0 indicates that the entry is not\n subject to aging.') ipMRoutePkts = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRoutePkts.setDescription('The number of packets which this router has received from\n these sources and addressed to this multicast group\n address.') ipMRouteDifferentInIfPackets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteDifferentInIfPackets.setDescription('The number of packets which this router has received from\n these sources and addressed to this multicast group address,\n which were dropped because they were not received on the\n interface indicated by ipMRouteInIfIndex. 
Packets which are\n not subject to an incoming interface check (e.g., using CBT)\n are not counted.') ipMRouteOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteOctets.setDescription('The number of octets contained in IP datagrams which were\n received from these sources and addressed to this multicast\n group address, and which were forwarded by this router.') ipMRouteProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 11), IANAipMRouteProtocol()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteProtocol.setDescription('The multicast routing protocol via which this multicast\n forwarding entry was learned.') ipMRouteRtProto = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 12), IANAipRouteProtocol()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteRtProto.setDescription('The routing mechanism via which the route used to find the\n upstream or parent interface for this multicast forwarding\n entry was learned. 
Inclusion of values for routing\n protocols is not intended to imply that those protocols need\n be supported.') ipMRouteRtAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 13), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteRtAddress.setDescription('The address portion of the route used to find the upstream\n or parent interface for this multicast forwarding entry.') ipMRouteRtMask = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 14), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteRtMask.setDescription('The mask associated with the route used to find the upstream\n or parent interface for this multicast forwarding entry.') ipMRouteRtType = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2,))).clone(namedValues=NamedValues(("unicast", 1), ("multicast", 2),))).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteRtType.setDescription('The reason the given route was placed in the (logical)\n multicast Routing Information Base (RIB). A value of\n unicast means that the route would normally be placed only\n in the unicast RIB, but was placed in the multicast RIB\n (instead or in addition) due to local configuration, such as\n when running PIM over RIP. 
A value of multicast means that\n\n\n\n\n\n the route was explicitly added to the multicast RIB by the\n routing protocol, such as DVMRP or Multiprotocol BGP.') ipMRouteHCOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 2, 1, 16), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteHCOctets.setDescription('The number of octets contained in IP datagrams which were\n received from these sources and addressed to this multicast\n group address, and which were forwarded by this router.\n This object is a 64-bit version of ipMRouteOctets.') ipMRouteNextHopTable = MibTable((1, 3, 6, 1, 2, 1, 83, 1, 1, 3), ) if mibBuilder.loadTexts: ipMRouteNextHopTable.setDescription('The (conceptual) table containing information on the next-\n hops on outgoing interfaces for routing IP multicast\n datagrams. Each entry is one of a list of next-hops on\n outgoing interfaces for particular sources sending to a\n particular multicast group address.') ipMRouteNextHopEntry = MibTableRow((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1), ).setIndexNames((0, "IPMROUTE-STD-MIB", "ipMRouteNextHopGroup"), (0, "IPMROUTE-STD-MIB", "ipMRouteNextHopSource"), (0, "IPMROUTE-STD-MIB", "ipMRouteNextHopSourceMask"), (0, "IPMROUTE-STD-MIB", "ipMRouteNextHopIfIndex"), (0, "IPMROUTE-STD-MIB", "ipMRouteNextHopAddress")) if mibBuilder.loadTexts: ipMRouteNextHopEntry.setDescription('An entry (conceptual row) in the list of next-hops on\n outgoing interfaces to which IP multicast datagrams from\n particular sources to a IP multicast group address are\n routed. 
Discontinuities in counters in this entry can be\n detected by observing the value of ipMRouteUpTime.') ipMRouteNextHopGroup = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 1), IpAddress()) if mibBuilder.loadTexts: ipMRouteNextHopGroup.setDescription('The IP multicast group for which this entry specifies a\n next-hop on an outgoing interface.') ipMRouteNextHopSource = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 2), IpAddress()) if mibBuilder.loadTexts: ipMRouteNextHopSource.setDescription('The network address which when combined with the\n corresponding value of ipMRouteNextHopSourceMask identifies\n the sources for which this entry specifies a next-hop on an\n outgoing interface.') ipMRouteNextHopSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 3), IpAddress()) if mibBuilder.loadTexts: ipMRouteNextHopSourceMask.setDescription('The network mask which when combined with the corresponding\n value of ipMRouteNextHopSource identifies the sources for\n which this entry specifies a next-hop on an outgoing\n interface.') ipMRouteNextHopIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 4), InterfaceIndex()) if mibBuilder.loadTexts: ipMRouteNextHopIfIndex.setDescription('The ifIndex value of the interface for the outgoing\n interface for this next-hop.') ipMRouteNextHopAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 5), IpAddress()) if mibBuilder.loadTexts: ipMRouteNextHopAddress.setDescription('The address of the next-hop specific to this entry. 
For\n most interfaces, this is identical to ipMRouteNextHopGroup.\n NBMA interfaces, however, may have multiple next-hop\n addresses out a single outgoing interface.') ipMRouteNextHopState = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2,))).clone(namedValues=NamedValues(("pruned", 1), ("forwarding", 2),))).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteNextHopState.setDescription("An indication of whether the outgoing interface and next-\n hop represented by this entry is currently being used to\n forward IP datagrams. The value 'forwarding' indicates it\n is currently being used; the value 'pruned' indicates it is\n not.") ipMRouteNextHopUpTime = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 7), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteNextHopUpTime.setDescription('The time since the multicast routing information\n represented by this entry was learned by the router.') ipMRouteNextHopExpiryTime = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 8), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteNextHopExpiryTime.setDescription('The minimum amount of time remaining before this entry will\n be aged out. If ipMRouteNextHopState is pruned(1), the\n remaining time until the prune expires and the state reverts\n to forwarding(2). Otherwise, the remaining time until this\n entry is removed from the table. The time remaining may be\n copied from ipMRouteExpiryTime if the protocol in use for\n this entry does not specify next-hop timers. 
The value 0\n\n\n\n\n\n indicates that the entry is not subject to aging.') ipMRouteNextHopClosestMemberHops = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 9), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteNextHopClosestMemberHops.setDescription('The minimum number of hops between this router and any\n member of this IP multicast group reached via this next-hop\n on this outgoing interface. Any IP multicast datagrams for\n the group which have a TTL less than this number of hops\n will not be forwarded to this next-hop.') ipMRouteNextHopProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 10), IANAipMRouteProtocol()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteNextHopProtocol.setDescription('The routing mechanism via which this next-hop was learned.') ipMRouteNextHopPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 3, 1, 11), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteNextHopPkts.setDescription('The number of packets which have been forwarded using this\n route.') ipMRouteInterfaceTable = MibTable((1, 3, 6, 1, 2, 1, 83, 1, 1, 4), ) if mibBuilder.loadTexts: ipMRouteInterfaceTable.setDescription('The (conceptual) table containing multicast routing\n information specific to interfaces.') ipMRouteInterfaceEntry = MibTableRow((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1), ).setIndexNames((0, "IPMROUTE-STD-MIB", "ipMRouteInterfaceIfIndex")) if mibBuilder.loadTexts: ipMRouteInterfaceEntry.setDescription('An entry (conceptual row) containing the multicast routing\n information for a particular interface.') ipMRouteInterfaceIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 1), InterfaceIndex()) if mibBuilder.loadTexts: ipMRouteInterfaceIfIndex.setDescription('The ifIndex value of the interface for which this entry\n contains information.') ipMRouteInterfaceTtl = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 2), 
Integer32().subtype(subtypeSpec=ValueRangeConstraint(0,255))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ipMRouteInterfaceTtl.setDescription('The datagram TTL threshold for the interface. Any IP\n multicast datagrams with a TTL less than this threshold will\n not be forwarded out the interface. The default value of 0\n means all multicast packets are forwarded out the\n interface.') ipMRouteInterfaceProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 3), IANAipMRouteProtocol()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteInterfaceProtocol.setDescription('The routing protocol running on this interface.') ipMRouteInterfaceRateLimit = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 4), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ipMRouteInterfaceRateLimit.setDescription('The rate-limit, in kilobits per second, of forwarded\n multicast traffic on the interface. A rate-limit of 0\n indicates that no rate limiting is done.') ipMRouteInterfaceInMcastOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteInterfaceInMcastOctets.setDescription('The number of octets of multicast packets that have arrived\n on the interface, including framing characters. 
This object\n is similar to ifInOctets in the Interfaces MIB, except that\n only multicast packets are counted.') ipMRouteInterfaceOutMcastOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteInterfaceOutMcastOctets.setDescription('The number of octets of multicast packets that have been\n sent on the interface.') ipMRouteInterfaceHCInMcastOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteInterfaceHCInMcastOctets.setDescription('The number of octets of multicast packets that have arrived\n on the interface, including framing characters. This object\n is a 64-bit version of ipMRouteInterfaceInMcastOctets. It\n is similar to ifHCInOctets in the Interfaces MIB, except\n that only multicast packets are counted.') ipMRouteInterfaceHCOutMcastOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 4, 1, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: ipMRouteInterfaceHCOutMcastOctets.setDescription('The number of octets of multicast packets that have been\n\n\n\n\n\n sent on the interface. 
This object is a 64-bit version of\n ipMRouteInterfaceOutMcastOctets.') ipMRouteBoundaryTable = MibTable((1, 3, 6, 1, 2, 1, 83, 1, 1, 5), ) if mibBuilder.loadTexts: ipMRouteBoundaryTable.setDescription("The (conceptual) table listing the router's scoped\n multicast address boundaries.") ipMRouteBoundaryEntry = MibTableRow((1, 3, 6, 1, 2, 1, 83, 1, 1, 5, 1), ).setIndexNames((0, "IPMROUTE-STD-MIB", "ipMRouteBoundaryIfIndex"), (0, "IPMROUTE-STD-MIB", "ipMRouteBoundaryAddress"), (0, "IPMROUTE-STD-MIB", "ipMRouteBoundaryAddressMask")) if mibBuilder.loadTexts: ipMRouteBoundaryEntry.setDescription('An entry (conceptual row) in the ipMRouteBoundaryTable\n representing a scoped boundary.') ipMRouteBoundaryIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 5, 1, 1), InterfaceIndex()) if mibBuilder.loadTexts: ipMRouteBoundaryIfIndex.setDescription('The IfIndex value for the interface to which this boundary\n applies. Packets with a destination address in the\n associated address/mask range will not be forwarded out this\n interface.') ipMRouteBoundaryAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 5, 1, 2), IpAddress()) if mibBuilder.loadTexts: ipMRouteBoundaryAddress.setDescription('The group address which when combined with the\n corresponding value of ipMRouteBoundaryAddressMask\n identifies the group range for which the scoped boundary\n exists. 
Scoped addresses must come from the range 239.x.x.x\n as specified in RFC 2365.') ipMRouteBoundaryAddressMask = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 5, 1, 3), IpAddress()) if mibBuilder.loadTexts: ipMRouteBoundaryAddressMask.setDescription('The group address mask which when combined with the\n corresponding value of ipMRouteBoundaryAddress identifies\n the group range for which the scoped boundary exists.') ipMRouteBoundaryStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 5, 1, 4), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ipMRouteBoundaryStatus.setDescription('The status of this row, by which new entries may be\n created, or old entries deleted from this table.') ipMRouteScopeNameTable = MibTable((1, 3, 6, 1, 2, 1, 83, 1, 1, 6), ) if mibBuilder.loadTexts: ipMRouteScopeNameTable.setDescription('The (conceptual) table listing the multicast scope names.') ipMRouteScopeNameEntry = MibTableRow((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1), ).setIndexNames((0, "IPMROUTE-STD-MIB", "ipMRouteScopeNameAddress"), (0, "IPMROUTE-STD-MIB", "ipMRouteScopeNameAddressMask"), (1, "IPMROUTE-STD-MIB", "ipMRouteScopeNameLanguage")) if mibBuilder.loadTexts: ipMRouteScopeNameEntry.setDescription('An entry (conceptual row) in the ipMRouteScopeNameTable\n representing a multicast scope name.') ipMRouteScopeNameAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1, 1), IpAddress()) if mibBuilder.loadTexts: ipMRouteScopeNameAddress.setDescription('The group address which when combined with the\n corresponding value of ipMRouteScopeNameAddressMask\n identifies the group range associated with the multicast\n scope. 
Scoped addresses must come from the range\n 239.x.x.x.') ipMRouteScopeNameAddressMask = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1, 2), IpAddress()) if mibBuilder.loadTexts: ipMRouteScopeNameAddressMask.setDescription('The group address mask which when combined with the\n corresponding value of ipMRouteScopeNameAddress identifies\n the group range associated with the multicast scope.') ipMRouteScopeNameLanguage = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1, 3), LanguageTag()) if mibBuilder.loadTexts: ipMRouteScopeNameLanguage.setDescription('The RFC 1766-style language tag associated with the scope\n name.') ipMRouteScopeNameString = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1, 4), SnmpAdminString()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ipMRouteScopeNameString.setDescription('The textual name associated with the multicast scope. The\n value of this object should be suitable for displaying to\n end-users, such as when allocating a multicast address in\n this scope. 
When no name is specified, the default value of\n this object should be the string 239.x.x.x/y with x and y\n replaced appropriately to describe the address and mask\n length associated with the scope.') ipMRouteScopeNameDefault = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1, 5), TruthValue().clone('false')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ipMRouteScopeNameDefault.setDescription('If true, indicates a preference that the name in the\n following language should be used by applications if no name\n is available in a desired language.') ipMRouteScopeNameStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 83, 1, 1, 6, 1, 6), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ipMRouteScopeNameStatus.setDescription('The status of this row, by which new entries may be\n created, or old entries deleted from this table.') ipMRouteMIBConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 83, 2)) ipMRouteMIBCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 83, 2, 1)) ipMRouteMIBGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 83, 2, 2)) ipMRouteMIBCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 83, 2, 1, 1)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteMIBBasicGroup"), ("IPMROUTE-STD-MIB", "ipMRouteMIBRouteGroup"), ("IPMROUTE-STD-MIB", "ipMRouteMIBBoundaryGroup"), ("IPMROUTE-STD-MIB", "ipMRouteMIBHCInterfaceGroup"),)) if mibBuilder.loadTexts: ipMRouteMIBCompliance.setDescription('The compliance statement for the IP Multicast MIB.') ipMRouteMIBBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 1)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteEnable"), ("IPMROUTE-STD-MIB", "ipMRouteEntryCount"), ("IPMROUTE-STD-MIB", "ipMRouteUpstreamNeighbor"), ("IPMROUTE-STD-MIB", "ipMRouteInIfIndex"), ("IPMROUTE-STD-MIB", "ipMRouteUpTime"), ("IPMROUTE-STD-MIB", "ipMRouteExpiryTime"), ("IPMROUTE-STD-MIB", "ipMRouteNextHopState"), ("IPMROUTE-STD-MIB", "ipMRouteNextHopUpTime"), ("IPMROUTE-STD-MIB", "ipMRouteNextHopExpiryTime"), ("IPMROUTE-STD-MIB", "ipMRouteNextHopProtocol"), 
("IPMROUTE-STD-MIB", "ipMRouteNextHopPkts"), ("IPMROUTE-STD-MIB", "ipMRouteInterfaceTtl"), ("IPMROUTE-STD-MIB", "ipMRouteInterfaceProtocol"), ("IPMROUTE-STD-MIB", "ipMRouteInterfaceRateLimit"), ("IPMROUTE-STD-MIB", "ipMRouteInterfaceInMcastOctets"), ("IPMROUTE-STD-MIB", "ipMRouteInterfaceOutMcastOctets"), ("IPMROUTE-STD-MIB", "ipMRouteProtocol"),)) if mibBuilder.loadTexts: ipMRouteMIBBasicGroup.setDescription('A collection of objects to support basic management of IP\n Multicast routing.') ipMRouteMIBHopCountGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 2)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteNextHopClosestMemberHops"),)) if mibBuilder.loadTexts: ipMRouteMIBHopCountGroup.setDescription('A collection of objects to support management of the use of\n hop counts in IP Multicast routing.') ipMRouteMIBBoundaryGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 3)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteBoundaryStatus"), ("IPMROUTE-STD-MIB", "ipMRouteScopeNameString"), ("IPMROUTE-STD-MIB", "ipMRouteScopeNameDefault"), ("IPMROUTE-STD-MIB", "ipMRouteScopeNameStatus"),)) if mibBuilder.loadTexts: ipMRouteMIBBoundaryGroup.setDescription('A collection of objects to support management of scoped\n multicast address boundaries.') ipMRouteMIBPktsOutGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 4)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteNextHopPkts"),)) if mibBuilder.loadTexts: ipMRouteMIBPktsOutGroup.setDescription('A collection of objects to support management of packet\n counters for each outgoing interface entry of a route.') ipMRouteMIBHCInterfaceGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 5)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteInterfaceHCInMcastOctets"), ("IPMROUTE-STD-MIB", "ipMRouteInterfaceHCOutMcastOctets"), ("IPMROUTE-STD-MIB", "ipMRouteHCOctets"),)) if mibBuilder.loadTexts: ipMRouteMIBHCInterfaceGroup.setDescription('A collection of objects providing information specific to\n high speed (greater than 20,000,000 bits/second) network\n 
interfaces.') ipMRouteMIBRouteGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 6)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRouteRtProto"), ("IPMROUTE-STD-MIB", "ipMRouteRtAddress"), ("IPMROUTE-STD-MIB", "ipMRouteRtMask"), ("IPMROUTE-STD-MIB", "ipMRouteRtType"),)) if mibBuilder.loadTexts: ipMRouteMIBRouteGroup.setDescription('A collection of objects providing information on the\n relationship between multicast routing information, and the\n IP Forwarding Table.') ipMRouteMIBPktsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 83, 2, 2, 7)).setObjects(*(("IPMROUTE-STD-MIB", "ipMRoutePkts"), ("IPMROUTE-STD-MIB", "ipMRouteDifferentInIfPackets"), ("IPMROUTE-STD-MIB", "ipMRouteOctets"),)) if mibBuilder.loadTexts: ipMRouteMIBPktsGroup.setDescription('A collection of objects to support management of packet\n counters for each forwarding entry.') mibBuilder.exportSymbols("IPMROUTE-STD-MIB", ipMRouteMIBConformance=ipMRouteMIBConformance, ipMRouteMIBPktsGroup=ipMRouteMIBPktsGroup, ipMRouteEntryCount=ipMRouteEntryCount, LanguageTag=LanguageTag, ipMRouteHCOctets=ipMRouteHCOctets, ipMRouteNextHopUpTime=ipMRouteNextHopUpTime, ipMRouteScopeNameTable=ipMRouteScopeNameTable, ipMRouteMIBBasicGroup=ipMRouteMIBBasicGroup, ipMRoutePkts=ipMRoutePkts, ipMRouteNextHopSource=ipMRouteNextHopSource, ipMRouteInterfaceRateLimit=ipMRouteInterfaceRateLimit, ipMRouteScopeNameDefault=ipMRouteScopeNameDefault, ipMRouteNextHopClosestMemberHops=ipMRouteNextHopClosestMemberHops, ipMRouteScopeNameAddress=ipMRouteScopeNameAddress, ipMRouteRtProto=ipMRouteRtProto, ipMRouteNextHopProtocol=ipMRouteNextHopProtocol, ipMRouteTable=ipMRouteTable, ipMRouteNextHopExpiryTime=ipMRouteNextHopExpiryTime, ipMRouteRtType=ipMRouteRtType, ipMRouteScopeNameEntry=ipMRouteScopeNameEntry, ipMRouteRtAddress=ipMRouteRtAddress, ipMRouteScopeNameString=ipMRouteScopeNameString, ipMRouteInterfaceProtocol=ipMRouteInterfaceProtocol, ipMRouteMIBCompliances=ipMRouteMIBCompliances, ipMRouteBoundaryTable=ipMRouteBoundaryTable, 
ipMRouteScopeNameStatus=ipMRouteScopeNameStatus, ipMRouteGroup=ipMRouteGroup, ipMRouteNextHopTable=ipMRouteNextHopTable, ipMRouteSource=ipMRouteSource, ipMRouteMIBHopCountGroup=ipMRouteMIBHopCountGroup, ipMRouteEntry=ipMRouteEntry, PYSNMP_MODULE_ID=ipMRouteStdMIB, ipMRouteExpiryTime=ipMRouteExpiryTime, ipMRouteBoundaryAddress=ipMRouteBoundaryAddress, ipMRouteMIBPktsOutGroup=ipMRouteMIBPktsOutGroup, ipMRouteSourceMask=ipMRouteSourceMask, ipMRouteNextHopSourceMask=ipMRouteNextHopSourceMask, ipMRouteInIfIndex=ipMRouteInIfIndex, ipMRouteScopeNameLanguage=ipMRouteScopeNameLanguage, ipMRouteOctets=ipMRouteOctets, ipMRouteNextHopPkts=ipMRouteNextHopPkts, ipMRouteNextHopAddress=ipMRouteNextHopAddress, ipMRouteNextHopState=ipMRouteNextHopState, ipMRouteMIBRouteGroup=ipMRouteMIBRouteGroup, ipMRouteBoundaryAddressMask=ipMRouteBoundaryAddressMask, ipMRouteRtMask=ipMRouteRtMask, ipMRouteInterfaceInMcastOctets=ipMRouteInterfaceInMcastOctets, ipMRouteBoundaryIfIndex=ipMRouteBoundaryIfIndex, ipMRouteProtocol=ipMRouteProtocol, ipMRouteNextHopIfIndex=ipMRouteNextHopIfIndex, ipMRouteMIBHCInterfaceGroup=ipMRouteMIBHCInterfaceGroup, ipMRouteDifferentInIfPackets=ipMRouteDifferentInIfPackets, ipMRouteInterfaceHCInMcastOctets=ipMRouteInterfaceHCInMcastOctets, ipMRouteNextHopEntry=ipMRouteNextHopEntry, ipMRouteInterfaceHCOutMcastOctets=ipMRouteInterfaceHCOutMcastOctets, ipMRouteBoundaryStatus=ipMRouteBoundaryStatus, ipMRouteEnable=ipMRouteEnable, ipMRouteMIBCompliance=ipMRouteMIBCompliance, ipMRouteInterfaceOutMcastOctets=ipMRouteInterfaceOutMcastOctets, ipMRouteNextHopGroup=ipMRouteNextHopGroup, ipMRouteInterfaceIfIndex=ipMRouteInterfaceIfIndex, ipMRouteInterfaceEntry=ipMRouteInterfaceEntry, ipMRouteStdMIB=ipMRouteStdMIB, ipMRouteInterfaceTable=ipMRouteInterfaceTable, ipMRouteUpstreamNeighbor=ipMRouteUpstreamNeighbor, ipMRouteUpTime=ipMRouteUpTime, ipMRouteScopeNameAddressMask=ipMRouteScopeNameAddressMask, ipMRoute=ipMRoute, ipMRouteInterfaceTtl=ipMRouteInterfaceTtl, 
ipMRouteMIBBoundaryGroup=ipMRouteMIBBoundaryGroup, ipMRouteMIBObjects=ipMRouteMIBObjects, ipMRouteBoundaryEntry=ipMRouteBoundaryEntry, ipMRouteMIBGroups=ipMRouteMIBGroups)
201.006173
3,289
0.746522
3,885
32,563
6.256113
0.130502
0.007982
0.010862
0.011685
0.390455
0.335034
0.250854
0.230611
0.211027
0.1928
0
0.039854
0.148543
32,563
161
3,290
202.254658
0.83676
0.009459
0
0
0
0.281046
0.480556
0.035012
0
0
0
0
0
1
0
false
0
0.058824
0
0.078431
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
4469dae33136cc5533e4430017f88f84bbccbca9
149
py
Python
samples/proj/newYears.py
Mister-Meeseeks/init-sh
f1876bf9e18dcce3b177dd5deab43473d95df353
[ "Apache-2.0" ]
null
null
null
samples/proj/newYears.py
Mister-Meeseeks/init-sh
f1876bf9e18dcce3b177dd5deab43473d95df353
[ "Apache-2.0" ]
5
2020-02-18T18:14:54.000Z
2020-02-20T08:10:27.000Z
samples/proj/newYears.py
Mister-Meeseeks/initSh
f1876bf9e18dcce3b177dd5deab43473d95df353
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env initPy import sys import myProj.newYears as nye nCount = int(sys.argv[1]) \ if len(sys.argv) > 1 else 10 nye.countdown(nCount)
14.9
32
0.691275
26
149
3.961538
0.730769
0.135922
0.15534
0
0
0
0
0
0
0
0
0.03252
0.174497
149
9
33
16.555556
0.804878
0.134228
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
4472f668403a4b273ff13e11acb7f9233fde0379
426
py
Python
katas/XX-Primers/unittest_sample.py
plipp/Python-Coding-Dojos
4a206dfc0c73fc0d2b39ab33032b2aaf89055609
[ "MIT" ]
null
null
null
katas/XX-Primers/unittest_sample.py
plipp/Python-Coding-Dojos
4a206dfc0c73fc0d2b39ab33032b2aaf89055609
[ "MIT" ]
null
null
null
katas/XX-Primers/unittest_sample.py
plipp/Python-Coding-Dojos
4a206dfc0c73fc0d2b39ab33032b2aaf89055609
[ "MIT" ]
null
null
null
# test with: python -m unittest unittest_sample.py import unittest def divide(a, b): return a / b class BasicArithmeticTest(unittest.TestCase): def test_divide(self): self.assertEqual(divide(10, 5), 2) def test_divide_by_zero(self): with self.assertRaises(ZeroDivisionError): divide(10, 0) # ... and much more: see https://docs.python.org/3/library/unittest.html#module-unittest
22.421053
88
0.690141
58
426
4.982759
0.637931
0.013841
0.089965
0
0
0
0
0
0
0
0
0.023324
0.194836
426
18
89
23.666667
0.819242
0.314554
0
0
0
0
0
0
0
0
0
0
0.222222
1
0.333333
false
0
0.111111
0.111111
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
447b419f1a1bf9055de2c6233794cbe9630430c9
370
py
Python
setup.py
adamharsh/crud_generator
d6eecd3c9a39b7b94156ebc7b86046a3c329a8c9
[ "MIT" ]
null
null
null
setup.py
adamharsh/crud_generator
d6eecd3c9a39b7b94156ebc7b86046a3c329a8c9
[ "MIT" ]
1
2020-05-01T15:02:37.000Z
2020-05-02T00:56:32.000Z
setup.py
adamharsh/crud_generator
d6eecd3c9a39b7b94156ebc7b86046a3c329a8c9
[ "MIT" ]
null
null
null
from setuptools import setup with open("README.md","r") as fh: long_description = fh.read() setup( name='crud_generator', version='1.0.2', description='This will generate a crud operations (crud.py) for your Database tables.', long_description=long_description, long_description_content_type="text/markdown", py_modules=["crud_generator"], )
30.833333
91
0.721622
50
370
5.16
0.72
0.232558
0.147287
0.232558
0
0
0
0
0
0
0
0.009585
0.154054
370
11
92
33.636364
0.814696
0
0
0
1
0
0.345946
0
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
447f1b3d29dfc34af858d449462868ae8893e701
798
py
Python
equip/visitors/blocks.py
neuroo/equip
470c168cf26d1d8340aa5ab37a5364d999a0b2f4
[ "Apache-2.0" ]
102
2015-01-03T13:51:03.000Z
2022-02-28T03:56:26.000Z
equip/visitors/blocks.py
neuroo/equip
470c168cf26d1d8340aa5ab37a5364d999a0b2f4
[ "Apache-2.0" ]
4
2016-12-09T00:31:39.000Z
2019-07-28T09:48:18.000Z
equip/visitors/blocks.py
neuroo/equip
470c168cf26d1d8340aa5ab37a5364d999a0b2f4
[ "Apache-2.0" ]
9
2015-05-08T12:17:28.000Z
2020-12-17T08:20:00.000Z
# -*- coding: utf-8 -*- """ equip.visitors.blocks ~~~~~~~~~~~~~~~~~~~~~ Callback the visit basic blocks in the program. :copyright: (c) 2014 by Romain Gaucher (@rgaucher) :license: Apache 2, see LICENSE for more details. """ class BlockVisitor(object): """ A basic block visitor. It first receives the control-flow graph, and then the ``visit`` method is called with all basic blocks in the CFG. The blocks are not passed to the ``visit`` method with a particular order. """ def __init__(self): self._control_flow = None @property def control_flow(self): return self._control_flow @control_flow.setter def control_flow(self, value): self._control_flow = value def new_control_flow(self): pass def visit(self, block): pass
21.567568
71
0.661654
109
798
4.706422
0.568807
0.17154
0.087719
0.062378
0
0
0
0
0
0
0
0.0096
0.216792
798
36
72
22.166667
0.8112
0.541353
0
0.153846
0
0
0
0
0
0
0
0
0
1
0.384615
false
0.153846
0
0.076923
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
44934010d9f29b100420f8257c51b9cf96bc0009
1,075
py
Python
glycan_profiling/structure/__init__.py
mstim/glycresoft
1d305c42c7e6cba60326d8246e4a485596a53513
[ "Apache-2.0" ]
null
null
null
glycan_profiling/structure/__init__.py
mstim/glycresoft
1d305c42c7e6cba60326d8246e4a485596a53513
[ "Apache-2.0" ]
null
null
null
glycan_profiling/structure/__init__.py
mstim/glycresoft
1d305c42c7e6cba60326d8246e4a485596a53513
[ "Apache-2.0" ]
null
null
null
from .lru import LRUCache, LRUNode, LRUMapping from .structure_loader import ( CachingGlycanCompositionParser, CachingGlycopeptideParser, FragmentCachingGlycopeptide, PeptideProteinRelation, DecoyFragmentCachingGlycopeptide, SequenceReversingCachingGlycopeptideParser, GlycopeptideCache, CachingPeptideParser, PeptideDatabaseRecord,) from .scan import ( ScanStub, ScanWrapperBase, ScanInformation) from .fragment_match_map import FragmentMatchMap, SpectrumGraph from .utils import KeyTransformingDecoratorDict __all__ = [ "LRUNode", "LRUCache", "LRUMapping", "CachingGlycanCompositionParser", "CachingGlycopeptideParser", "CachingPeptideParser", "FragmentCachingGlycopeptide", "PeptideProteinRelation", "DecoyFragmentCachingGlycopeptide", "SequenceReversingCachingGlycopeptideParser", "GlycopeptideCache", "ScanStub", "ScanWrapperBase", "ScanInformation", "FragmentMatchMap", "SpectrumGraph", "KeyTransformingDecoratorDict", "PeptideDatabaseRecord", ]
26.219512
63
0.75814
55
1,075
14.690909
0.527273
0.136139
0.200495
0.304455
0.346535
0
0
0
0
0
0
0
0.167442
1,075
40
64
26.875
0.902793
0
0
0
0
0
0.331163
0.211163
0
0
0
0
0
1
0
false
0
0.135135
0
0.135135
0
0
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
922aaf587088ac12b5ecba9af37e0c9fb4e0253b
179
py
Python
voters/api/urls.py
isidaruk/eurovision_project
976743e66a2fed17c0513f17a9a7d35850e9cde5
[ "MIT" ]
null
null
null
voters/api/urls.py
isidaruk/eurovision_project
976743e66a2fed17c0513f17a9a7d35850e9cde5
[ "MIT" ]
8
2020-02-12T00:23:27.000Z
2022-03-08T21:10:13.000Z
voters/api/urls.py
isidaruk/eurovision_project
976743e66a2fed17c0513f17a9a7d35850e9cde5
[ "MIT" ]
null
null
null
from rest_framework.routers import DefaultRouter from voters.api.views import VoterViewSet router = DefaultRouter() router.register('', VoterViewSet) urlpatterns = router.urls
19.888889
48
0.815642
20
179
7.25
0.7
0
0
0
0
0
0
0
0
0
0
0
0.106145
179
8
49
22.375
0.90625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
92354d970621773f03931eaf9eb3207125ff4c2f
128
py
Python
common/constants.py
Twilighters/test_task_with_email
76ef8bcd4690804abe3e6ad68b37f6665cac5236
[ "Apache-2.0" ]
null
null
null
common/constants.py
Twilighters/test_task_with_email
76ef8bcd4690804abe3e6ad68b37f6665cac5236
[ "Apache-2.0" ]
null
null
null
common/constants.py
Twilighters/test_task_with_email
76ef8bcd4690804abe3e6ad68b37f6665cac5236
[ "Apache-2.0" ]
null
null
null
class EmailConstants: SEND_EMAIL_BUTTON_TEXT = "Написать" # noqa SUCCESS_SEND_EMAIL_TEXT = "Письмо отправлено" # noqa
32
57
0.75
15
128
6
0.733333
0.2
0
0
0
0
0
0
0
0
0
0
0.179688
128
3
58
42.666667
0.857143
0.070313
0
0
0
0
0.215517
0
0
0
0
0
0
1
0
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
923633001e4e5ac81dc7a749954114bfb54131be
142
py
Python
emag_marketplace/__init__.py
chawel/python-emag
481c47a4f845c41bfac192e336947030f1a98ebc
[ "MIT" ]
2
2018-12-16T09:11:21.000Z
2022-01-20T13:23:26.000Z
emag_marketplace/__init__.py
chawel/python-emag
481c47a4f845c41bfac192e336947030f1a98ebc
[ "MIT" ]
null
null
null
emag_marketplace/__init__.py
chawel/python-emag
481c47a4f845c41bfac192e336947030f1a98ebc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from .client import EMAGClient VERSION = __version__ = '0.0.1' AUTHOR = __author__ = 'Paweł Chaniewski (www.cwsi.pl)'
28.4
54
0.683099
19
142
4.684211
0.842105
0
0
0
0
0
0
0
0
0
0
0.033058
0.147887
142
5
54
28.4
0.702479
0.147887
0
0
0
0
0.291667
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
923ccd2f1d21a022a7f14472a033510e29b53dc6
286
py
Python
Practice/Python/Sets/no_idea.py
nifannn/HackerRank
b05318251226704b1fb949c29aa49493d6ced44b
[ "MIT" ]
7
2019-02-22T10:34:26.000Z
2021-07-13T01:51:48.000Z
Practice/Python/Sets/no_idea.py
nifannn/HackerRank
b05318251226704b1fb949c29aa49493d6ced44b
[ "MIT" ]
null
null
null
Practice/Python/Sets/no_idea.py
nifannn/HackerRank
b05318251226704b1fb949c29aa49493d6ced44b
[ "MIT" ]
7
2018-11-09T13:52:34.000Z
2021-03-18T20:36:22.000Z
if __name__ == '__main__': n, m = list(map(int, input().split(" "))) arr = list(map(int, input().split(" "))) set_a = set(map(int, input().split(" "))) set_b = set(map(int, input().split(' '))) print(sum([1 if e in set_a else -1 if e in set_b else 0 for e in arr]))
40.857143
75
0.555944
51
286
2.882353
0.431373
0.163265
0.29932
0.435374
0.693878
0
0
0
0
0
0
0.013393
0.216783
286
6
76
47.666667
0.642857
0
0
0
0
0
0.041958
0
0
0
0
0
0
1
0
false
0
0
0
0
0.166667
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
925e76b65d37215de639b1e5dcfb17c249ed0a29
112
py
Python
dont_binge/__init__.py
ichbinjon/Dont_Binge
a39dcddaa97d5208cdaa1b1b668e939dd60de96e
[ "MIT" ]
null
null
null
dont_binge/__init__.py
ichbinjon/Dont_Binge
a39dcddaa97d5208cdaa1b1b668e939dd60de96e
[ "MIT" ]
null
null
null
dont_binge/__init__.py
ichbinjon/Dont_Binge
a39dcddaa97d5208cdaa1b1b668e939dd60de96e
[ "MIT" ]
null
null
null
from flask import Flask #API Keys are taken from OS environment! app = Flask(__name__) import dont_binge.views
18.666667
40
0.794643
18
112
4.666667
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.151786
112
6
41
18.666667
0.884211
0.348214
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
9276a678b901f4384af2c34ef04dabbf1ae24125
656
py
Python
tilecloud/filter/inboundingpyramid.py
camptocamp/tilecloud
fa9864c969917ad47a6d4ce945fc19fe5cdcfb68
[ "Unlicense" ]
134
2017-02-25T22:14:50.000Z
2022-03-04T09:51:13.000Z
tilecloud/filter/inboundingpyramid.py
camptocamp/tilecloud
fa9864c969917ad47a6d4ce945fc19fe5cdcfb68
[ "Unlicense" ]
106
2017-03-07T13:45:04.000Z
2022-03-31T13:01:49.000Z
tilecloud/filter/inboundingpyramid.py
camptocamp/tilecloud
fa9864c969917ad47a6d4ce945fc19fe5cdcfb68
[ "Unlicense" ]
14
2017-05-05T14:46:50.000Z
2022-01-02T18:37:31.000Z
from typing import Optional from tilecloud import BoundingPyramid, Tile class InBoundingPyramid: """ Creates a filter that filters out tiles that are not in the specified bounding pyramid. When called the filter returns ``None`` if the tile is not in the bounding pyramid. bounding_pyramid: A :class:`tilecloud.BoundingPyramid` object. """ def __init__(self, bounding_pyramid: BoundingPyramid): self.bounding_pyramid = bounding_pyramid def __call__(self, tile: Tile) -> Optional[Tile]: if tile is None or tile.tilecoord not in self.bounding_pyramid: return None return tile
29.818182
107
0.702744
82
656
5.463415
0.45122
0.234375
0.127232
0.133929
0
0
0
0
0
0
0
0
0.23628
656
21
108
31.238095
0.894212
0.370427
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.777778
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3